dgf-prototype/ethereum/contracts/Rollup.sol

// SPDX-License-Identifier: Unlicense
pragma solidity ^0.8.24;

import "./core/DAO.sol";
import "./Availability.sol";
contract Rollup is Availability {
    constructor(DAO dao) Availability(dao) {}
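
    /// A single unit of batched work: the worker who performed it, the
    /// availability stake backing it, the fee owed for it, and the content ID
    /// of the evidence for the work.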
    struct BatchItem {
        address worker;
        uint stakeAmount;
        uint fee;
        string evidenceContentId;
    }

    /// Items accumulated for the current batch, keyed by insertion order.
    mapping(uint => BatchItem) items;
    /// Number of items in the current batch; reset to zero after submission.
    uint itemCount;
    /// Worker currently assigned to submit the batch result, and the index of
    /// that worker's stake in the inherited Availability `stakes`.
    address batchWorker;
    uint batchWorkerStakeIndex;
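
    /// Append a unit of work to the current batch.
    /// @param worker             account that performed the work
    /// @param stakeAmount        availability stake backing the work
    /// @param fee                fee to forward into the batch validation pool
    /// @param evidenceContentId  content ID of the evidence for the work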
    function addItem(
        address worker,
        uint stakeAmount,
        uint fee,
        string calldata evidenceContentId
    ) public {
        BatchItem storage item = items[itemCount++];
        item.worker = worker;
        item.stakeAmount = stakeAmount;
        item.fee = fee;
        item.evidenceContentId = evidenceContentId;
    }
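
    /// Settle the current batch: open a validation pool funded by the batched
    /// fees, delegate each item's availability stake (and the batch worker's
    /// own stake) onto the pool, then reset the batch and assign the next
    /// batch worker.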
    function submitBatch(
        string calldata batchPostId,
        uint poolDuration
    ) public {
        if (batchWorker != address(0)) {
            require(
                msg.sender == batchWorker,
                "Batch result must be submitted by current batch worker"
            );
        }
        // Initiate a validation pool for this batch, funded by the sum of the
        // batched item fees. The fraction, percent, and flag arguments are
        // forwarded as-is; their meaning is defined by
        // DAO.initiateValidationPool.
        uint fee;
        for (uint i = 0; i < itemCount; i++) {
            fee += items[i].fee;
        }
        uint poolIndex = dao.initiateValidationPool{value: fee}(
            batchPostId,
            poolDuration,
            [uint256(1), uint256(3)],
            [uint256(1), uint256(2)],
            100,
            true,
            false,
            ""
        );
        // Include all the availability stakes from the batched work
        for (uint i = 0; i < itemCount; i++) {
            dao.delegatedStakeOnValidationPool(
                poolIndex,
                items[i].worker,
                items[i].stakeAmount,
                true
            );
        }
        // Include the availability stake from the batch worker. On the very
        // first batch no worker has been assigned yet, so skip this step to
        // avoid staking on behalf of the zero address.
        if (batchWorker != address(0)) {
            dao.delegatedStakeOnValidationPool(
                poolIndex,
                batchWorker,
                stakes[batchWorkerStakeIndex].amount,
                true
            );
        }
        // Reset the item count so we can start the next batch
        itemCount = 0;
        // Select the next batch worker
        batchWorkerStakeIndex = assignWork();
        batchWorker = stakes[batchWorkerStakeIndex].worker;
    }
}