// SPDX-License-Identifier: Unlicense
pragma solidity ^0.8.24;

import "./core/DAO.sol";
import "./Availability.sol";

contract Rollup is Availability {
    constructor(DAO dao) Availability(dao) {}
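
    // Rollup queues finished work items so that their fees and availability
    // stakes can be settled through a single batch validation pool instead of
    // one pool per item. The `stakes` array, `assignWork()`, and the `dao`
    // reference used below are assumed to be inherited from Availability.sol
    // and core/DAO.sol; their exact semantics are defined in those files.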

    struct BatchItem {
        address sender;
        address worker;
        uint stakeAmount;
        uint fee;
        string postId;
    }

    mapping(uint => BatchItem) public items;
    uint public itemCount;
    address public batchWorker;
    uint batchWorkerStakeIndex;
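
    // Queue layout: items[0..itemCount-1] are the pending work items in the
    // order they were added. batchWorker and batchWorkerStakeIndex identify
    // the worker (and their entry in Availability's `stakes`) currently
    // assigned to submit the next batch; address(0) means no worker has been
    // assigned yet.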

    event BatchItemAdded(string postId, address sender, uint fee);
    event BatchWorkerAssigned(address batchWorker);

    /// Instead of initiating a validation pool, call this method to include
    /// the stakes and fee in the next batch validation pool.
    function addItem(
        address author,
        uint stakeAmount,
        string calldata postId
    ) public payable {
        BatchItem storage item = items[itemCount++];
        item.sender = msg.sender;
        item.worker = author;
        item.stakeAmount = stakeAmount;
        item.fee = msg.value;
        item.postId = postId;
        emit BatchItemAdded(postId, item.sender, item.fee);
    }
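
    // Illustrative only: a contract that has already collected a worker's
    // availability stake could queue a finished post roughly like this,
    // forwarding the post's fee as msg.value (names are placeholders):
    //
    //     rollup.addItem{value: fee}(authorAddress, stakeAmount, postId);
    //
    // The item then waits in `items` until the next submitBatch call.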

    /// To be called by the currently assigned batch worker.
    /// If no batch worker has been assigned, this may be called by anybody,
    /// but it will only succeed if it is able to assign a new worker.
    function submitBatch(
        string calldata batchPostId,
        string[] calldata batchItems,
        uint poolDuration
    ) public returns (uint poolIndex) {
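        // Steps: (1) check that the submitted list matches the head of the
        // queue, (2) open one validation pool funded with the combined fees,
        // (3) re-stake each item's worker stake plus the batch worker's own
        // stake on that pool, (4) drop the batched items from the queue, and
        // (5) assign the next batch worker.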
        if (batchWorker != address(0)) {
            require(
                msg.sender == batchWorker,
                "Batch result must be submitted by current batch worker"
            );
        }
        require(batchItems.length <= itemCount, "Batch size too large");
        // Make sure all batch items match
        for (uint i = 0; i < batchItems.length; i++) {
            require(
                keccak256(bytes(batchItems[i])) ==
                    keccak256(bytes(items[i].postId)),
                "Batch item mismatch"
            );
        }
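        // (The loop above compares keccak256 hashes because Solidity has no
        // built-in string equality operator.)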
        // Initiate a validation pool for this batch
        uint fee;
        for (uint i = 0; i < batchItems.length; i++) {
            fee += items[i].fee;
        }
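        // The pool is funded with the combined fees held by this contract.
        // The positional arguments below (the two uint256 pairs, the 100, the
        // two flags, and the empty bytes) are passed through unchanged to
        // DAO.initiateValidationPool; their meaning is defined in
        // core/DAO.sol, not here.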
        poolIndex = dao.initiateValidationPool{value: fee}(
            batchPostId,
            poolDuration,
            [uint256(1), uint256(3)],
            [uint256(1), uint256(2)],
            100,
            true,
            false,
            ""
        );
        // Include all the availability stakes from the batched work
        for (uint i = 0; i < batchItems.length; i++) {
            dao.delegatedStakeOnValidationPool(
                poolIndex,
                items[i].worker,
                items[i].stakeAmount,
                true
            );
        }
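        // delegatedStakeOnValidationPool (core/DAO.sol) is assumed to place a
        // stake on the pool on behalf of the named worker, so each queued
        // item's availability stake carries over to the batch pool.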
        // Include availability stakes from the batch worker
        if (batchWorker != address(0)) {
            dao.delegatedStakeOnValidationPool(
                poolIndex,
                batchWorker,
                stakes[batchWorkerStakeIndex].amount,
                true
            );
        }
        if (batchItems.length < itemCount) {
            // Some items were added after this batch was computed.
            // Keep them in the queue to be included in the next batch.
            for (uint i = 0; i < itemCount - batchItems.length; i++) {
                items[i] = items[batchItems.length + i];
            }
            itemCount = itemCount - batchItems.length;
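            // Slots at or beyond the new itemCount are left in place; every
            // field is reassigned when addItem later reuses a slot, so the
            // stale data is harmless.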
        } else {
            // Reset item count so we can start the next batch
            itemCount = 0;
        }
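        // assignWork() and `stakes` are assumed to come from Availability.sol;
        // whoever is selected here becomes the only address allowed to call
        // submitBatch for the next batch.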
        // Select the next worker
        batchWorkerStakeIndex = assignWork();
        batchWorker = stakes[batchWorkerStakeIndex].worker;
        emit BatchWorkerAssigned(batchWorker);
    }
}