Compare commits

..

No commits in common. "ee6730e13f978ccc6a474d6c107552817e691438" and "6376a37d9b1515047b81ccdda35aeab6a4518b2c" have entirely different histories.

17 changed files with 109 additions and 154 deletions

View File

@ -42,8 +42,8 @@ module.exports = async (req, res) => {
// We want to add a post representing this matrix message.
const authors = [{ authorAddress, weightPPM: 1000000 }];
// TODO: Take references as input to this API call, referencing other posts or matrix events
const references = [];
// TODO: Take citations as input to this API call, referencing other posts or matrix events
const citations = [];
const content = `Matrix event URI: ${eventUri}`;
const embeddedData = {
roomId,
@ -55,11 +55,11 @@ module.exports = async (req, res) => {
const signature = await wallet.signMessage(contentToVerify);
const { hash } = await write({
sender, authors, references, content, embeddedData, signature,
sender, authors, citations, content, embeddedData, signature,
});
// Now we want to add a post on-chain
const { alreadyAdded } = await addPostWithRetry(authors, hash, references);
const { alreadyAdded } = await addPostWithRetry(authors, hash, citations);
if (alreadyAdded) {
console.log(`Post already added for matrix event ${eventUri}`);

View File

@ -8,8 +8,8 @@ const { authorAddresses, authorPrivKeys } = require('../util/db');
const { dao } = require('../util/contracts');
const write = require('../util/forum/write');
// Each post allocates 30% of its reputation to references
const PPM_TO_REFERENCES = 300000;
// Each post allocates 30% of its reputation to citations
const PPM_TO_CITATIONS = 300000;
const fetchWithRetry = async (url, retryDelay = 5000) => {
let retry = false;
@ -116,14 +116,14 @@ HREF ${paper.url}`;
};
};
const addPostWithRetry = async (authors, hash, references, retryDelay = 5000) => {
const addPostWithRetry = async (authors, hash, citations, retryDelay = 5000) => {
try {
await dao.addPost(authors, hash, references);
await dao.addPost(authors, hash, citations);
} catch (e) {
if (e.code === 'REPLACEMENT_UNDERPRICED') {
console.log('retry delay (sec):', retryDelay / 1000);
await Promise.delay(retryDelay);
return addPostWithRetry(authors, hash, references, retryDelay * 2);
return addPostWithRetry(authors, hash, citations, retryDelay * 2);
} if (e.reason === 'A post with this postId already exists') {
return { alreadyAdded: true };
}
@ -135,19 +135,19 @@ const addPostWithRetry = async (authors, hash, references, retryDelay = 5000) =>
const importPaper = async (paper) => {
console.log('references count:', paper.references.length);
const { paperId } = paper;
const paperReferences = paper.references.filter((x) => !!x.paperId);
const eachReferenceWeightPPM = Math.floor(PPM_TO_REFERENCES / paperReferences.length);
const references = (await Promise.mapSeries(
paperReferences,
const references = paper.references.filter((x) => !!x.paperId);
const eachCitationWeightPPM = Math.floor(PPM_TO_CITATIONS / references.length);
const citations = (await Promise.mapSeries(
references,
async (citedPaper) => {
// We need to fetch this paper so we can generate the post we WOULD add to the forum.
// That way, if we later add the cited paper to the blockchain it will have the correct hash.
// The forum allows dangling references to support this use case.
// The forum allows dangling citations to support this use case.
try {
const citedPost = await generatePost(citedPaper);
const citedPostHash = objectHash(citedPost);
return {
weightPPM: eachReferenceWeightPPM,
weightPPM: eachCitationWeightPPM,
targetPostId: citedPostHash,
};
} catch (e) {
@ -156,10 +156,10 @@ const importPaper = async (paper) => {
},
)).filter((x) => !!x);
// Make sure reference weights sum to the designated total
if (references.length) {
const totalReferenceWeight = references.reduce((t, { weightPPM }) => t + weightPPM, 0);
references[0].weightPPM += PPM_TO_REFERENCES - totalReferenceWeight;
// Make sure citation weights sum to the designated total
if (citations.length) {
const totalCitationWeight = citations.reduce((t, { weightPPM }) => t + weightPPM, 0);
citations[0].weightPPM += PPM_TO_CITATIONS - totalCitationWeight;
}
// Create a post for this paper
@ -169,12 +169,12 @@ const importPaper = async (paper) => {
// Write the new post to our database
const { hash } = await write({
authors, content, signature, embeddedData, references,
authors, content, signature, embeddedData, citations,
});
// Add the post to the forum (on-chain)
console.log('addPostWithRetry', { authors, hash, references });
const { alreadyAdded } = await addPostWithRetry(authors, hash, references);
console.log('addPostWithRetry', { authors, hash, citations });
const { alreadyAdded } = await addPostWithRetry(authors, hash, citations);
if (alreadyAdded) {
console.log(`Post already added for paper ${paperId}`);
} else {

View File

@ -33,8 +33,8 @@ const submitRollup = async () => {
if (!authors.length) {
return { batchItems: [] };
}
// TODO: Compute references as aggregate of the references of posts in the batch
const references = [];
// TODO: Compute citations as aggregate of the citations of posts in the batch
const citations = [];
const content = `Batch of ${batchItems.length} items`;
const embeddedData = {
batchItems,
@ -45,11 +45,11 @@ const submitRollup = async () => {
const signature = await wallet.signMessage(contentToVerify);
// Write to the forum database
const { hash: batchPostId } = await write({
sender, authors, references, content, embeddedData, signature,
sender, authors, citations, content, embeddedData, signature,
});
// Add rollup post on-chain
console.log('adding batch post on-chain', { authors, batchPostId, references });
await addPostWithRetry(authors, batchPostId, references);
console.log('adding batch post on-chain', { authors, batchPostId, citations });
await addPostWithRetry(authors, batchPostId, citations);
// Stake our availability to be the next rollup worker
console.log('staking availability to be the next rollup worker');
await stakeRollupAvailability();

View File

@ -1,9 +1,9 @@
const callWithRetry = require('./call-with-retry');
const { dao } = require('./contracts');
const addPostWithRetry = async (authors, hash, references) => {
const addPostWithRetry = async (authors, hash, citations) => {
try {
await callWithRetry(() => dao.addPost(authors, hash, references));
await callWithRetry(() => dao.addPost(authors, hash, citations));
} catch (e) {
if (e.reason === 'A post with this postId already exists') {
return { alreadyAdded: true };

View File

@ -10,7 +10,7 @@ const read = async (hash) => {
data.embeddedData = data.embeddedData || undefined;
const {
sender, authors, content, signature, embeddedData, references,
sender, authors, content, signature, embeddedData, citations,
} = data;
// Verify hash
@ -29,7 +29,7 @@ const read = async (hash) => {
}
return {
sender, authors, content, signature, embeddedData, references,
sender, authors, content, signature, embeddedData, citations,
};
};

View File

@ -2,10 +2,9 @@ const objectHash = require('object-hash');
const verifySignature = require('../verify-signature');
const { forum } = require('../db');
const read = require('./read');
const write = async ({
sender, authors, content, references, embeddedData, signature,
sender, authors, content, citations, embeddedData, signature,
}) => {
// Check author signature
if (!verifySignature({
@ -19,33 +18,15 @@ const write = async ({
// Compute content hash
const data = {
sender, authors, content, signature, embeddedData, references,
sender, authors, content, signature, embeddedData, citations,
};
// We omit references from the hash in order to support forum graph import.
// We omit citations from the hash in order to support forum graph import.
// When a post is imported, the hashes can be precomputed for cited posts,
// without traversing the graph infinitely to compute hashes along entire reference chain.
const hash = objectHash({
sender, authors, content, signature, embeddedData,
});
// Make sure a post with this hash has not already been written
let existingPost;
try {
existingPost = await read(hash);
// If this doesn't throw, it means a post with this hash was already written
} catch (e) {
if (e.status !== 404) {
throw e;
}
}
if (existingPost) {
const err = new Error();
err.status = 403;
err.message = `A post with hash ${hash} already exists`;
throw err;
}
// Store content
await forum.put(hash, data);

View File

@ -29,7 +29,7 @@ contract Onboarding is Work, IOnValidate {
// Make work evidence post
Author[] memory authors = new Author[](1);
authors[0] = Author(1000000, stake.worker);
dao.addPost(authors, request.evidencePostId, request.references);
dao.addPost(authors, request.evidencePostId, request.citations);
emit WorkApprovalSubmitted(requestIndex, approval);
// Initiate validation pool
uint poolIndex = dao.initiateValidationPool{
@ -73,10 +73,10 @@ contract Onboarding is Work, IOnValidate {
return;
}
// Make onboarding post
Reference[] memory emptyReferences;
Citation[] memory emptyCitations;
Author[] memory authors = new Author[](1);
authors[0] = Author(1000000, request.customer);
dao.addPost(authors, request.requestPostId, emptyReferences);
dao.addPost(authors, request.requestPostId, emptyCitations);
dao.initiateValidationPool{value: request.fee / 10}(
request.requestPostId,
POOL_DURATION,

View File

@ -34,7 +34,7 @@ abstract contract RollableWork is Work {
// Make work evidence post
Author[] memory authors = new Author[](1);
authors[0] = Author(1000000, stake.worker);
dao.addPost(authors, request.evidencePostId, request.references);
dao.addPost(authors, request.evidencePostId, request.citations);
// send worker stakes and customer fee to rollup contract
dao.forwardAllowance(

View File

@ -22,7 +22,7 @@ abstract contract Work is Availability, IOnProposalAccepted {
uint stakeIndex;
string requestPostId;
string evidencePostId;
Reference[] references;
Citation[] citations;
bool approval;
}
@ -71,7 +71,7 @@ abstract contract Work is Availability, IOnProposalAccepted {
function submitWorkEvidence(
uint requestIndex,
string calldata evidencePostId,
Reference[] calldata references
Citation[] calldata citations
) external {
WorkRequest storage request = requests[requestIndex];
require(
@ -85,8 +85,8 @@ abstract contract Work is Availability, IOnProposalAccepted {
);
request.status = WorkStatus.EvidenceSubmitted;
request.evidencePostId = evidencePostId;
for (uint i = 0; i < references.length; i++) {
request.references.push(references[i]);
for (uint i = 0; i < citations.length; i++) {
request.citations.push(citations[i]);
}
emit WorkEvidenceSubmitted(requestIndex);
}
@ -107,7 +107,7 @@ abstract contract Work is Availability, IOnProposalAccepted {
// Make work evidence post
Author[] memory authors = new Author[](1);
authors[0] = Author(1000000, stake.worker);
dao.addPost(authors, request.evidencePostId, request.references);
dao.addPost(authors, request.evidencePostId, request.citations);
emit WorkApprovalSubmitted(requestIndex, approval);
// Initiate validation pool
uint poolIndex = dao.initiateValidationPool{value: request.fee}(

View File

@ -251,9 +251,9 @@ contract DAO {
function addPost(
Author[] calldata authors,
string calldata postId,
Reference[] calldata references
Citation[] calldata citations
) public {
forum.addPost(msg.sender, authors, postId, references);
forum.addPost(msg.sender, authors, postId, citations);
}
function posts(

View File

@ -3,7 +3,7 @@ pragma solidity ^0.8.24;
import "./DAO.sol";
struct Reference {
struct Citation {
int weightPPM;
string targetPostId;
}
@ -17,7 +17,7 @@ struct Post {
string id;
address sender;
Author[] authors;
Reference[] references;
Citation[] citations;
uint reputation;
// TODO: timestamp
}
@ -47,7 +47,7 @@ contract Forum {
address sender,
Author[] calldata authors,
string calldata postId,
Reference[] calldata references
Citation[] calldata citations
) external {
require(
msg.sender == address(dao),
@ -72,31 +72,31 @@ contract Forum {
authorTotalWeightPPM == 1000000,
"Author weights must sum to 1000000"
);
for (uint i = 0; i < references.length; i++) {
post.references.push(references[i]);
for (uint i = 0; i < citations.length; i++) {
post.citations.push(citations[i]);
}
int totalReferenceWeightPos;
int totalReferenceWeightNeg;
for (uint i = 0; i < post.references.length; i++) {
int weight = post.references[i].weightPPM;
int totalCitationWeightPos;
int totalCitationWeightNeg;
for (uint i = 0; i < post.citations.length; i++) {
int weight = post.citations[i].weightPPM;
require(
weight >= -1000000,
"Each reference weight must be >= -1000000"
"Each citation weight must be >= -1000000"
);
require(
weight <= 1000000,
"Each reference weight must be <= 1000000"
"Each citation weight must be <= 1000000"
);
if (weight > 0) totalReferenceWeightPos += weight;
else totalReferenceWeightNeg += weight;
if (weight > 0) totalCitationWeightPos += weight;
else totalCitationWeightNeg += weight;
}
require(
totalReferenceWeightPos <= 1000000,
"Sum of positive references must be <= 1000000"
totalCitationWeightPos <= 1000000,
"Sum of positive citations must be <= 1000000"
);
require(
totalReferenceWeightNeg >= -1000000,
"Sum of negative references must be >= -1000000"
totalCitationWeightNeg >= -1000000,
"Sum of negative citations must be >= -1000000"
);
dao.emitPostAdded(postId);
}
@ -108,15 +108,15 @@ contract Forum {
return post.authors;
}
function _handleReference(
function _handleCitation(
string memory postId,
Reference memory ref,
Citation memory citation,
int amount,
bool initialNegative,
uint depth
) internal returns (int outboundAmount) {
outboundAmount = (amount * ref.weightPPM) / 1000000;
if (bytes(ref.targetPostId).length == 0) {
outboundAmount = (amount * citation.weightPPM) / 1000000;
if (bytes(citation.targetPostId).length == 0) {
// Incineration
require(
outboundAmount >= 0,
@ -125,7 +125,7 @@ contract Forum {
dao.burn(address(dao), uint(outboundAmount));
return outboundAmount;
}
int balanceToOutbound = _edgeBalances[postId][ref.targetPostId];
int balanceToOutbound = _edgeBalances[postId][citation.targetPostId];
if (initialNegative) {
if (outboundAmount < 0) {
outboundAmount = outboundAmount > -balanceToOutbound
@ -138,13 +138,13 @@ contract Forum {
}
}
int refund = propagateReputation(
ref.targetPostId,
citation.targetPostId,
outboundAmount,
initialNegative || (depth == 0 && ref.weightPPM < 0),
initialNegative || (depth == 0 && citation.weightPPM < 0),
depth + 1
);
outboundAmount -= refund;
_edgeBalances[postId][ref.targetPostId] += outboundAmount;
_edgeBalances[postId][citation.targetPostId] += outboundAmount;
}
function _distributeAmongAuthors(
@ -200,28 +200,28 @@ contract Forum {
}
Post storage post = posts[postId];
if (post.authors.length == 0) {
// We most likely got here via a reference to a post that hasn't been added yet.
// We support this scenario so that a reference graph can be imported one post at a time.
// We most likely got here via a citation to a post that hasn't been added yet.
// We support this scenario so that a citation graph can be imported one post at a time.
return amount;
}
// Propagate negative references first
for (uint i = 0; i < post.references.length; i++) {
if (post.references[i].weightPPM < 0) {
amount -= _handleReference(
// Propagate negative citations first
for (uint i = 0; i < post.citations.length; i++) {
if (post.citations[i].weightPPM < 0) {
amount -= _handleCitation(
postId,
post.references[i],
post.citations[i],
amount,
initialNegative,
depth
);
}
}
// Now propagate positive references
for (uint i = 0; i < post.references.length; i++) {
if (post.references[i].weightPPM > 0) {
amount -= _handleReference(
// Now propagate positive citations
for (uint i = 0; i < post.citations.length; i++) {
if (post.citations[i].weightPPM > 0) {
amount -= _handleCitation(
postId,
post.references[i],
post.citations[i],
amount,
initialNegative,
depth

View File

@ -39,10 +39,10 @@ describe('Forum', () => {
{ value: fee ?? POOL_FEE },
);
const addPost = (author, postId, references) => dao.addPost([{
const addPost = (author, postId, citations) => dao.addPost([{
weightPPM: 1000000,
authorAddress: author,
}], postId, references);
}], postId, citations);
describe('Post', () => {
beforeEach(async () => {
@ -113,7 +113,7 @@ describe('Forum', () => {
], postId, [])).to.be.rejectedWith('Author weights must sum to 1000000');
});
it('should be able to donate reputation via references', async () => {
it('should be able to donate reputation via citations', async () => {
await addPost(account1, 'content-id', []);
await addPost(account2, 'second-content-id', [{ weightPPM: 500000, targetPostId: 'content-id' }]);
await initiateValidationPool({ postId: 'second-content-id' });
@ -124,7 +124,7 @@ describe('Forum', () => {
expect(await dao.balanceOf(account2)).to.equal(50);
});
it('should be able to leach reputation via references', async () => {
it('should be able to leach reputation via citations', async () => {
await addPost(account1, 'content-id', []);
expect((await dao.posts('content-id')).reputation).to.equal(0);
await initiateValidationPool({ postId: 'content-id' });
@ -144,7 +144,7 @@ describe('Forum', () => {
expect((await dao.posts('second-content-id')).reputation).to.equal(150);
});
it('should be able to redistribute power via references', async () => {
it('should be able to redistribute power via citations', async () => {
await addPost(account1, 'content-id', []);
await initiateValidationPool({ postId: 'content-id' });
await dao.evaluateOutcome(0);
@ -165,7 +165,7 @@ describe('Forum', () => {
expect(await dao.balanceOf(account3)).to.equal(0);
});
it('should be able to reverse a negative reference with a negative reference', async () => {
it('should be able to reverse a negative citation with a negative citation', async () => {
await addPost(account1, 'content-id', []);
await initiateValidationPool({ postId: 'content-id' });
await dao.evaluateOutcome(0);
@ -294,7 +294,7 @@ describe('Forum', () => {
expect(await dao.totalSupply()).to.equal(50);
});
describe('negative reference of a post, the author having already staked and lost reputation', async () => {
describe('negative citation of a post, the author having already staked and lost reputation', async () => {
beforeEach(async () => {
await addPost(account1, 'content-id', []);
await initiateValidationPool({ postId: 'content-id' });
@ -372,7 +372,7 @@ describe('Forum', () => {
});
});
describe('negative reference of a post with multiple authors', async () => {
describe('negative citation of a post with multiple authors', async () => {
beforeEach(async () => {
await dao.addPost([
{ weightPPM: 500000, authorAddress: account1 },

View File

@ -8,7 +8,7 @@ function ViewPostModal({
}) {
const handleClose = () => setShow(false);
const {
content, authors, embeddedData, references,
content, authors, embeddedData, citations,
} = post;
const embeddedDataJson = JSON.stringify(embeddedData, null, 2);
@ -46,12 +46,12 @@ function ViewPostModal({
{embeddedDataJson}
</pre>
)}
{references && references.length > 0 && (
{citations && citations.length > 0 && (
<>
<hr />
<h5>References</h5>
<h5>Citations</h5>
<Stack>
{references.map(({ weightPPM, targetPostId }) => (
{citations.map(({ weightPPM, targetPostId }) => (
<div key={targetPostId}>
{targetPostId}
{' '}

View File

@ -8,7 +8,7 @@ window.Buffer = Buffer;
class Post {
constructor({
sender, authors, content, signature, hash, embeddedData, references,
sender, authors, content, signature, hash, embeddedData, citations,
}) {
this.sender = sender;
this.authors = authors;
@ -16,14 +16,14 @@ class Post {
this.signature = signature;
this.hash = hash;
this.embeddedData = embeddedData ?? {};
this.references = references ?? [];
this.citations = citations ?? [];
}
// Read from API
static async read(hash) {
const {
data: {
sender, authors, content, signature, embeddedData, references,
sender, authors, content, signature, embeddedData, citations,
},
} = await axios.get(`/api/read/${hash}`);
// Verify hash
@ -46,7 +46,7 @@ class Post {
}
}
return new Post({
sender, authors, content, signature, hash, embeddedData, references,
sender, authors, content, signature, hash, embeddedData, citations,
});
}
@ -73,7 +73,7 @@ class Post {
content: this.content,
signature: this.signature,
embeddedData: this.embeddedData,
references: this.references,
citations: this.citations,
};
const { data: hash } = await axios.post('/api/write', data);
this.hash = hash;
@ -81,7 +81,7 @@ class Post {
// Upload hash to blockchain
async publish(DAO, account) {
await DAO.methods.addPost(this.authors, this.hash, this.references ?? []).send({
await DAO.methods.addPost(this.authors, this.hash, this.citations ?? []).send({
from: account,
gas: 1000000,
});

View File

@ -10,7 +10,7 @@ Validation Pools mint and award Reputation (REP). REP can be used for staking fo
#### Reputation
1. The Reputation contract must be able to associate a non-negative numeric value with any given wallet address.
1. The Reputation contract must be able to associate a non-negative numeric value with a given wallet address.
1. The value associated with a given wallet address may be modified only by the results of a Validation Pool (explained below). The Validation Pool must be able to execute the following operations

View File

@ -156,44 +156,18 @@ To achieve the Rollup requirements, the contract must do the following:
1. If this method is called to replace a batch worker who has failed to submit the next batch, a Validation Pool should be initiated and the batch worker's stakes submitted in favor of the VP. The DAO members may then stake against this VP, punishing the worker who failed to submit the batch.
## Off-chain Operations
## Backend
As outlined in the Rollup section above, we need to define processes for handling off-chain Posts and Validation Pools. On-chain, Posts are represented by a unique identifier, but the Post content is always stored off-chain. So, every on-chain Post must have a corresponding off-chain Post. These off-chain posts should be visible to the public. To achieve this, we introduce a Forum API, that supports writing and reading off-chain Posts.
### API
### Forum API
- Read
- Write
#### Write
Parameters
| Name | Type |
| --- | --- |
| `sender` | Wallet address |
| `authors` | Array of tuples: (Wallet address, weight) |
| `content` | String |
| `references` | Array of tuples: (Post ID, weight) |
| `embeddedData` | Object |
| `signature` | Sender or author signature of `content` and `embeddedData` |
In order to protect the integrity of the off-chain Forum, the API should verify that the Post is signed by one of its authors or by the sender. The reason for allowing the Post to be signed by the sender rather than by an author is to support the use case of adding a Post on behalf of its author(s).
The API should compute a hash of all input parameters except for `references`, and use this hash as the key for storing the Post. The hash should also be returned to the caller.
The reason for excluding `references` from the hash is to support the use case of importing Posts from an existing data source. If we included the references, then to import any Posts from an existing data source, we would have to import every referenced post, starting with the earliest, in order to compute the entire tree of references made by a given Post. By omitting references from the hash, it becomes possible to precompute the hash (a.k.a. ID) of referenced Posts that have not yet been imported.
The reason for excluding `references` from the signature is to reduce the number of queries that must be made to an existing data source when importing a Post.
Note that because `references` is not included in the hash, there is a replay attack vulnerability. Someone could read an existing Post, modify the `references`, and write the modified Post back to the API. The signatures will still be valid even though the references have changed, and the new references will overwrite the previous references. Note that this would only affect the off-chain record of the Post's references. If the Post is published to the on-chain Forum, it is not subject to such modification, as a Post with a given ID can only be added once. To mitigate this vulnerability in the off-chain Forum, we should reject a write attempt if a Post with the given ID already exists.
When
#### Read
## Automated Staking
### Automated Staking
### Validation Pool
#### Validation Pool
### Rollup
#### Rollup
## User Interface

View File

@ -14,7 +14,7 @@
| Customer | A user with a Blockchain Wallet. Pays to engage a work contract. |
| Staking Client | An application that listens for validation pools, performs validation and stakes for/against the pool. |
| Reputation (REP) | A non-transferable ERC20 token representing the results of validation pools within a given expertise. |
| Forum Post | A signed contribution with weighted references |
| Forum Post | A signed contribution with weighted citations |
| Validation Pool (VP) | Takes CSPR/ETH and author stakes as input. Requires REP-weighted consensus. Mints and distributes REP; distributes CSPR/ETH. |
| Work Evidence (WEV) | A forum post representing work that fulfills some work request(s) |
| Work Smart Contract (WSC) | A contract that takes REP stakes from workers and CSPR/ETH from customers, and puts them toward a validation pool (VP) targeting work evidence (WEV). |