From 3023408e04cd67d47cb6faebf6e7e6b18487855d Mon Sep 17 00:00:00 2001 From: Ladd Hoffman Date: Mon, 10 Jun 2024 12:31:48 -0500 Subject: [PATCH] compute references for batch post --- .../rollup/batch/compute-author-weights.js | 35 ----- .../batch/compute-author-weights.test.js | 33 ----- .../rollup/batch/compute-batch-post.js | 59 ++++++++ .../rollup/batch/compute-batch-post.test.js | 128 ++++++++++++++++++ .../rollup/batch/submit-rollup.js | 10 +- backend/src/event-handlers/rollup/index.js | 22 +-- backend/src/event-handlers/rollup/utils.js | 9 ++ specification/docs/system-design.md | 2 +- 8 files changed, 215 insertions(+), 83 deletions(-) delete mode 100644 backend/src/event-handlers/rollup/batch/compute-author-weights.js delete mode 100644 backend/src/event-handlers/rollup/batch/compute-author-weights.test.js create mode 100644 backend/src/event-handlers/rollup/batch/compute-batch-post.js create mode 100644 backend/src/event-handlers/rollup/batch/compute-batch-post.test.js diff --git a/backend/src/event-handlers/rollup/batch/compute-author-weights.js b/backend/src/event-handlers/rollup/batch/compute-author-weights.js deleted file mode 100644 index bf39d06..0000000 --- a/backend/src/event-handlers/rollup/batch/compute-author-weights.js +++ /dev/null @@ -1,35 +0,0 @@ -const Promise = require('bluebird'); -const read = require('../../../util/forum/read'); -const { matrixPools } = require('../../../util/db'); - -const computeAuthorWeights = async (batchItems_) => { - const weights = {}; - await Promise.each(batchItems_, async (postId) => { - const post = await read(postId); - const matrixPool = await matrixPools.get(postId); - const { fee, result: { votePasses, quorumMet } } = matrixPool; - post.authors.forEach(({ authorAddress, weightPPM }) => { - if (votePasses && quorumMet) { - weights[authorAddress] = weights[authorAddress] ?? 
0; - // scale by matrix pool outcome and strength - weights[authorAddress] += weightPPM * fee; - } - // TODO: Rewards for policing - // TODO: Propagation via references - }); - }); - // Rescale author weights so they sum to 1000000 - const sumOfWeights = Object.values(weights).reduce((t, v) => t + v, 0); - if (!sumOfWeights) { - return []; - } - const scaledWeights = Object.values(weights) - .map((weight) => Math.floor((weight * 1000000) / sumOfWeights)); - const sumOfScaledWeights = scaledWeights.reduce((t, v) => t + v, 0); - scaledWeights[0] += 1000000 - sumOfScaledWeights; - const authors = Object.keys(weights) - .map((authorAddress, i) => ({ authorAddress, weightPPM: scaledWeights[i] })); - return authors; -}; - -module.exports = computeAuthorWeights; diff --git a/backend/src/event-handlers/rollup/batch/compute-author-weights.test.js b/backend/src/event-handlers/rollup/batch/compute-author-weights.test.js deleted file mode 100644 index 84df940..0000000 --- a/backend/src/event-handlers/rollup/batch/compute-author-weights.test.js +++ /dev/null @@ -1,33 +0,0 @@ -// const { expect } = require('chai'); -const assert = require('assert'); -const proxyquire = require('proxyquire'); - -let posts = {}; -let pools = {}; -const read = (postId) => posts[postId]; -const matrixPools = { - get: (postId) => pools[postId], -}; - -const computeAuthorWeights = proxyquire('./compute-author-weights', { - '../../util/forum/read': read, - '../../util/db': { matrixPools }, -}); - -describe('computeAuthorWeights', () => { - it('computes authorship for multiple posts by one author', async () => { - posts = { - a: { authors: [{ authorAddress: '0xa1', weightPPM: 1000000 }] }, - b: { authors: [{ authorAddress: '0xa1', weightPPM: 1000000 }] }, - c: { authors: [{ authorAddress: '0xa1', weightPPM: 1000000 }] }, - }; - pools = { - a: { fee: 100, result: { votePasses: true, quorumMet: true } }, - b: { fee: 100, result: { votePasses: true, quorumMet: true } }, - c: { fee: 100, result: { votePasses: true, quorumMet: true } }, - }; - - const authors = await computeAuthorWeights(['a', 'b', 'c']); - assert.deepEqual(authors, [{ authorAddress: '0xa1', weightPPM: 1000000 }]); - }); -}); diff --git a/backend/src/event-handlers/rollup/batch/compute-batch-post.js b/backend/src/event-handlers/rollup/batch/compute-batch-post.js new file mode 100644 index 0000000..3b0a93b --- /dev/null +++ b/backend/src/event-handlers/rollup/batch/compute-batch-post.js @@ -0,0 +1,59 @@ +const Promise = require('bluebird'); +const read = require('../../../util/forum/read'); +const { matrixPools } = require('../../../util/db'); + +const WEIGHT_TO_REFERENCES = 300000; + +const computeBatchPost = async (batchItems_) => { + const weights = {}; + let references = []; + await Promise.each(batchItems_, async (postId) => { + const post = await read(postId); + const matrixPool = await matrixPools.get(postId); + const { fee, result: { votePasses, quorumMet } } = matrixPool; + if (votePasses && quorumMet) { + post.authors.forEach(({ authorAddress, weightPPM }) => { + weights[authorAddress] = weights[authorAddress] ?? 
0;
+        // scale by matrix pool fee
+        weights[authorAddress] += weightPPM * fee;
+      });
+      post.references?.forEach(({ targetPostId, weightPPM }) => {
+        // scale by matrix pool fee
+        references.push({
+          targetPostId,
+          weightPPM: weightPPM * fee,
+        });
+      });
+    }
+    // TODO: Rewards for policing
+  });
+
+  // Rescale author weights so they sum to 1000000
+  const sumOfWeights = Object.values(weights).reduce((t, v) => t + v, 0);
+  if (!sumOfWeights) {
+    return { authors: [], references: [] };
+  }
+  const scaledWeights = Object.values(weights)
+    .map((weight) => Math.floor((weight * 1000000) / sumOfWeights));
+  const sumOfScaledWeights = scaledWeights.reduce((t, v) => t + v, 0);
+  scaledWeights[0] += 1000000 - sumOfScaledWeights;
+  const authors = Object.keys(weights)
+    .map((authorAddress, i) => ({ authorAddress, weightPPM: scaledWeights[i] }));
+
+  // Rescale reference weights so they sum to WEIGHT_TO_REFERENCES
+  if (references.length) {
+    const sumOfReferenceWeights = references.reduce((t, { weightPPM }) => t + weightPPM, 0);
+    const scaledReferences = references.map((reference) => ({
+      targetPostId: reference.targetPostId,
+      weightPPM: Math.floor((reference.weightPPM * WEIGHT_TO_REFERENCES) / sumOfReferenceWeights),
+    }));
+    const sumOfScaledReferenceWeights = scaledReferences
+      .reduce((t, { weightPPM }) => t + weightPPM, 0);
+    scaledReferences[0].weightPPM += WEIGHT_TO_REFERENCES - sumOfScaledReferenceWeights;
+    references = scaledReferences;
+  }
+
+  return { authors, references };
+};
+
+module.exports = computeBatchPost;
diff --git a/backend/src/event-handlers/rollup/batch/compute-batch-post.test.js b/backend/src/event-handlers/rollup/batch/compute-batch-post.test.js
new file mode 100644
index 0000000..6d20f2c
--- /dev/null
+++ b/backend/src/event-handlers/rollup/batch/compute-batch-post.test.js
@@ -0,0 +1,128 @@
+// const { expect } = require('chai');
+const assert = require('assert');
+const proxyquire = require('proxyquire');
+
+let posts = {};
+let pools = {};
+const read = (postId) => posts[postId];
+const matrixPools = {
+  get: (postId) => pools[postId],
+};
+
+const computeBatchPost = proxyquire('./compute-batch-post', {
+  '../../../util/forum/read': read,
+  '../../../util/db': { matrixPools },
+});
+
+describe('computeBatchPost', () => {
+  it('multiple posts by one author', async () => {
+    posts = {
+      a: { authors: [{ authorAddress: '0xa1', weightPPM: 1000000 }] },
+      b: { authors: [{ authorAddress: '0xa1', weightPPM: 1000000 }] },
+      c: { authors: [{ authorAddress: '0xa1', weightPPM: 1000000 }] },
+    };
+    pools = {
+      a: { fee: 100, result: { votePasses: true, quorumMet: true } },
+      b: { fee: 100, result: { votePasses: true, quorumMet: true } },
+      c: { fee: 100, result: { votePasses: true, quorumMet: true } },
+    };
+
+    const { authors, references } = await computeBatchPost(['a', 'b', 'c']);
+    assert.deepEqual(authors, [{ authorAddress: '0xa1', weightPPM: 1000000 }]);
+    assert.deepEqual(references, []);
+  });
+
+  it('posts by different authors', async () => {
+    posts = {
+      a: { authors: [{ authorAddress: '0xa1', weightPPM: 1000000 }] },
+      b: { authors: [{ authorAddress: '0xa2', weightPPM: 1000000 }] },
+    };
+    pools = {
+      a: { fee: 100, result: { votePasses: true, quorumMet: true } },
+      b: { fee: 100, result: { votePasses: true, quorumMet: true } },
+    };
+
+    const { authors, references } = await computeBatchPost(['a', 'b']);
+    assert.deepEqual(authors, [
+      { authorAddress: '0xa1', weightPPM: 500000 },
+      { authorAddress: '0xa2', weightPPM: 500000 },
+    ]);
+    assert.deepEqual(references, []);
+  });
+
+  it('posts by 
different authors and pools with different fees', async () => { + posts = { + a: { authors: [{ authorAddress: '0xa1', weightPPM: 1000000 }] }, + b: { authors: [{ authorAddress: '0xa2', weightPPM: 1000000 }] }, + }; + pools = { + a: { fee: 100, result: { votePasses: true, quorumMet: true } }, + b: { fee: 200, result: { votePasses: true, quorumMet: true } }, + }; + + const { authors, references } = await computeBatchPost(['a', 'b']); + assert.deepEqual(authors, [ + { authorAddress: '0xa1', weightPPM: 333334 }, + { authorAddress: '0xa2', weightPPM: 666666 }, + ]); + assert.deepEqual(references, []); + }); + + it('posts with multiple authors', async () => { + posts = { + a: { authors: [{ authorAddress: '0xa1', weightPPM: 500000 }, { authorAddress: '0xa2', weightPPM: 500000 }] }, + b: { authors: [{ authorAddress: '0xa1', weightPPM: 500000 }, { authorAddress: '0xa3', weightPPM: 500000 }] }, + }; + pools = { + a: { fee: 100, result: { votePasses: true, quorumMet: true } }, + b: { fee: 100, result: { votePasses: true, quorumMet: true } }, + }; + + const { authors, references } = await computeBatchPost(['a', 'b']); + assert.deepEqual(authors, [ + { authorAddress: '0xa1', weightPPM: 500000 }, + { authorAddress: '0xa2', weightPPM: 250000 }, + { authorAddress: '0xa3', weightPPM: 250000 }, + ]); + assert.deepEqual(references, []); + }); + + it('post with references', async () => { + posts = { + a: { authors: [{ authorAddress: '0xa1', weightPPM: 1000000 }] }, + b: { authors: [{ authorAddress: '0xa2', weightPPM: 1000000 }], references: [{ targetPostId: 'a', weightPPM: 500000 }] }, + }; + pools = { + b: { fee: 100, result: { votePasses: true, quorumMet: true } }, + }; + + const { authors, references } = await computeBatchPost(['b']); + assert.deepEqual(authors, [ + { authorAddress: '0xa2', weightPPM: 1000000 }, + ]); + assert.deepEqual(references, [{ targetPostId: 'a', weightPPM: 300000 }]); + }); + + it('post with references and pools with different fees', async () => { + posts = { + a: { authors: [{ authorAddress: '0xa1', weightPPM: 1000000 }] }, + b: { authors: [{ authorAddress: '0xa2', weightPPM: 1000000 }] }, + c: { authors: [{ authorAddress: '0xa3', weightPPM: 1000000 }], references: [{ targetPostId: 'a', weightPPM: 500000 }] }, + d: { authors: [{ authorAddress: '0xa4', weightPPM: 1000000 }], references: [{ targetPostId: 'b', weightPPM: 500000 }] }, + }; + pools = { + c: { fee: 100, result: { votePasses: true, quorumMet: true } }, + d: { fee: 200, result: { votePasses: true, quorumMet: true } }, + }; + + const { authors, references } = await computeBatchPost(['c', 'd']); + assert.deepEqual(authors, [ + { authorAddress: '0xa3', weightPPM: 333334 }, + { authorAddress: '0xa4', weightPPM: 666666 }, + ]); + assert.deepEqual(references, [ + { targetPostId: 'a', weightPPM: 100000 }, + { targetPostId: 'b', weightPPM: 200000 }, + ]); + }); +}); diff --git a/backend/src/event-handlers/rollup/batch/submit-rollup.js b/backend/src/event-handlers/rollup/batch/submit-rollup.js index cbcf9ea..8a39686 100644 --- a/backend/src/event-handlers/rollup/batch/submit-rollup.js +++ b/backend/src/event-handlers/rollup/batch/submit-rollup.js @@ -4,7 +4,7 @@ const write = require('../../../util/forum/write'); const addPostWithRetry = require('../../../util/add-post-with-retry'); const callWithRetry = require('../../../util/call-with-retry'); const { getBatchItems, clearBatchItems } = require('./batch-items'); -const computeAuthorWeights = require('./compute-author-weights'); +const computeBatchPost = 
require('./compute-batch-post');
 const { wallet, rollup } = require('../../../util/contracts');
 const { sendMatrixEvent } = require('../../../matrix-bot');
 const { stakeRollupAvailability } = require('../utils');
@@ -29,12 +29,10 @@ const submitRollup = async () => {
   if (!batchItems.length) {
     return { batchItems: [] };
   }
-  const authors = await computeAuthorWeights(batchItems);
+  const { authors, references } = await computeBatchPost(batchItems);
   if (!authors.length) {
     return { batchItems: [] };
   }
-  // TODO: Compute references as aggregate of the references of posts in the batch
-  const references = [];
   const content = `Batch of ${batchItems.length} items`;
   const embeddedData = {
     batchItems,
@@ -58,7 +56,9 @@ const submitRollup = async () => {
   const poolDuration = 60;
   await callWithRetry(() => rollup.submitBatch(batchPostId, batchItems, poolDuration));
   // Send matrix event
-  await sendMatrixEvent('io.dgov.rollup.submit', { batchPostId, batchItems, authors });
+  await sendMatrixEvent('io.dgov.rollup.submit', {
+    batchPostId, batchItems, authors, references,
+  });
   // Clear the batch in preparation for next batch
   await clearBatchItems(batchItems);
   return {
diff --git a/backend/src/event-handlers/rollup/index.js b/backend/src/event-handlers/rollup/index.js
index 0c967ec..ca1dd0d 100644
--- a/backend/src/event-handlers/rollup/index.js
+++ b/backend/src/event-handlers/rollup/index.js
@@ -10,13 +10,14 @@ const read = require('../../util/forum/read');
 const { availabilityStakeDuration } = require('./config');
 const {
   stakeRollupAvailability, authorsMatch, validatePost,
+  referencesMatch,
 } = require('./utils');
 const computeMatrixPoolResult = require('./matrix-pools/compute-result');
 const { initializeBatchItems, addBatchItem, clearBatchItems } = require('./batch/batch-items');
 const { getCurrentBatchWorker, initializeBatchWorker } = require('./batch/batch-worker');
 const initiateMatrixPool = require('./matrix-pools/initiate');
 const { initiateMatrixPools } = require('./matrix-pools/initiate-matrix-pools');
-const computeAuthorWeights = require('./batch/compute-author-weights');
+const computeBatchPost = require('./batch/compute-batch-post');
 
 const start = async () => {
   console.log('registering validation pool decider for rollup');
@@ -30,16 +31,16 @@ const start = async () => {
     if (!post.embeddedData?.batchItems) return false;
 
     // Our task here is to check whether the posted result agrees with our own computations
-    let expectedAuthors;
     try {
-      expectedAuthors = await computeAuthorWeights(post.embeddedData.batchItems);
+      const { authors, references } = await computeBatchPost(post.embeddedData.batchItems);
+      const valid = await authorsMatch(post.authors, authors)
+        && await referencesMatch(post.references, references);
+      console.log(`batch post ${pool.props.postId} is ${valid ? 'valid' : 'invalid'}`);
+      return valid;
     } catch (e) {
       console.error('Error calculating batch post author weights', e);
       return null;
     }
-    const valid = authorsMatch(post.authors, expectedAuthors);
-    console.log(`batch post ${pool.props.postId} is ${valid ? 
'valid' : 'invalid'}`);
-    return valid;
   });
 
   // Even if we're not the current batch worker, keep track of batch items
@@ -165,10 +166,13 @@ const start = async () => {
       }
       case 'io.dgov.rollup.submit': {
         // This should include the identifier of the on-chain validation pool
-        const { batchPostId, batchItems, authors } = event.content;
+        const {
+          batchPostId, batchItems, authors, references,
+        } = event.content;
         // Compare batch worker's result with ours to verify
-        const expectedAuthors = await computeAuthorWeights(batchItems);
-        if (!authorsMatch(authors, expectedAuthors)) {
+        const { authors: expectedAuthors, references: expectedReferences } = await computeBatchPost(batchItems);
+        if (!(await authorsMatch(authors, expectedAuthors))
+          || !(await referencesMatch(references, expectedReferences))) {
           sendMatrixText(`Unexpected result for batch post ${batchPostId}`);
         }
         // Reset batchItems in preparation for next batch
diff --git a/backend/src/event-handlers/rollup/utils.js b/backend/src/event-handlers/rollup/utils.js
index 8a0509d..de459fe 100644
--- a/backend/src/event-handlers/rollup/utils.js
+++ b/backend/src/event-handlers/rollup/utils.js
@@ -24,6 +24,14 @@ const authorsMatch = async (authors, expectedAuthors) => {
   });
 };
 
+const referencesMatch = async (references = [], expectedReferences) => {
+  if (expectedReferences.length !== references.length) return false;
+  return references.every(({ targetPostId, weightPPM }) => {
+    const expectedReference = expectedReferences.find((x) => x.targetPostId === targetPostId);
+    return weightPPM === expectedReference?.weightPPM;
+  });
+};
+
 const validateWorkEvidence = async (sender, post) => {
   let valid = false;
   if (sender === work2.target) {
@@ -44,6 +52,7 @@ const validatePost = async (sender, post) => {
 module.exports = {
   stakeRollupAvailability,
   authorsMatch,
+  referencesMatch,
   validateWorkEvidence,
   validatePost,
 };
diff --git a/specification/docs/system-design.md b/specification/docs/system-design.md
index a6dca79..c0437ee 100644
--- a/specification/docs/system-design.md
+++ b/specification/docs/system-design.md
@@ -266,7 +266,7 @@ Rather than submit every Post on-chain and conduct every Validation Pool on-chai
 
 With this Rollup Post, we have the opportunity to attribute credit to multiple authors, with a weight assigned to each author.
 
-The Rollup Post can weight authorship in accordance with the off-chain Validation Pools that have taken place. The off-chain system can fully model the Forum and Bench outlined in the [Requirements](./requirements.md) section. For demonstration purposes, our prototype makes some simplifying assumptions. Work Evidence Posts (WEV) are assumed to contain no references to prior Posts. In reality, we want WEV to be able to reference prior Posts, such as those representing policies of the DAO, prior work by other DAO members, prior art outside the DAO, and so on. So, a proper implementation of this system should account for these references.
+The Rollup Post should weight authorship, and propagate weight to referenced Posts, in accordance with the off-chain Validation Pools that have taken place.
 
 To achieve the Rollup requirements, the contract must do the following:
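
Note: compute-batch-post.js rescales author weights to sum to 1000000 PPM and
reference weights to sum to WEIGHT_TO_REFERENCES (300000 PPM), each in
proportion to its fee-weighted contribution. A minimal standalone sketch of the
reference rescaling, mirroring the "different fees" test case above
(illustrative only, not part of the patch):

    // Two passing posts with matrix pool fees 100 and 200; each carries one
    // reference with weightPPM 500000.
    const WEIGHT_TO_REFERENCES = 300000;
    const refs = [
      { targetPostId: 'a', weightPPM: 500000 * 100 }, // fee-scaled: 50000000
      { targetPostId: 'b', weightPPM: 500000 * 200 }, // fee-scaled: 100000000
    ];
    const sum = refs.reduce((t, { weightPPM }) => t + weightPPM, 0); // 150000000
    const scaled = refs.map(({ targetPostId, weightPPM }) => ({
      targetPostId,
      weightPPM: Math.floor((weightPPM * WEIGHT_TO_REFERENCES) / sum),
    }));
    // scaled === [{ targetPostId: 'a', weightPPM: 100000 },
    //             { targetPostId: 'b', weightPPM: 200000 }]
    // Any remainder lost to Math.floor() is added back to the first
    // reference, as compute-batch-post.js does, so totals stay exact.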