use PPM rather than percent for weights
Gitea Actions Demo / Explore-Gitea-Actions (push) Failing after 34s Details

This commit is contained in:
Ladd Hoffman 2024-04-21 10:28:44 -05:00
parent 33241143ff
commit e16bb4968f
26 changed files with 147 additions and 139 deletions

View File

@ -1,14 +1,14 @@
{
"localhost": {
"DAO": "0xE799A82AEB525D35c671eAEb6AE6858E02a1EB24",
"Work1": "0x358A07B26F4c556140872ecdB69c58e8807E7178",
"Onboarding": "0x63472674239ffb70618Fae043610917f2d9B781C",
"Proposals": "0x38AE4ABD47B10f6660CD70Cc8FF3401341E13d9e"
"DAO": "0x4C767e62c92b58B2308E02ba5Cc4A3BD246060ac",
"Work1": "0x39B7522Ee1A5B13aE5580C40114239D4cE0e7D29",
"Onboarding": "0xC0Bb36820Ba891DE4ed6D60f75066805361dbeB8",
"Proposals": "0x268A0A6bB80282542e0Be0864Cfa1c2206c5491F"
},
"sepolia": {
"DAO": "0x8e9D6cb0097CE2E283Cdd5762F288d2D394eB713",
"Work1": "0xFC95d5d09e71f0C83F59765107eAa79125b01624",
"Onboarding": "0x32f5A03687406A5c66878349A7de667d0b5C2922",
"Proposals": "0x8273062d41a828ac67ae5837547d2Dd19f8CDc27"
"DAO": "0x8e5bd58B2ca8910C5F9be8de847d6883B15c60d2",
"Work1": "0x1708A144F284C1a9615C25b674E4a08992CE93e4",
"Onboarding": "0xb21D4c986715A1adb5e87F752842613648C20a7B",
"Proposals": "0x930c47293F206780E8F166338bDaFF3520306032"
}
}

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@ -11,6 +11,9 @@ const { getContractAddressByNetworkName } = require('./contract-config');
const { authorAddresses, authorPrivKeys, forum } = require('./db');
const DAOArtifact = require('../contractArtifacts/DAO.json');
// Each post allocates 30% of its reputation to citations
const PPM_TO_CITATIONS = 300000;
const network = process.env.ETH_NETWORK;
console.log('network:', network);
const getProvider = () => {
@ -98,16 +101,15 @@ const generatePost = async (paper) => {
if (!authorsInfo.length) {
throw new Error('Paper has no authors with id');
}
const PERCENT_TO_CITATIONS = 30;
const firstAuthorWallet = new ethers.Wallet(authorsInfo[0].authorPrivKey);
const eachAuthorWeightPercent = Math.floor(PERCENT_TO_CITATIONS / authorsInfo.length);
const eachAuthorWeightPercent = Math.floor(1000000 / authorsInfo.length);
const authors = authorsInfo.map(({ authorAddress }) => ({
weightPercent: eachAuthorWeightPercent,
weightPPM: eachAuthorWeightPercent,
authorAddress,
}));
// Make sure author weights sum to 100
const totalAuthorsWeight = authors.reduce((t, { weightPercent }) => t + weightPercent, 0);
authors[0].weightPercent += PERCENT_TO_CITATIONS - totalAuthorsWeight;
// Make sure author weights sum to 1000000 PPM (i.e. 100%)
const totalAuthorsWeight = authors.reduce((t, { weightPPM }) => t + weightPPM, 0);
authors[0].weightPPM += 1000000 - totalAuthorsWeight;
const content = `Semantic Scholar paper ${paper.paperId}
${paper.title}
@ -150,7 +152,7 @@ module.exports = async (req, res) => {
console.log('references count:', paper.references.length);
const eachCitationWeightPercent = Math.floor(30 / paper.references.length);
const eachCitationWeightPercent = Math.floor(PPM_TO_CITATIONS / paper.references.length);
const citations = await Promise.mapSeries(
paper.references.filter((x) => !!x.paperId),
async (citedPaper) => {
@ -159,15 +161,15 @@ module.exports = async (req, res) => {
// The forum allows dangling citations to support this use case.
const citedPost = await generatePost(citedPaper);
return {
weightPercent: eachCitationWeightPercent,
weightPPM: eachCitationWeightPercent,
targetPostId: citedPost.hash,
};
},
);
// Make sure citation weights sum to 100
const totalCitationWeight = citations.reduce((t, { weightPercent }) => t + weightPercent, 0);
citations[0].weightPercent += 100 - totalCitationWeight;
// Make sure citation weights sum to the designated total
const totalCitationWeight = citations.reduce((t, { weightPPM }) => t + weightPPM, 0);
citations[0].weightPPM += PPM_TO_CITATIONS - totalCitationWeight;
// Create a post for this paper
const {

View File

@ -1,14 +1,14 @@
{
"localhost": {
"DAO": "0xE799A82AEB525D35c671eAEb6AE6858E02a1EB24",
"Work1": "0x358A07B26F4c556140872ecdB69c58e8807E7178",
"Onboarding": "0x63472674239ffb70618Fae043610917f2d9B781C",
"Proposals": "0x38AE4ABD47B10f6660CD70Cc8FF3401341E13d9e"
"DAO": "0x4C767e62c92b58B2308E02ba5Cc4A3BD246060ac",
"Work1": "0x39B7522Ee1A5B13aE5580C40114239D4cE0e7D29",
"Onboarding": "0xC0Bb36820Ba891DE4ed6D60f75066805361dbeB8",
"Proposals": "0x268A0A6bB80282542e0Be0864Cfa1c2206c5491F"
},
"sepolia": {
"DAO": "0x8e9D6cb0097CE2E283Cdd5762F288d2D394eB713",
"Work1": "0xFC95d5d09e71f0C83F59765107eAa79125b01624",
"Onboarding": "0x32f5A03687406A5c66878349A7de667d0b5C2922",
"Proposals": "0x8273062d41a828ac67ae5837547d2Dd19f8CDc27"
"DAO": "0x8e5bd58B2ca8910C5F9be8de847d6883B15c60d2",
"Work1": "0x1708A144F284C1a9615C25b674E4a08992CE93e4",
"Onboarding": "0xb21D4c986715A1adb5e87F752842613648C20a7B",
"Proposals": "0x930c47293F206780E8F166338bDaFF3520306032"
}
}

View File

@ -28,7 +28,7 @@ contract Onboarding is WorkContract, IOnValidate {
request.approval = approval;
// Make work evidence post
Author[] memory authors = new Author[](1);
authors[0] = Author(100, stake.worker);
authors[0] = Author(1000000, stake.worker);
dao.addPost(authors, request.evidenceContentId, request.citations);
emit WorkApprovalSubmitted(requestIndex, approval);
// Initiate validation pool
@ -75,7 +75,7 @@ contract Onboarding is WorkContract, IOnValidate {
// Make onboarding post
Citation[] memory emptyCitations;
Author[] memory authors = new Author[](1);
authors[0] = Author(100, request.customer);
authors[0] = Author(1000000, request.customer);
dao.addPost(authors, request.requestContentId, emptyCitations);
dao.initiateValidationPool{value: request.fee / 10}(
request.requestContentId,

View File

@ -74,7 +74,7 @@ contract Proposals is DAOContract, IOnValidate {
// TODO: Take citations as a parameter
Citation[] memory emptyCitations;
Author[] memory authors = new Author[](1);
authors[0] = Author(100, author);
authors[0] = Author(1000000, author);
dao.addPost(authors, contentId, emptyCitations);
proposalIndex = proposalCount++;
Proposal storage proposal = proposals[proposalIndex];
@ -217,7 +217,7 @@ contract Proposals is DAOContract, IOnValidate {
} else {
referendum.retryCount += 1;
}
// Handle Referendum 100%
// Handle Referendum 100%
} else if (proposal.stage == Stage.Referendum100) {
require(referendumIndex == 2, "Stage 2 index mismatch");
if (votePasses && participationAboveThreshold) {

View File

@ -180,7 +180,7 @@ abstract contract WorkContract is
request.approval = approval;
// Make work evidence post
Author[] memory authors = new Author[](1);
authors[0] = Author(100, stake.worker);
authors[0] = Author(1000000, stake.worker);
dao.addPost(authors, request.evidenceContentId, request.citations);
emit WorkApprovalSubmitted(requestIndex, approval);
// Initiate validation pool

View File

@ -5,12 +5,12 @@ import "./Reputation.sol";
import "hardhat/console.sol";
struct Citation {
int weightPercent;
int weightPPM;
string targetPostId;
}
struct Author {
uint weightPercent;
uint weightPPM;
address authorAddress;
}
@ -52,12 +52,12 @@ contract Forum is Reputation {
post.id = contentId;
uint authorTotalWeightPercent;
for (uint i = 0; i < authors.length; i++) {
authorTotalWeightPercent += authors[i].weightPercent;
authorTotalWeightPercent += authors[i].weightPPM;
post.authors.push(authors[i]);
}
require(
authorTotalWeightPercent == 100,
"Author weights must sum to 100%"
authorTotalWeightPercent == 1000000,
"Author weights must sum to 1000000"
);
for (uint i = 0; i < citations.length; i++) {
post.citations.push(citations[i]);
@ -65,19 +65,25 @@ contract Forum is Reputation {
int totalCitationWeightPos;
int totalCitationWeightNeg;
for (uint i = 0; i < post.citations.length; i++) {
int weight = post.citations[i].weightPercent;
require(weight >= -100, "Each citation weight must be >= -100");
require(weight <= 100, "Each citation weight must be <= 100");
int weight = post.citations[i].weightPPM;
require(
weight >= -1000000,
"Each citation weight must be >= -1000000"
);
require(
weight <= 1000000,
"Each citation weight must be <= 1000000"
);
if (weight > 0) totalCitationWeightPos += weight;
else totalCitationWeightNeg += weight;
}
require(
totalCitationWeightPos <= 100,
"Sum of positive citations must be <= 100"
totalCitationWeightPos <= 1000000,
"Sum of positive citations must be <= 1000000"
);
require(
totalCitationWeightNeg >= -100,
"Sum of negative citations must be >= -100"
totalCitationWeightNeg >= -1000000,
"Sum of negative citations must be >= -1000000"
);
emit PostAdded(contentId);
}
@ -100,7 +106,7 @@ contract Forum is Reputation {
bool initialNegative,
uint depth
) internal returns (int outboundAmount) {
outboundAmount = (amount * citation.weightPercent) / 100;
outboundAmount = (amount * citation.weightPPM) / 1000000;
if (bytes(citation.targetPostId).length == 0) {
// Incineration
require(
@ -125,7 +131,7 @@ contract Forum is Reputation {
int refund = _propagateReputation(
citation.targetPostId,
outboundAmount,
initialNegative || (depth == 0 && citation.weightPercent < 0),
initialNegative || (depth == 0 && citation.weightPPM < 0),
depth + 1
);
outboundAmount -= refund;
@ -149,7 +155,7 @@ contract Forum is Reputation {
Author memory author = post.authors[i];
int share;
if (i < post.authors.length - 1) {
share = (amount * int(author.weightPercent)) / 100;
share = (amount * int(author.weightPPM)) / 1000000;
allocated += share;
} else {
// For the last author, allocate the remainder.
@ -194,7 +200,7 @@ contract Forum is Reputation {
}
// Propagate negative citations first
for (uint i = 0; i < post.citations.length; i++) {
if (post.citations[i].weightPercent < 0) {
if (post.citations[i].weightPPM < 0) {
amount -= _handleCitation(
postId,
post.citations[i],
@ -206,7 +212,7 @@ contract Forum is Reputation {
}
// Now propagate positive citations
for (uint i = 0; i < post.citations.length; i++) {
if (post.citations[i].weightPercent > 0) {
if (post.citations[i].weightPPM > 0) {
amount -= _handleCitation(
postId,
post.citations[i],

View File

@ -197,7 +197,7 @@ contract ValidationPools is Reputation, Forum {
// Value of losing stakes should be distributed among winners, in proportion to their stakes
// Only bindingPercent % should be redistributed
// Stake senders should get (100-bindingPercent) % back
// Stake senders should get (100 - bindingPercent)% back
uint amountFromWinners = votePasses ? stakedFor : stakedAgainst;
uint totalRewards;
uint totalAllocated;

View File

@ -40,7 +40,7 @@ describe('Forum', () => {
);
const addPost = (author, contentId, citations) => dao.addPost([{
weightPercent: 100,
weightPPM: 1000000,
authorAddress: author,
}], contentId, citations);
@ -59,7 +59,7 @@ describe('Forum', () => {
expect(post.id).to.equal(contentId);
const postAuthors = await dao.getPostAuthors(contentId);
expect(postAuthors).to.have.length(1);
expect(postAuthors[0].weightPercent).to.equal(100);
expect(postAuthors[0].weightPPM).to.equal(1000000);
expect(postAuthors[0].authorAddress).to.equal(account1);
});
@ -71,24 +71,24 @@ describe('Forum', () => {
expect(post.id).to.equal(contentId);
const postAuthors = await dao.getPostAuthors(contentId);
expect(postAuthors).to.have.length(1);
expect(postAuthors[0].weightPercent).to.equal(100);
expect(postAuthors[0].weightPPM).to.equal(1000000);
expect(postAuthors[0].authorAddress).to.equal(account2);
});
it('should be able to add a post with multiple authors', async () => {
const contentId = 'some-id';
await expect(dao.addPost([
{ weightPercent: 50, authorAddress: account1 },
{ weightPercent: 50, authorAddress: account2 },
{ weightPPM: 500000, authorAddress: account1 },
{ weightPPM: 500000, authorAddress: account2 },
], contentId, [])).to.emit(dao, 'PostAdded').withArgs('some-id');
const post = await dao.posts(contentId);
expect(post.sender).to.equal(account1);
expect(post.id).to.equal(contentId);
const postAuthors = await dao.getPostAuthors(contentId);
expect(postAuthors).to.have.length(2);
expect(postAuthors[0].weightPercent).to.equal(50);
expect(postAuthors[0].weightPPM).to.equal(500000);
expect(postAuthors[0].authorAddress).to.equal(account1);
expect(postAuthors[1].weightPercent).to.equal(50);
expect(postAuthors[1].weightPPM).to.equal(500000);
expect(postAuthors[1].authorAddress).to.equal(account2);
await initiateValidationPool({ postId: 'some-id' });
await time.increase(POOL_DURATION + 1);
@ -100,22 +100,22 @@ describe('Forum', () => {
it('should not be able to add a post with total author weight < 100%', async () => {
const contentId = 'some-id';
await expect(dao.addPost([
{ weightPercent: 50, authorAddress: account1 },
{ weightPercent: 40, authorAddress: account2 },
], contentId, [])).to.be.rejectedWith('Author weights must sum to 100%');
{ weightPPM: 500000, authorAddress: account1 },
{ weightPPM: 400000, authorAddress: account2 },
], contentId, [])).to.be.rejectedWith('Author weights must sum to 1000000');
});
it('should not be able to add a post with total author weight > 100%', async () => {
const contentId = 'some-id';
await expect(dao.addPost([
{ weightPercent: 50, authorAddress: account1 },
{ weightPercent: 60, authorAddress: account2 },
], contentId, [])).to.be.rejectedWith('Author weights must sum to 100%');
{ weightPPM: 500000, authorAddress: account1 },
{ weightPPM: 600000, authorAddress: account2 },
], contentId, [])).to.be.rejectedWith('Author weights must sum to 1000000');
});
it('should be able to donate reputation via citations', async () => {
await addPost(account1, 'content-id', []);
await addPost(account2, 'second-content-id', [{ weightPercent: 50, targetPostId: 'content-id' }]);
await addPost(account2, 'second-content-id', [{ weightPPM: 500000, targetPostId: 'content-id' }]);
await initiateValidationPool({ postId: 'second-content-id' });
const pool = await dao.validationPools(0);
expect(pool.postId).to.equal('second-content-id');
@ -131,7 +131,7 @@ describe('Forum', () => {
await dao.evaluateOutcome(0);
expect(await dao.balanceOf(account1)).to.equal(100);
expect((await dao.posts('content-id')).reputation).to.equal(100);
await addPost(account2, 'second-content-id', [{ weightPercent: -50, targetPostId: 'content-id' }]);
await addPost(account2, 'second-content-id', [{ weightPPM: -500000, targetPostId: 'content-id' }]);
expect((await dao.posts('second-content-id')).reputation).to.equal(0);
await initiateValidationPool({ postId: 'second-content-id' });
const pool = await dao.validationPools(1);
@ -152,8 +152,8 @@ describe('Forum', () => {
await addPost(account2, 'second-content-id', []);
expect(await dao.balanceOf(account2)).to.equal(0);
await addPost(account3, 'third-content-id', [
{ weightPercent: -100, targetPostId: 'content-id' },
{ weightPercent: 100, targetPostId: 'second-content-id' },
{ weightPPM: -1000000, targetPostId: 'content-id' },
{ weightPPM: 1000000, targetPostId: 'second-content-id' },
]);
await initiateValidationPool({ postId: 'third-content-id' });
const pool = await dao.validationPools(1);
@ -170,13 +170,13 @@ describe('Forum', () => {
await initiateValidationPool({ postId: 'content-id' });
await dao.evaluateOutcome(0);
expect(await dao.balanceOf(account1)).to.equal(100);
await addPost(account2, 'second-content-id', [{ weightPercent: -100, targetPostId: 'content-id' }]);
await addPost(account2, 'second-content-id', [{ weightPPM: -1000000, targetPostId: 'content-id' }]);
await initiateValidationPool({ postId: 'second-content-id' });
await time.increase(POOL_DURATION + 1);
await dao.evaluateOutcome(1);
expect(await dao.balanceOf(account1)).to.equal(0);
expect(await dao.balanceOf(account2)).to.equal(200);
await addPost(account3, 'third-content-id', [{ weightPercent: -100, targetPostId: 'second-content-id' }]);
await addPost(account3, 'third-content-id', [{ weightPPM: -1000000, targetPostId: 'second-content-id' }]);
await initiateValidationPool({ postId: 'third-content-id' });
await time.increase(POOL_DURATION + 1);
await dao.evaluateOutcome(2);
@ -188,8 +188,8 @@ describe('Forum', () => {
it('should be able to cite a post that has not (yet) been added', async () => {
await addPost(account1, 'content-id', []);
await addPost(account2, 'second-content-id', [
{ weightPercent: 10, targetPostId: 'content-id' },
{ weightPercent: 10, targetPostId: 'nonexistent-content-id' },
{ weightPPM: 100000, targetPostId: 'content-id' },
{ weightPPM: 100000, targetPostId: 'nonexistent-content-id' },
]);
await initiateValidationPool({ postId: 'second-content-id' });
const pool = await dao.validationPools(0);
@ -210,7 +210,7 @@ describe('Forum', () => {
await dao.evaluateOutcome(1);
expect(await dao.balanceOf(account1)).to.equal(100);
expect(await dao.balanceOf(account2)).to.equal(100);
await addPost(account3, 'third-content-id', [{ weightPercent: -100, targetPostId: 'second-content-id' }]);
await addPost(account3, 'third-content-id', [{ weightPPM: -1000000, targetPostId: 'second-content-id' }]);
await initiateValidationPool({ postId: 'third-content-id' });
await dao.stakeOnValidationPool(2, 100, true);
await time.increase(POOL_DURATION + 1);
@ -226,13 +226,13 @@ describe('Forum', () => {
await initiateValidationPool({ postId: 'content-id' });
await dao.evaluateOutcome(0);
expect(await dao.balanceOf(account1)).to.equal(100);
await addPost(account2, 'second-content-id', [{ weightPercent: 50, targetPostId: 'content-id' }]);
await addPost(account2, 'second-content-id', [{ weightPPM: 500000, targetPostId: 'content-id' }]);
await initiateValidationPool({ postId: 'second-content-id' });
await time.increase(POOL_DURATION + 1);
await dao.evaluateOutcome(1);
expect(await dao.balanceOf(account1)).to.equal(150);
expect(await dao.balanceOf(account2)).to.equal(50);
await addPost(account3, 'third-content-id', [{ weightPercent: -100, targetPostId: 'second-content-id' }]);
await addPost(account3, 'third-content-id', [{ weightPPM: -1000000, targetPostId: 'second-content-id' }]);
await initiateValidationPool({ postId: 'third-content-id', fee: 200 });
await time.increase(POOL_DURATION + 1);
await dao.evaluateOutcome(2);
@ -246,13 +246,13 @@ describe('Forum', () => {
await initiateValidationPool({ postId: 'content-id' });
await dao.evaluateOutcome(0);
expect(await dao.balanceOf(account1)).to.equal(100);
await addPost(account2, 'second-content-id', [{ weightPercent: -100, targetPostId: 'content-id' }]);
await addPost(account2, 'second-content-id', [{ weightPPM: -1000000, targetPostId: 'content-id' }]);
await initiateValidationPool({ postId: 'second-content-id' });
await time.increase(POOL_DURATION + 1);
await dao.evaluateOutcome(1);
expect(await dao.balanceOf(account1)).to.equal(0);
expect(await dao.balanceOf(account2)).to.equal(200);
await addPost(account3, 'third-content-id', [{ weightPercent: -100, targetPostId: 'second-content-id' }]);
await addPost(account3, 'third-content-id', [{ weightPPM: -1000000, targetPostId: 'second-content-id' }]);
await initiateValidationPool({ postId: 'third-content-id', fee: 200 });
await time.increase(POOL_DURATION + 1);
await dao.evaluateOutcome(2);
@ -263,9 +263,9 @@ describe('Forum', () => {
it('should enforce depth limit', async () => {
await addPost(account1, 'content-id-1', []);
await addPost(account1, 'content-id-2', [{ weightPercent: 100, targetPostId: 'content-id-1' }]);
await addPost(account1, 'content-id-3', [{ weightPercent: 100, targetPostId: 'content-id-2' }]);
await addPost(account1, 'content-id-4', [{ weightPercent: 100, targetPostId: 'content-id-3' }]);
await addPost(account1, 'content-id-2', [{ weightPPM: 1000000, targetPostId: 'content-id-1' }]);
await addPost(account1, 'content-id-3', [{ weightPPM: 1000000, targetPostId: 'content-id-2' }]);
await addPost(account1, 'content-id-4', [{ weightPPM: 1000000, targetPostId: 'content-id-3' }]);
await initiateValidationPool({ postId: 'content-id-4' });
await dao.evaluateOutcome(0);
const posts = await Promise.all([
@ -283,7 +283,7 @@ describe('Forum', () => {
it('should be able to incinerate reputation', async () => {
await addPost(account1, 'content-id-1', [
{
weightPercent: 50,
weightPPM: 500000,
targetPostId: '',
},
]);
@ -328,7 +328,7 @@ describe('Forum', () => {
it('author and post rep can be completely destroyed', async () => {
// account1's post is later strongly negatively referenced
await addPost(account3, 'third-content-id', [{ weightPercent: -100, targetPostId: 'content-id' }]);
await addPost(account3, 'third-content-id', [{ weightPPM: -1000000, targetPostId: 'content-id' }]);
await initiateValidationPool({ postId: 'third-content-id', fee: 200 });
await time.increase(POOL_DURATION + 1);
await dao.evaluateOutcome(3);
@ -343,7 +343,7 @@ describe('Forum', () => {
it('author rep can be destroyed while some post rep remains', async () => {
// account1's post is later strongly negatively referenced
await addPost(account3, 'third-content-id', [{ weightPercent: -100, targetPostId: 'content-id' }]);
await addPost(account3, 'third-content-id', [{ weightPPM: -1000000, targetPostId: 'content-id' }]);
await initiateValidationPool({ postId: 'third-content-id', fee: 70 });
await time.increase(POOL_DURATION + 1);
await dao.evaluateOutcome(3);
@ -358,7 +358,7 @@ describe('Forum', () => {
it('author rep can be destroyed while some post rep remains (odd amount)', async () => {
// account1's post is later strongly negatively referenced
await addPost(account3, 'third-content-id', [{ weightPercent: -100, targetPostId: 'content-id' }]);
await addPost(account3, 'third-content-id', [{ weightPPM: -1000000, targetPostId: 'content-id' }]);
await initiateValidationPool({ postId: 'third-content-id', fee: 75 });
await time.increase(POOL_DURATION + 1);
await dao.evaluateOutcome(3);
@ -375,8 +375,8 @@ describe('Forum', () => {
describe('negative citation of a post with multiple authors', async () => {
beforeEach(async () => {
await dao.addPost([
{ weightPercent: 50, authorAddress: account1 },
{ weightPercent: 50, authorAddress: account2 },
{ weightPPM: 500000, authorAddress: account1 },
{ weightPPM: 500000, authorAddress: account2 },
], 'content-id', []);
await initiateValidationPool({ postId: 'content-id' });
await dao.evaluateOutcome(0);
@ -399,7 +399,7 @@ describe('Forum', () => {
it('author and post rep can be completely destroyed', async () => {
// account1's post is later strongly negatively referenced
await addPost(account3, 'second-content-id', [{ weightPercent: -100, targetPostId: 'content-id' }]);
await addPost(account3, 'second-content-id', [{ weightPPM: -1000000, targetPostId: 'content-id' }]);
await initiateValidationPool({ postId: 'second-content-id', fee: 400 });
await time.increase(POOL_DURATION + 1);
await dao.evaluateOutcome(2);
@ -413,7 +413,7 @@ describe('Forum', () => {
it('author rep can be destroyed while some post rep remains', async () => {
// account1's post is later strongly negatively referenced
await addPost(account3, 'second-content-id', [{ weightPercent: -100, targetPostId: 'content-id' }]);
await addPost(account3, 'second-content-id', [{ weightPPM: -1000000, targetPostId: 'content-id' }]);
await initiateValidationPool({ postId: 'second-content-id', fee: 70 });
await time.increase(POOL_DURATION + 1);
await dao.evaluateOutcome(2);

View File

@ -19,7 +19,7 @@ describe('Onboarding', () => {
const Onboarding = await ethers.getContractFactory('Onboarding');
const onboarding = await Onboarding.deploy(dao.target, proposals.target, PRICE);
await dao.addPost([{ weightPercent: 100, authorAddress: account1 }], 'content-id', []);
await dao.addPost([{ weightPPM: 1000000, authorAddress: account1 }], 'content-id', []);
const callbackData = ethers.AbiCoder.defaultAbiCoder().encode([], []);
await dao.initiateValidationPool(
'content-id',
@ -75,7 +75,7 @@ describe('Onboarding', () => {
expect(post.id).to.equal('evidence-content-id');
const postAuthors = await dao.getPostAuthors('evidence-content-id');
expect(postAuthors).to.have.length(1);
expect(postAuthors[0].weightPercent).to.equal(100);
expect(postAuthors[0].weightPPM).to.equal(1000000);
expect(postAuthors[0].authorAddress).to.equal(account1);
const pool = await dao.validationPools(1);
expect(pool.postId).to.equal('evidence-content-id');
@ -128,7 +128,7 @@ describe('Onboarding', () => {
expect(post.id).to.equal('req-content-id');
const postAuthors = await dao.getPostAuthors('req-content-id');
expect(postAuthors).to.have.length(1);
expect(postAuthors[0].weightPercent).to.equal(100);
expect(postAuthors[0].weightPPM).to.equal(1000000);
expect(postAuthors[0].authorAddress).to.equal(account2);
const pool = await dao.validationPools(2);
expect(pool.postId).to.equal('req-content-id');

View File

@ -16,8 +16,8 @@ describe('Proposal', () => {
const Proposals = await ethers.getContractFactory('Proposals');
const proposals = await Proposals.deploy(dao.target);
await dao.addPost([{ weightPercent: 100, authorAddress: account1 }], 'some-content-id', []);
await dao.addPost([{ weightPercent: 100, authorAddress: account2 }], 'some-other-content-id', []);
await dao.addPost([{ weightPPM: 1000000, authorAddress: account1 }], 'some-content-id', []);
await dao.addPost([{ weightPPM: 1000000, authorAddress: account2 }], 'some-other-content-id', []);
const callbackData = ethers.AbiCoder.defaultAbiCoder().encode([], []);
await dao.initiateValidationPool(
'some-content-id',
@ -89,7 +89,7 @@ describe('Proposal', () => {
it('Can submit a proposal', async () => {
const postAuthors = await dao.getPostAuthors('proposal-content-id');
expect(postAuthors).to.have.length(1);
expect(postAuthors[0].weightPercent).to.equal(100);
expect(postAuthors[0].weightPPM).to.equal(1000000);
expect(postAuthors[0].authorAddress).to.equal(account1);
});

View File

@ -38,7 +38,7 @@ describe('Validation Pools', () => {
beforeEach(async () => {
({ dao, account1, account2 } = await loadFixture(deploy));
await dao.addPost([{ weightPercent: 100, authorAddress: account1 }], 'content-id', []);
await dao.addPost([{ weightPPM: 1000000, authorAddress: account1 }], 'content-id', []);
const init = () => initiateValidationPool({ fee: POOL_FEE });
await expect(init()).to.emit(dao, 'ValidationPoolInitiated').withArgs(0);
expect(await dao.validationPoolCount()).to.equal(1);
@ -192,7 +192,7 @@ describe('Validation Pools', () => {
beforeEach(async () => {
time.increase(POOL_DURATION + 1);
await dao.evaluateOutcome(0);
await dao.addPost([{ weightPercent: 100, authorAddress: account2 }], 'content-id-2', []);
await dao.addPost([{ weightPPM: 1000000, authorAddress: account2 }], 'content-id-2', []);
const init = () => initiateValidationPool({ postId: 'content-id-2' });
await expect(init()).to.emit(dao, 'ValidationPoolInitiated').withArgs(1);
time.increase(POOL_DURATION + 1);

View File

@ -19,7 +19,7 @@ describe('Work1', () => {
const Work1 = await ethers.getContractFactory('Work1');
const work1 = await Work1.deploy(dao.target, proposals.target, WORK1_PRICE);
await dao.addPost([{ weightPercent: 100, authorAddress: account1 }], 'some-content-id', []);
await dao.addPost([{ weightPPM: 1000000, authorAddress: account1 }], 'some-content-id', []);
const callbackData = ethers.AbiCoder.defaultAbiCoder().encode([], []);
await dao.initiateValidationPool(
'some-content-id',
@ -216,7 +216,7 @@ describe('Work1', () => {
expect(post.id).to.equal('evidence-content-id');
const postAuthors = await dao.getPostAuthors('evidence-content-id');
expect(postAuthors).to.have.length(1);
expect(postAuthors[0].weightPercent).to.equal(100);
expect(postAuthors[0].weightPPM).to.equal(1000000);
expect(postAuthors[0].authorAddress).to.equal(account1);
const pool = await dao.validationPools(1);
expect(pool.fee).to.equal(WORK1_PRICE);

View File

@ -1,14 +1,14 @@
{
"localhost": {
"DAO": "0xE799A82AEB525D35c671eAEb6AE6858E02a1EB24",
"Work1": "0x358A07B26F4c556140872ecdB69c58e8807E7178",
"Onboarding": "0x63472674239ffb70618Fae043610917f2d9B781C",
"Proposals": "0x38AE4ABD47B10f6660CD70Cc8FF3401341E13d9e"
"DAO": "0x4C767e62c92b58B2308E02ba5Cc4A3BD246060ac",
"Work1": "0x39B7522Ee1A5B13aE5580C40114239D4cE0e7D29",
"Onboarding": "0xC0Bb36820Ba891DE4ed6D60f75066805361dbeB8",
"Proposals": "0x268A0A6bB80282542e0Be0864Cfa1c2206c5491F"
},
"sepolia": {
"DAO": "0x8e9D6cb0097CE2E283Cdd5762F288d2D394eB713",
"Work1": "0xFC95d5d09e71f0C83F59765107eAa79125b01624",
"Onboarding": "0x32f5A03687406A5c66878349A7de667d0b5C2922",
"Proposals": "0x8273062d41a828ac67ae5837547d2Dd19f8CDc27"
"DAO": "0x8e5bd58B2ca8910C5F9be8de847d6883B15c60d2",
"Work1": "0x1708A144F284C1a9615C25b674E4a08992CE93e4",
"Onboarding": "0xb21D4c986715A1adb5e87F752842613648C20a7B",
"Proposals": "0x930c47293F206780E8F166338bDaFF3520306032"
}
}

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@ -428,11 +428,11 @@ function App() {
<td>{post.id.toString()}</td>
<td>
<Stack>
{post.authors.map(({ authorAddress, weightPercent }) => (
{post.authors.map(({ authorAddress, weightPPM }) => (
<div key={authorAddress}>
{getAddressName(chainId, authorAddress)}
{' '}
{weightPercent.toString()}
{Number(weightPPM) / 10000}
%
</div>
))}

View File

@ -19,7 +19,7 @@ function AddPostModal({
const handleSubmit = useCallback(async () => {
// Upload content to API
const post = new Post({ content, authors: [{ weightPercent: 100, authorAddress: account }] });
const post = new Post({ content, authors: [{ weightPPM: 1000000, authorAddress: account }] });
// Include metamask signature
await post.sign(provider, account);
// Clear the input and hide the modal

View File

@ -23,11 +23,11 @@ function ViewPostModal({
<Modal.Body>
<h5>Authors</h5>
<Stack>
{authors?.map(({ authorAddress, weightPercent }) => (
{authors?.map(({ authorAddress, weightPPM }) => (
<div key={authorAddress}>
{authorAddress}
{' '}
{weightPercent.toString()}
{Number(weightPPM) / 10000}
%
</div>
))}
@ -47,11 +47,11 @@ function ViewPostModal({
<hr />
<h5>Citations</h5>
<Stack>
{citations.map(({ weightPercent, targetPostId }) => (
{citations.map(({ weightPPM, targetPostId }) => (
<div key={targetPostId}>
{targetPostId}
{' '}
{weightPercent.toString()}
{Number(weightPPM) / 10000}
%
</div>
))}

View File

@ -34,7 +34,7 @@ function ProposePriceChangeModal({
const handleClose = () => setShow(false);
const handleSubmit = useCallback(async () => {
const post = new Post({ content, authors: [{ weightPercent: 100, authorAddress: account }] });
const post = new Post({ content, authors: [{ weightPPM: 1000000, authorAddress: account }] });
// Include price as embedded data
post.embeddedData = { proposedPrice };
// Include metamask signature