Compare commits

...

4 Commits

Author SHA1 Message Date
Ladd Hoffman c080778872 successful semantic scholar import 2024-04-20 12:37:59 -05:00 (CI: Gitea Actions Demo / Explore-Gitea-Actions (push) failing after 34s)
Ladd Hoffman 7c2de64dff move contract-addresses.json 2024-04-19 18:08:33 -05:00
Ladd Hoffman 90e5f599b8 backend: first cut at semantic scholar import 2024-04-19 18:08:18 -05:00
Ladd Hoffman 626905bad6 add multi-author support to frontend 2024-04-19 18:07:48 -05:00
37 changed files with 4812 additions and 2263 deletions


@ -101,7 +101,7 @@ Clone this repository to a directory on your machine
1. Run the daemon
node index.js
node src/index.js
### Hardhat


@ -1,2 +1,5 @@
PORT=3000
DATA_DIR="./data"
DATA_DIR="./data"
SEMANTIC_SCHOLAR_API_KEY=
ETH_NETWORK="localhost"
ETH_PRIVATE_KEY=


@ -2,8 +2,8 @@ FROM node
WORKDIR /app
ADD package.json package-lock.json index.js /app/
ADD package.json package-lock.json src/ /app/
RUN npm ci
ENTRYPOINT ["node", "index.js"]
ENTRYPOINT ["node", "src/index.js"]


@ -1,9 +1,9 @@
{
"localhost": {
"DAO": "0x8d914D38dD301FC4606f5aa9fEcF8A76389020d3",
"Work1": "0xfe58B9EB03F75A603de1B286584f5E9532ab8fB5",
"Onboarding": "0x1d63FDe5B461106729fE1e5e38A02fc68C518Af5",
"Proposals": "0x050C420Cc4995B41217Eba1B54B82Fd5687e9139"
"DAO": "0xD60A1c64B96a133587A75C2771690072F238a549",
"Work1": "0xCF3f16D151052FA7b99a71E79EC3b0e6C793aa0b",
"Onboarding": "0xE148e864A646B8bFc95dcc9acd3dBcB52704EE60",
"Proposals": "0x981234BBBC1ec93200F5BB3a65e2F9711A6109aa"
},
"sepolia": {
"DAO": "0x241514DC94568e98222fBE66662b054b545A61AE",

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long


@ -1,109 +0,0 @@
const express = require('express');
const { Level } = require('level');
const { recoverPersonalSignature } = require('@metamask/eth-sig-util');
const objectHash = require('object-hash');
require('dotenv').config();
const app = express();
const port = process.env.PORT || 3000;
const dataDir = process.env.DATA_DIR || 'data';
const db = new Level(`${dataDir}/forum`, { valueEncoding: 'json' });
const verifySignature = ({
author, content, signature, embeddedData,
}) => {
let contentToVerify = content;
if (embeddedData && Object.entries(embeddedData).length) {
contentToVerify += `\n\n${JSON.stringify(embeddedData, null, 2)}`;
}
try {
const account = recoverPersonalSignature({ data: contentToVerify, signature });
if (account !== author) {
console.log('error: author does not match signature');
return false;
}
} catch (e) {
console.log('error: failed to recover signature:', e.message);
return false;
}
return true;
};
app.use(express.json());
app.post('/write', async (req, res) => {
const {
body: {
author, content, signature, embeddedData,
},
} = req;
// Check author signature
if (!verifySignature({
author, content, signature, embeddedData,
})) {
res.status(403).end();
return;
}
// Compute content hash
const data = {
author, content, signature, embeddedData,
};
const hash = objectHash(data);
console.log('write', hash);
console.log(data);
// Store content
db.put(hash, data);
// Return hash
res.send(hash);
});
app.get('/read/:hash', async (req, res) => {
const { hash } = req.params;
console.log('read', hash);
// Fetch content
let data;
try {
data = await db.get(req.params.hash);
} catch (e) {
console.log('read error:', e.message, hash);
res.status(e.status).end();
return;
}
data.embeddedData = data.embeddedData || undefined;
console.log(data);
// Verify hash
const derivedHash = objectHash(data);
if (derivedHash !== hash) {
console.log('error: hash mismatch');
res.status(500).end();
return;
}
// Verify signature
if (!verifySignature(data)) {
console.log('error: signature verification failed');
res.status(500).end();
return;
}
// Return content
res.json(data);
});
app.get('*', (req, res) => {
console.log(`404 req.path: ${req.path}`);
res.status(404).json({ errorCode: 404 });
});
app.listen(port, () => {
console.log(`Listening on port ${port}`);
});


@ -10,8 +10,10 @@
"license": "ISC",
"dependencies": {
"@metamask/eth-sig-util": "^7.0.1",
"axios": "^1.6.7",
"axios": "^1.6.8",
"bluebird": "^3.7.2",
"dotenv": "^16.4.5",
"ethers": "^6.12.0",
"express": "^4.18.2",
"level": "^8.0.1",
"object-hash": "^3.0.0"
@ -34,6 +36,11 @@
"node": ">=0.10.0"
}
},
"node_modules/@adraffy/ens-normalize": {
"version": "1.10.1",
"resolved": "https://registry.npmjs.org/@adraffy/ens-normalize/-/ens-normalize-1.10.1.tgz",
"integrity": "sha512-96Z2IP3mYmF1Xg2cDm8f1gWGf/HUVedQ3FMifV4kG/PQ4yEP51xDtRAEfhVNt5f/uzpNkZHwWQuUcu6D6K+Ekw=="
},
"node_modules/@babel/runtime": {
"version": "7.23.9",
"resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.23.9.tgz",
@ -400,6 +407,11 @@
"resolved": "https://registry.npmjs.org/@types/ms/-/ms-0.7.34.tgz",
"integrity": "sha512-nG96G3Wp6acyAgJqGasjODb+acrI7KltPiRxzHPXnP3NgI28bpQDRv53olbqGXbfcgF5aiiHmO3xpwEpS5Ld9g=="
},
"node_modules/@types/node": {
"version": "18.15.13",
"resolved": "https://registry.npmjs.org/@types/node/-/node-18.15.13.tgz",
"integrity": "sha512-N+0kuo9KgrUQ1Sn/ifDXsvg0TTleP7rIy4zOBGECxAljqvqfqpTfzx0Q1NUedOixRMBfe2Whhb056a42cWs26Q=="
},
"node_modules/@ungap/structured-clone": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/@ungap/structured-clone/-/structured-clone-1.2.0.tgz",
@ -456,6 +468,11 @@
"acorn": "^6.0.0 || ^7.0.0 || ^8.0.0"
}
},
"node_modules/aes-js": {
"version": "4.0.0-beta.5",
"resolved": "https://registry.npmjs.org/aes-js/-/aes-js-4.0.0-beta.5.tgz",
"integrity": "sha512-G965FqalsNyrPqgEGON7nIx1e/OVENSgiEIzyC63haUMuvNnwIgIjMs52hlTCKhkBny7A2ORNlfY9Zu+jmGk1Q=="
},
"node_modules/ajv": {
"version": "6.12.6",
"resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz",
@ -705,11 +722,11 @@
}
},
"node_modules/axios": {
"version": "1.6.7",
"resolved": "https://registry.npmjs.org/axios/-/axios-1.6.7.tgz",
"integrity": "sha512-/hDJGff6/c7u0hDkvkGxR/oy6CbCs8ziCsC7SqmhjfozqiJGc8Z11wrv9z9lYfY4K8l+H9TpjcMDX0xOZmx+RA==",
"version": "1.6.8",
"resolved": "https://registry.npmjs.org/axios/-/axios-1.6.8.tgz",
"integrity": "sha512-v/ZHtJDU39mDpyBoFVkETcd/uNdxrWRrg3bKpOKzXFA6Bvqopts6ALSMU3y6ijYxbw2B+wPrIv46egTzJXCLGQ==",
"dependencies": {
"follow-redirects": "^1.15.4",
"follow-redirects": "^1.15.6",
"form-data": "^4.0.0",
"proxy-from-env": "^1.1.0"
}
@ -748,6 +765,11 @@
}
]
},
"node_modules/bluebird": {
"version": "3.7.2",
"resolved": "https://registry.npmjs.org/bluebird/-/bluebird-3.7.2.tgz",
"integrity": "sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg=="
},
"node_modules/body-parser": {
"version": "1.20.1",
"resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.1.tgz",
@ -1701,6 +1723,55 @@
"@scure/bip39": "1.2.2"
}
},
"node_modules/ethers": {
"version": "6.12.0",
"resolved": "https://registry.npmjs.org/ethers/-/ethers-6.12.0.tgz",
"integrity": "sha512-zL5NlOTjML239gIvtVJuaSk0N9GQLi1Hom3ZWUszE5lDTQE/IVB62mrPkQ2W1bGcZwVGSLaetQbWNQSvI4rGDQ==",
"funding": [
{
"type": "individual",
"url": "https://github.com/sponsors/ethers-io/"
},
{
"type": "individual",
"url": "https://www.buymeacoffee.com/ricmoo"
}
],
"dependencies": {
"@adraffy/ens-normalize": "1.10.1",
"@noble/curves": "1.2.0",
"@noble/hashes": "1.3.2",
"@types/node": "18.15.13",
"aes-js": "4.0.0-beta.5",
"tslib": "2.4.0",
"ws": "8.5.0"
},
"engines": {
"node": ">=14.0.0"
}
},
"node_modules/ethers/node_modules/@noble/curves": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/@noble/curves/-/curves-1.2.0.tgz",
"integrity": "sha512-oYclrNgRaM9SsBUBVbb8M6DTV7ZHRTKugureoYEncY5c65HOmRzvSiTE3y5CYaPYJA/GVkrhXEoF0M3Ya9PMnw==",
"dependencies": {
"@noble/hashes": "1.3.2"
},
"funding": {
"url": "https://paulmillr.com/funding/"
}
},
"node_modules/ethers/node_modules/@noble/hashes": {
"version": "1.3.2",
"resolved": "https://registry.npmjs.org/@noble/hashes/-/hashes-1.3.2.tgz",
"integrity": "sha512-MVC8EAQp7MvEcm30KWENFjgR+Mkmf+D189XJTkFIlwohU5hcBbn1ZkKq7KVTi2Hme3PMGF390DaL52beVrIihQ==",
"engines": {
"node": ">= 16"
},
"funding": {
"url": "https://paulmillr.com/funding/"
}
},
"node_modules/express": {
"version": "4.18.2",
"resolved": "https://registry.npmjs.org/express/-/express-4.18.2.tgz",
@ -1861,9 +1932,9 @@
"dev": true
},
"node_modules/follow-redirects": {
"version": "1.15.5",
"resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.5.tgz",
"integrity": "sha512-vSFWUON1B+yAw1VN4xMfxgn5fTUiaOzAJCKBwIIgT/+7CuGy9+r+5gITvP62j3RmaD5Ph65UaERdOSRGUzZtgw==",
"version": "1.15.6",
"resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.6.tgz",
"integrity": "sha512-wWN62YITEaOpSK584EZXJafH1AGpO8RVgElfkuXbTOrPX4fIfOyEpW/CsiNd8JdYrAoOvafRTOEnvsO++qCqFA==",
"funding": [
{
"type": "individual",
@ -3802,6 +3873,11 @@
"strip-bom": "^3.0.0"
}
},
"node_modules/tslib": {
"version": "2.4.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.4.0.tgz",
"integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ=="
},
"node_modules/tweetnacl": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-1.0.3.tgz",
@ -4078,6 +4154,26 @@
"integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==",
"dev": true
},
"node_modules/ws": {
"version": "8.5.0",
"resolved": "https://registry.npmjs.org/ws/-/ws-8.5.0.tgz",
"integrity": "sha512-BWX0SWVgLPzYwF8lTzEy1egjhS4S4OEAHfsO8o65WOVsrnSRGaSiUaa9e0ggGlkMTtBlmOpEXiie9RUcBO86qg==",
"engines": {
"node": ">=10.0.0"
},
"peerDependencies": {
"bufferutil": "^4.0.1",
"utf-8-validate": "^5.0.2"
},
"peerDependenciesMeta": {
"bufferutil": {
"optional": true
},
"utf-8-validate": {
"optional": true
}
}
},
"node_modules/yallist": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz",


@ -2,7 +2,7 @@
"name": "backend",
"version": "1.0.0",
"description": "",
"main": "index.js",
"main": "src/index.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1"
},
@ -10,8 +10,10 @@
"license": "ISC",
"dependencies": {
"@metamask/eth-sig-util": "^7.0.1",
"axios": "^1.6.7",
"axios": "^1.6.8",
"bluebird": "^3.7.2",
"dotenv": "^16.4.5",
"ethers": "^6.12.0",
"express": "^4.18.2",
"level": "^8.0.1",
"object-hash": "^3.0.0"


@ -0,0 +1,23 @@
const contractAddresses = require('../contract-addresses.json');
const networks = {
localhost: '0x539',
sepolia: '0xaa36a7',
};
const getContractAddressByNetworkName = (networkName, contractName) => {
const address = contractAddresses[networkName][contractName];
if (!address) throw new Error(`Contract ${contractName} not recognized`);
return address;
};
const getContractAddressByChainId = (chainId, contractName) => {
const network = Object.entries(networks).find(([, id]) => id === chainId)[0];
if (!network) throw new Error(`Chain ID ${chainId} not recognized`);
return getContractAddressByNetworkName(network, contractName);
};
module.exports = {
getContractAddressByChainId,
getContractAddressByNetworkName,
};
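
For context, a minimal sketch of how these two helpers might be used elsewhere in the backend (the require path and the Hardhat chain ID 0x539 come from the files in this diff; running it from backend/src is an assumption):

// Hypothetical usage sketch, not part of the commit.
const {
  getContractAddressByNetworkName,
  getContractAddressByChainId,
} = require('./contract-config');

// By network name, as the backend does when ETH_NETWORK is set:
const daoAddress = getContractAddressByNetworkName('localhost', 'DAO');

// By chain ID, as a wallet connected to a local Hardhat node (0x539) would:
const sameAddress = getContractAddressByChainId('0x539', 'DAO');

console.log(daoAddress === sameAddress); // true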

backend/src/db.js (new file, 9 lines)

@ -0,0 +1,9 @@
const { Level } = require('level');
const dataDir = process.env.DATA_DIR || 'data';
module.exports = {
forum: new Level(`${dataDir}/forum`, { valueEncoding: 'json' }),
authorAddresses: new Level(`${dataDir}/authorAddresses`, { valueEncoding: 'utf8' }),
authorPrivKeys: new Level(`${dataDir}/authorPrivKeys`, { valueEncoding: 'utf8' }),
};


@ -0,0 +1,189 @@
const axios = require('axios');
const ethers = require('ethers');
const crypto = require('crypto');
const objectHash = require('object-hash');
const Promise = require('bluebird');
require('dotenv').config();
const verifySignature = require('./verify-signature');
const { getContractAddressByNetworkName } = require('./contract-config');
const { authorAddresses, authorPrivKeys, forum } = require('./db');
const DAOArtifact = require('../contractArtifacts/DAO.json');
const network = process.env.ETH_NETWORK;
console.log('network:', network);
const getProvider = () => {
switch (network) {
case 'localhost':
return ethers.getDefaultProvider('http://localhost:8545');
default:
throw new Error('Unknown network');
}
};
const signer = new ethers.Wallet(process.env.ETH_PRIVATE_KEY, getProvider());
const getContract = (name) => new ethers.Contract(
getContractAddressByNetworkName(process.env.ETH_NETWORK, name),
DAOArtifact.abi,
signer,
);
const fetchPaperInfo = async (paperId, retryDelay = 5000) => {
const url = `https://api.semanticscholar.org/graph/v1/paper/${paperId}?fields=title,url,authors,references`;
console.log('url:', url);
let retry = false;
let paper;
const response = await axios.get(url, {
headers: {
'api-key': process.env.SEMANTIC_SCHOLAR_API_KEY,
},
}).catch(async (error) => {
if (error.response?.status === 429) {
// Rate limit
retry = true;
return;
}
// Some other error occurred
throw new Error(error);
});
if (retry) {
console.log('retry delay (sec):', retryDelay / 1000);
await new Promise((resolve) => {
setTimeout(resolve, retryDelay);
});
paper = await fetchPaperInfo(paperId, retryDelay * 2);
} else {
paper = response.data;
}
return paper;
};
const getAuthorsInfo = async (paper) => Promise.mapSeries(
paper.authors.filter((x) => !!x.authorId),
async ({ authorId }) => {
// Check if we already have an account for each author
let authorAddress;
let authorPrivKey;
try {
authorAddress = await authorAddresses.get(authorId);
} catch (e) {
// Probably not found
}
if (authorAddress) {
// This should always succeed, so we don't use try/catch here
authorPrivKey = await authorPrivKeys.get(authorAddress);
} else {
// Generate and store a new account
const id = crypto.randomBytes(32).toString('hex');
authorPrivKey = `0x${id}`;
const wallet = new ethers.Wallet(authorPrivKey);
authorAddress = wallet.address;
await authorAddresses.put(authorId, authorAddress);
await authorPrivKeys.put(authorAddress, authorPrivKey);
}
return {
authorAddress,
authorPrivKey,
};
},
);
const generatePost = async (paper) => {
const authorsInfo = await getAuthorsInfo(paper);
if (!authorsInfo.length) {
throw new Error('Paper has no authors with id');
}
const firstAuthorWallet = new ethers.Wallet(authorsInfo[0].authorPrivKey);
const eachAuthorWeightPercent = Math.floor(100 / authorsInfo.length);
const authors = authorsInfo.map(({ authorAddress }) => ({
weightPercent: eachAuthorWeightPercent,
authorAddress,
}));
// Make sure author weights sum to 100
const totalAuthorsWeight = authors.reduce((t, { weightPercent }) => t + weightPercent, 0);
authors[0].weightPercent += 100 - totalAuthorsWeight;
const content = `Semantic Scholar paper ${paper.paperId}
${paper.title}
HREF ${paper.url}`;
const embeddedData = {
semanticScholarPaperId: paper.paperId,
};
let contentToSign = content;
if (embeddedData && Object.entries(embeddedData).length) {
contentToSign += `\n\n${JSON.stringify(embeddedData, null, 2)}`;
}
const signature = firstAuthorWallet.signMessageSync(contentToSign);
console.log({
authors, content, signature, embeddedData,
});
const verified = verifySignature({
authors, content, signature, embeddedData,
});
if (!verified) {
throw new Error('Signature verification failed');
}
const hash = objectHash({
authors, content, signature, embeddedData,
});
return {
hash, authors, content, signature, embeddedData,
};
};
module.exports = async (req, res) => {
const dao = await getContract('DAO');
const {
body: {
paperId,
},
} = req;
console.log(`importFromSS ${paperId}`);
// Read the paper info from SS
const paper = await fetchPaperInfo(paperId);
console.log('references count:', paper.references.length);
const eachCitationWeightPercent = Math.floor(30 / paper.references.length);
const citations = await Promise.mapSeries(
paper.references.filter((x) => !!x.paperId),
async ({ paperId: citedPaperId }) => {
// We need to fetch this paper so we can generate the post we WOULD add to the forum.
// That way, if we later add the cited paper to the blockchain it will have the correct hash.
// The forum allows dangling citations to support this use case.
const citedPaper = await fetchPaperInfo(citedPaperId);
const citedPost = await generatePost(citedPaper);
return {
weightPercent: eachCitationWeightPercent,
targetPostId: citedPost.hash,
};
},
);
// Make sure citation weights sum to 100
const totalCitationWeight = citations.reduce((t, { weightPercent }) => t + weightPercent, 0);
citations[0].weightPercent += 100 - totalCitationWeight;
// Create a post for this paper
const {
hash, authors, content, signature, embeddedData,
} = await generatePost(paper);
console.log({
hash, authors, content, signature, embeddedData, citations,
});
// Write the new post to our database
await forum.put(hash, {
authors, content, signature, embeddedData, citations,
});
// Add the post to the forum (on-chain)
await dao.addPost(authors, hash, citations);
console.log(`Added post to blockchain for paper ${paperId}`);
res.end();
};
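
One detail worth calling out in the handler above is the integer weight arithmetic: authors each get the floor of an even split of 100, citations are seeded with the floor of 30 divided by the reference count, and in both cases the leftover is added to the first entry so the weights sum to exactly 100. A small standalone illustration of the even-split case (hypothetical helper, not part of the commit):

// Hypothetical illustration of the remainder-to-first-entry balancing used above.
const distributeWeights = (count, total = 100) => {
  const each = Math.floor(total / count);      // even split, rounded down
  const weights = new Array(count).fill(each);
  weights[0] += total - each * count;          // remainder goes to the first entry
  return weights;
};

console.log(distributeWeights(3)); // [ 34, 33, 33 ] -> sums to 100
console.log(distributeWeights(7)); // [ 16, 14, 14, 14, 14, 14, 14 ]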

backend/src/index.js (new file, 25 lines)

@ -0,0 +1,25 @@
const express = require('express');
const read = require('./read');
const write = require('./write');
const importFromSS = require('./import-from-ss');
require('dotenv').config();
const app = express();
const port = process.env.PORT || 3000;
app.use(express.json());
app.post('/write', write);
app.get('/read/:hash', read);
app.post('/importFromSemanticScholar', importFromSS);
app.get('*', (req, res) => {
console.log(`404 req.path: ${req.path}`);
res.status(404).json({ errorCode: 404 });
});
app.listen(port, () => {
console.log(`Listening on port ${port}`);
});
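
With the routes split out this way, the new Semantic Scholar import is a single POST to the daemon. A minimal sketch of a client call, assuming the backend listens on the default port 3000 and using a placeholder paper ID:

// Hypothetical client call, not part of the commit.
const axios = require('axios');

const importPaper = async (paperId) => {
  // Triggers backend/src/import-from-ss.js; the response body is empty.
  await axios.post('http://localhost:3000/importFromSemanticScholar', { paperId });
  // The resulting post can then be fetched back by hash via GET /read/:hash.
};

importPaper('0123456789abcdef0123456789abcdef01234567').catch(console.error);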

backend/src/read.js (new file, 47 lines)

@ -0,0 +1,47 @@
const objectHash = require('object-hash');
const verifySignature = require('./verify-signature');
const { forum } = require('./db');
module.exports = async (req, res) => {
const { hash } = req.params;
console.log('read', hash);
// Fetch content
let data;
try {
data = await forum.get(req.params.hash);
} catch (e) {
console.log('read error:', e.message, hash);
res.status(e.status).end();
return;
}
data.embeddedData = data.embeddedData || undefined;
console.log(data);
const {
authors, content, signature, embeddedData,
} = data;
// Verify hash
const derivedHash = objectHash({
authors, content, signature, embeddedData,
});
if (derivedHash !== hash) {
console.log('error: hash mismatch');
res.status(500).end();
return;
}
// Verify signature
if (!verifySignature(data)) {
console.log('error: signature verification failed');
res.status(500).end();
return;
}
// Return content
res.json(data);
};


@ -0,0 +1,25 @@
const { recoverPersonalSignature } = require('@metamask/eth-sig-util');
const verifySignature = ({
authors, content, signature, embeddedData,
}) => {
let contentToVerify = content;
if (embeddedData && Object.entries(embeddedData).length) {
contentToVerify += `\n\n${JSON.stringify(embeddedData, null, 2)}`;
}
try {
const account = recoverPersonalSignature({ data: contentToVerify, signature });
console.log(`recovered account: ${account}`);
const authorAddresses = authors.map((author) => author.authorAddress.toLowerCase());
if (!authorAddresses.includes(account.toLowerCase())) {
console.log('error: signer is not among the authors');
return false;
}
} catch (e) {
console.log('error: failed to recover signature:', e.message);
return false;
}
return true;
};
module.exports = verifySignature;
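
The scheme this verifier expects matches what generatePost in import-from-ss.js produces: the signed message is the post content, followed by a blank line and the pretty-printed embeddedData JSON whenever embeddedData is non-empty, and the recovered signer must be one of the listed authors. A minimal round-trip sketch with a throwaway ethers wallet (hypothetical values, assumed to run from backend/src):

// Hypothetical round-trip, not part of the commit.
const ethers = require('ethers');
const verifySignature = require('./verify-signature');

const wallet = ethers.Wallet.createRandom();
const content = 'Example post content';
const embeddedData = { semanticScholarPaperId: 'abc123' };

// Same concatenation rule as generatePost and the verifier above.
let contentToSign = content;
if (Object.entries(embeddedData).length) {
  contentToSign += `\n\n${JSON.stringify(embeddedData, null, 2)}`;
}

const signature = wallet.signMessageSync(contentToSign);
const authors = [{ weightPercent: 100, authorAddress: wallet.address }];

console.log(verifySignature({ authors, content, signature, embeddedData })); // true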

backend/src/write.js (new file, 35 lines)

@ -0,0 +1,35 @@
const objectHash = require('object-hash');
const verifySignature = require('./verify-signature');
const { forum } = require('./db');
module.exports = async (req, res) => {
const {
body: {
authors, content, signature, embeddedData, citations,
},
} = req;
// Check author signature
if (!verifySignature({
authors, content, signature, embeddedData,
})) {
res.status(403).end();
return;
}
// Compute content hash
const data = {
authors, content, signature, embeddedData, citations,
};
const hash = objectHash({
authors, content, signature, embeddedData,
});
console.log('write', hash);
console.log(data);
// Store content
await forum.put(hash, data);
// Return hash
res.send(hash);
};
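
Compared to the old single-author endpoint, the body POSTed to /write now carries an authors array and optional citations rather than a single author field. A hypothetical example payload (addresses, signature, and cited hash are placeholders):

// Hypothetical /write request body under the multi-author format.
const writeBody = {
  authors: [
    { weightPercent: 60, authorAddress: '0x1111111111111111111111111111111111111111' },
    { weightPercent: 40, authorAddress: '0x2222222222222222222222222222222222222222' },
  ],
  content: 'Example post content',
  signature: '0x...', // must recover to one of the listed authors
  embeddedData: {},
  citations: [
    { weightPercent: 100, targetPostId: 'citedPostHashPlaceholder' },
  ],
};
// e.g. axios.post('http://localhost:3000/write', writeBody) returns the content hash.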


@ -1,9 +1,9 @@
{
"localhost": {
"DAO": "0x8d914D38dD301FC4606f5aa9fEcF8A76389020d3",
"Work1": "0xfe58B9EB03F75A603de1B286584f5E9532ab8fB5",
"Onboarding": "0x1d63FDe5B461106729fE1e5e38A02fc68C518Af5",
"Proposals": "0x050C420Cc4995B41217Eba1B54B82Fd5687e9139"
"DAO": "0xD60A1c64B96a133587A75C2771690072F238a549",
"Work1": "0xCF3f16D151052FA7b99a71E79EC3b0e6C793aa0b",
"Onboarding": "0xE148e864A646B8bFc95dcc9acd3dBcB52704EE60",
"Proposals": "0x981234BBBC1ec93200F5BB3a65e2F9711A6109aa"
},
"sepolia": {
"DAO": "0x241514DC94568e98222fBE66662b054b545A61AE",


@ -15,9 +15,12 @@ const deployContract = async (name, args = [], isCore = false) => {
contractAddresses[network][name] = contract.target;
const from = `./artifacts/contracts/${isCore ? 'core/' : ''}${name}.sol/${name}.json`;
const to = `../frontend/src/assets/${name}.json`;
fs.copyFileSync(from, to);
console.log(`Copied ${fs.realpathSync(from)} to ${fs.realpathSync(to)}`);
const toFrontend = `../frontend/contractArtifacts/${name}.json`;
const toBackend = `../backend/contractArtifacts/${name}.json`;
fs.copyFileSync(from, toFrontend);
console.log(`Copied ${fs.realpathSync(from)} to ${fs.realpathSync(toFrontend)}`);
fs.copyFileSync(from, toBackend);
console.log(`Copied ${fs.realpathSync(from)} to ${fs.realpathSync(toBackend)}`);
writeContractAddresses(contractAddresses);
};


@ -1,8 +1,11 @@
const fs = require('fs');
const writeContractAddresses = (contractAddresses) => {
fs.writeFileSync('../frontend/src/contract-addresses.json', JSON.stringify(contractAddresses, null, 2));
console.log('Wrote file', fs.realpathSync('../client/src/contract-addresses.json'));
fs.writeFileSync('../frontend/contract-addresses.json', JSON.stringify(contractAddresses, null, 2));
console.log('Wrote file', fs.realpathSync('../frontend/contract-addresses.json'));
fs.writeFileSync('../backend/contract-addresses.json', JSON.stringify(contractAddresses, null, 2));
console.log('Wrote file', fs.realpathSync('../backend/contract-addresses.json'));
fs.writeFileSync('./contract-addresses.json', JSON.stringify(contractAddresses, null, 2));
console.log('Wrote file', fs.realpathSync('./contract-addresses.json'));


@ -0,0 +1,14 @@
{
"localhost": {
"DAO": "0xD60A1c64B96a133587A75C2771690072F238a549",
"Work1": "0xCF3f16D151052FA7b99a71E79EC3b0e6C793aa0b",
"Onboarding": "0xE148e864A646B8bFc95dcc9acd3dBcB52704EE60",
"Proposals": "0x981234BBBC1ec93200F5BB3a65e2F9711A6109aa"
},
"sepolia": {
"DAO": "0x241514DC94568e98222fBE66662b054b545A61AE",
"Work1": "0xc04152a440d8f79099e2049dc19b07EE7f2F8cc0",
"Onboarding": "0xFa5877940e527559320afc1303c06D0fb7E88907",
"Proposals": "0xeA9AF5fF56ef2bfd9DbC1295F1488302c61B92dF"
}
}

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long


@ -17,9 +17,9 @@ import './App.css';
import useList from './utils/List';
import { getContractAddressByChainId } from './utils/contract-config';
import Web3Context from './contexts/Web3Context';
import DAOArtifact from './assets/DAO.json';
import Work1Artifact from './assets/Work1.json';
import OnboardingArtifact from './assets/Onboarding.json';
import DAOArtifact from '../contractArtifacts/DAO.json';
import Work1Artifact from '../contractArtifacts/Work1.json';
import OnboardingArtifact from '../contractArtifacts/Onboarding.json';
import WorkContract from './components/work-contracts/WorkContract';
import AddPostModal from './components/posts/AddPostModal';
import ViewPostModal from './components/posts/ViewPostModal';
@ -82,9 +82,10 @@ function App() {
}, [DAORef, account]);
const fetchPost = useCallback(async (postId) => {
const p = await DAORef.current.methods.posts(postId).call();
dispatchPost({ type: 'updateById', item: p });
return p;
const post = await DAORef.current.methods.posts(postId).call();
post.authors = await DAORef.current.methods.getPostAuthors(postId).call();
dispatchPost({ type: 'updateById', item: post });
return post;
}, [DAORef, dispatchPost]);
const fetchPostId = useCallback(async (postIndex) => {
@ -186,14 +187,14 @@ function App() {
setWork1(Work1Contract);
setOnboarding(OnboardingContract);
const fetchReputationInterval = setInterval(() => {
console.log('reputation', reputation);
if (reputation !== undefined) {
clearInterval(fetchReputationInterval);
return;
}
fetchReputation();
}, 1000);
// const fetchReputationInterval = setInterval(() => {
// // console.log('reputation', reputation);
// if (reputation !== undefined) {
// clearInterval(fetchReputationInterval);
// return;
// }
// fetchReputation();
// }, 1000);
/* -------------------------------------------------------------------------------- */
/* --------------------------- BEGIN EVENT HANDLERS ------------------------------- */
@ -414,7 +415,7 @@ function App() {
<thead>
<tr>
<th>ID</th>
<th>Author</th>
<th>Authors</th>
<th>Sender</th>
<th>Actions</th>
</tr>
@ -423,7 +424,18 @@ function App() {
{posts.filter((x) => !!x).map((post) => (
<tr key={post.id}>
<td>{post.id.toString()}</td>
<td>{getAddressName(chainId, post.author)}</td>
<td>
<Stack>
{post.authors.map(({ authorAddress, weightPercent }) => (
<div key={authorAddress}>
{getAddressName(chainId, authorAddress)}
{' '}
{weightPercent.toString()}
%
</div>
))}
</Stack>
</td>
<td>{getAddressName(chainId, post.sender)}</td>
<td>
<Button onClick={() => handleShowViewPost(post)}>

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long


@ -7,7 +7,7 @@ import Button from 'react-bootstrap/esm/Button';
import Stack from 'react-bootstrap/esm/Stack';
import useList from '../utils/List';
import Web3Context from '../contexts/Web3Context';
import ProposalsArtifact from '../assets/Proposals.json';
import ProposalsArtifact from '../../contractArtifacts/Proposals.json';
import { getContractAddressByChainId } from '../utils/contract-config';
import AddPostModal from './posts/AddPostModal';
import ViewPostModal from './posts/ViewPostModal';


@ -19,7 +19,7 @@ function AddPostModal({
const handleSubmit = useCallback(async () => {
// Upload content to API
const post = new Post({ content });
const post = new Post({ content, authors: [{ weightPercent: 100, authorAddress: account }] });
// Include metamask signature
await post.sign(provider, account);
// Clear the input and hide the modal
@ -38,7 +38,7 @@ function AddPostModal({
}, [provider, DAO, account, content, setShow, postToBlockchain, onSubmit]);
return (
<Modal show={show} onHide={handleClose}>
<Modal className="modal" show={show} onHide={handleClose}>
<Modal.Header closeButton>
<Modal.Title>{title}</Modal.Title>
</Modal.Header>


@ -1,37 +1,63 @@
import Button from 'react-bootstrap/Button';
import Modal from 'react-bootstrap/Modal';
import Stack from 'react-bootstrap/Stack';
import PropTypes from 'prop-types';
function ViewPostModal({
show, setShow, title, post,
}) {
const handleClose = () => setShow(false);
const { content, author, embeddedData } = post;
const {
content, authors, embeddedData, citations,
} = post;
const embeddedDataJson = JSON.stringify(embeddedData, null, 2);
return (
<Modal show={show} onHide={handleClose}>
<Modal className="modal-lg" show={show} onHide={handleClose}>
<Modal.Header closeButton>
<Modal.Title>
{title}
</Modal.Title>
</Modal.Header>
<Modal.Body>
<h6>
Author:
{' '}
{author}
</h6>
<h5>Authors</h5>
<Stack>
{authors?.map(({ authorAddress, weightPercent }) => (
<div key={authorAddress}>
{authorAddress}
{' '}
{weightPercent.toString()}
%
</div>
))}
</Stack>
<hr />
<p className="post-content">
{content}
</p>
{embeddedData && Object.entries(embeddedData).length && (
<hr />
{embeddedData && Object.entries(embeddedData).length > 0 && (
<pre>
{embeddedDataJson}
</pre>
)}
{citations && citations.length > 0 && (
<>
<hr />
<h5>Citations</h5>
<Stack>
{citations.map(({ weightPercent, targetPostId }) => (
<div key={targetPostId}>
{targetPostId}
{' '}
{weightPercent.toString()}
%
</div>
))}
</Stack>
</>
)}
</Modal.Body>
<Modal.Footer>
<Button variant="secondary" onClick={handleClose}>


@ -9,7 +9,7 @@ import PropTypes from 'prop-types';
import Web3 from 'web3';
import Web3Context from '../../contexts/Web3Context';
import Post from '../../utils/Post';
import ProposalsArtifact from '../../assets/Proposals.json';
import ProposalsArtifact from '../../../contractArtifacts/Proposals.json';
import { getContractAddressByChainId } from '../../utils/contract-config';
import WorkContractContext from '../../contexts/WorkContractContext';
@ -34,7 +34,7 @@ function ProposePriceChangeModal({
const handleClose = () => setShow(false);
const handleSubmit = useCallback(async () => {
const post = new Post({ content });
const post = new Post({ content, authors: [{ weightPercent: 100, authorAddress: account }] });
// Include price as embedded data
post.embeddedData = { proposedPrice };
// Include metamask signature


@ -9,25 +9,26 @@ window.Buffer = Buffer;
class Post {
constructor({
author, content, signature, hash, embeddedData,
authors, content, signature, hash, embeddedData, citations,
}) {
this.author = author;
this.authors = authors;
this.content = content;
this.signature = signature;
this.hash = hash;
this.embeddedData = embeddedData;
this.embeddedData = embeddedData ?? {};
this.citations = citations ?? [];
}
// Read from API
static async read(hash) {
const {
data: {
content, author, signature, embeddedData,
content, authors, signature, embeddedData, citations,
},
} = await axios.get(`/api/read/${hash}`);
// Verify hash
const derivedHash = objectHash({
author, content, signature, embeddedData,
authors, content, signature, embeddedData,
});
if (hash !== derivedHash) {
throw new Error('Hash mismatch');
@ -38,25 +39,21 @@ class Post {
contentToVerify += `\n\n${JSON.stringify(embeddedData, null, 2)}`;
}
const recovered = recoverPersonalSignature({ data: contentToVerify, signature });
if (recovered !== author) {
throw new Error('Author mismatch');
const authorAddresses = authors.map((author) => author.authorAddress.toLowerCase());
if (!authorAddresses.includes(recovered.toLowerCase())) {
throw new Error('Signer is not among the authors');
}
return new Post({
content, author, signature, hash, embeddedData,
content, authors, signature, hash, embeddedData, citations,
});
}
static deriveEmbeddedData(content) {
const dataStart = content.search(/^\{/);
const dataStr = content.substring(dataStart);
const embeddedData = JSON.parse(dataStr);
return embeddedData;
}
// Include MetaMask signature
async sign(web3Provider, account) {
this.author = account;
const author = this.authors?.find(({ authorAddress }) => authorAddress === account);
if (!author) {
throw new Error('Post must be signed by one of its authors');
}
let contentToSign = this.content;
if (this.embeddedData && Object.entries(this.embeddedData).length) {
contentToSign += `\n\n${JSON.stringify(this.embeddedData, null, 2)}`;
@ -72,10 +69,11 @@ class Post {
// Write to API
async write() {
const data = {
author: this.author,
authors: this.authors,
content: this.content,
signature: this.signature,
embeddedData: this.embeddedData,
citations: this.citations,
};
const { data: hash } = await axios.post('/api/write', data);
this.hash = hash;
@ -83,10 +81,7 @@ class Post {
// Upload hash to blockchain
async publish(DAO, account) {
await DAO.methods.addPost([{
weightPercent: 100,
authorAddress: account,
}], this.hash, []).send({
await DAO.methods.addPost(this.authors, this.hash, this.citations ?? []).send({
from: account,
gas: 1000000,
});


@ -1,4 +1,4 @@
import contractAddresses from '../contract-addresses.json';
import contractAddresses from '../../contract-addresses.json';
const networks = {
localhost: '0x539',