Skip to content

Commit

Permalink
feat: Early Adoption list for the Mainnet (#341)
Browse files Browse the repository at this point in the history
  • Loading branch information
skhomuti authored Oct 2, 2024
1 parent 9c7d014 commit 7a5f3ae
Show file tree
Hide file tree
Showing 37 changed files with 150,872 additions and 3 deletions.
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@ node_modules/
.idea
.vscode
.direnv
.DS_Store

# Foundry files
broadcast/
Expand Down
19 changes: 19 additions & 0 deletions artifacts/mainnet/early-adoption/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@

## How to build

Install dependencies from the root of the repository:
```bash
yarn install
```

Run the script:
```bash
cd artifacts/mainnet/early-adoption
node compose.js
```

## Output files

- `addresses.json` - plain JSON array of unique, lowercased addresses
- `merkle-tree.json` - Merkle tree of the list
- `merkle-proofs.json` - Merkle proofs for each address
33,617 changes: 33,617 additions & 0 deletions artifacts/mainnet/early-adoption/addresses.json

Large diffs are not rendered by default.

148 changes: 148 additions & 0 deletions artifacts/mainnet/early-adoption/compose.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,148 @@
const fs = require("node:fs");
const readline = require("node:readline");
const { StandardMerkleTree } = require("@openzeppelin/merkle-tree");

// Source CSV files of candidate addresses (one address per row, first column).
const csvFiles = [
  "sources/galxe-lido-point-holders.csv",
  "sources/lido-dappnode-buyers.csv",
  "sources/obol-techne-credentials-base.csv",
  "sources/obol-techne-credentials-bronze.csv",
  "sources/obol-techne-credentials-silver.csv",
  "sources/rated-solo-staker.csv",
  "sources/stake-cat-gnosischain-solo-stakers.csv",
  "sources/stake-cat-rocketpool-solo-stakers.csv",
  "sources/stake-cat-solo-stakers-B.csv",
];

// CSM testnet good performers: added to the list even when an address
// also appears in one of the exclusion files (see main()).
const performersCsvFiles = [
  "sources/csm-testnet-good-performers.csv",
];

// Every list that contributes addresses; used for report columns and stats.
const allCsvFiles = [...csvFiles, ...performersCsvFiles];

// Addresses found in these files are removed from the final list
// (unless rescued by the good-performers list).
const csvFilesToExclude = [
  "sources/exclude/ever-slashed.csv",
  "sources/exclude/pro-node-operators.csv",
  "sources/exclude/csm-testnet-bad-performers.csv",
];


/**
 * Reads addresses from a list of CSV files.
 *
 * Only the first column of each row is used; addresses are lowercased so
 * the same address with different casing dedupes to one key. A file appears
 * multiple times in an entry's `sources` when the address is duplicated
 * within that file.
 *
 * @param {string[]} files - CSV file paths to read
 * @returns {Promise<Object<string, {sources: string[]}>>}
 *   map of lowercased address -> the files it was found in
 */
async function readCsvFiles(files) {
  const addresses = {};

  for (const file of files) {
    const fileStream = fs.createReadStream(file);
    const rl = readline.createInterface({
      input: fileStream,
      crlfDelay: Infinity, // treat \r\n as a single line break
    });

    for await (const line of rl) {
      let [address] = line.split(","); // Assuming CSV has only one column for addresses
      address = address.trim().toLowerCase();
      // Skip blank lines so they don't register as an empty "" address.
      if (address === "") continue;
      if (addresses[address]) {
        addresses[address].sources.push(file);
      } else {
        addresses[address] = { sources: [file] };
      }
    }
  }

  return addresses;
}

/**
 * Builds a StandardMerkleTree over a list of addresses.
 * Each leaf encodes a single `address` value.
 *
 * @param {string[]} addresses - plain list of address strings
 * @returns {{tree: StandardMerkleTree}} the constructed tree
 */
function buildMerkleTree(addresses) {
  const leaves = addresses.map((address) => [address]);
  const tree = StandardMerkleTree.of(leaves, ["address"]);
  return { tree };
}

/**
 * Renders the per-source membership matrix as CSV.
 *
 * Header: "address" followed by one column per known source file
 * (base file name without extension). Each row marks membership in
 * a source with "X" and leaves the cell empty otherwise.
 *
 * @param {Object<string, {sources: string[]}>} addresses
 * @returns {string} CSV text, every row terminated by "\n"
 */
function buildCsvContent(addresses) {
  const columnNames = allCsvFiles.map((file) => file.split("/").pop().split(".")[0]);
  const rows = [["address", ...columnNames].join(",")];
  for (const [address, info] of Object.entries(addresses)) {
    const marks = allCsvFiles.map((file) => (info.sources.includes(file) ? "X" : ""));
    rows.push([address, ...marks].join(","));
  }
  return rows.join("\n") + "\n";
}

/**
 * Renders the exclusion report as CSV.
 *
 * One row per excluded address that is present in the collected lists and
 * NOT rescued by the good-performers list. Columns: address,
 * exclusion_reason (";"-joined excluding files), sources (";"-joined
 * origin files).
 *
 * @param {Object<string, {sources: string[]}>} addresses - all collected addresses
 * @param {Object<string, {sources: string[]}>} excludeAddresses - addresses to exclude
 * @param {Object<string, {sources: string[]}>} goodPerformers - addresses exempt from exclusion
 * @returns {string} CSV text, every row terminated by "\n"
 */
function buildExclusionCsvContent(addresses, excludeAddresses, goodPerformers) {
  const header = ["address", "exclusion_reason", "sources"];
  let content = header.join(",") + "\n";
  for (const address in excludeAddresses) {
    // Object.hasOwn is O(1) per lookup; the previous
    // Object.keys(...).includes(...) rebuilt and scanned the whole key
    // array on every iteration (O(n^2) over ~150k addresses).
    if (Object.hasOwn(addresses, address) && !Object.hasOwn(goodPerformers, address)) {
      content += `${address},${excludeAddresses[address].sources.join(";")},${addresses[address].sources.join(";")}\n`;
    }
  }
  return content;
}

// Entry point: builds the Early Adoption list and its Merkle artifacts.
// Pipeline:
//   1. read candidate CSVs and exclusion CSVs;
//   2. drop excluded addresses, then (re-)add testnet good performers —
//      good performance overrides exclusion;
//   3. build the Merkle tree and a proof per address;
//   4. write sources.csv, addresses.json, merkle-tree.json,
//      merkle-proofs.json, exclusions.csv to the current directory;
//   5. print per-source membership statistics.
(async function main() {
const allAddresses = await readCsvFiles(csvFiles);
const excludeAddresses = await readCsvFiles(csvFilesToExclude);

// Shallow copy: entry objects (and their `sources` arrays) are shared
// with allAddresses, so pushes below are visible through both maps.
let addresses = {...allAddresses};
for (const address in excludeAddresses) {
delete addresses[address];
}

// Good performers are added back even if excluded above; for an address
// that survived exclusion, their source files are appended to its list.
const goodPerformers = await readCsvFiles(performersCsvFiles);
for (const address in goodPerformers) {
if (addresses[address]) {
addresses[address].sources.push(...goodPerformers[address].sources);
} else {
addresses[address] = { sources: goodPerformers[address].sources };
}
}

console.log("Total addresses:", Object.keys(allAddresses).length);
console.log("Total excluded:", Object.keys(excludeAddresses).length);

const { tree } = buildMerkleTree(Object.keys(addresses));
console.log("Merkle Root:", tree.root);

// Map each leaf's address (v[0]) to its Merkle proof.
const proofs = {}
for (const [i, v] of tree.entries()) {
proofs[v[0]] = tree.getProof(i);
}

const content = buildCsvContent(addresses);
// we do not report as excluded addresses that are in good performers list
const exclusionContent = buildExclusionCsvContent(allAddresses, excludeAddresses, goodPerformers);

fs.writeFileSync("sources.csv", content);
fs.writeFileSync("addresses.json", JSON.stringify(Object.keys(addresses), null, 2));
fs.writeFileSync("merkle-tree.json", JSON.stringify(tree.dump()));
fs.writeFileSync("merkle-proofs.json", JSON.stringify(proofs));
fs.writeFileSync("exclusions.csv", exclusionContent);
console.log("Merkle tree and proofs have been written to files.");

// Per-source stats: an address found in several sources counts as
// "duplicate" for each of them, otherwise "unique" for its single source.
const sources = {};
for (const source of allCsvFiles) {
sources[source] = { total: 0, unique: 0, duplicate: 0 };
}

for (const [address, info] of Object.entries(addresses)) {
if (info.sources.length > 1) {
for (const source of info.sources) {
sources[source].total++;
sources[source].duplicate++;
}
continue;
}
sources[info.sources[0]].total++;
sources[info.sources[0]].unique++;
}

console.log("Unique addresses for each source:");
for (const fileName in sources) {
console.log(fileName + ":");
const fileData = sources[fileName];
for (const key in fileData) {
console.log(` ${key}:`, fileData[key]);
}
}
console.log("\nTotal unique addresses:", Object.keys(addresses).length);
})();
Loading

0 comments on commit 7a5f3ae

Please sign in to comment.