Commit

Merge branch 'main' into overnight-rebase
matt5346 committed Apr 30, 2024
2 parents 7daef20 + 29970b8 commit 7c456ab
Showing 37 changed files with 301,885 additions and 51,768 deletions.
1 change: 1 addition & 0 deletions README.md
@@ -58,6 +58,7 @@ Goal: **Hourly snapshot of TVL by User by Asset**
 For each protocol, we are looking for the following:
 1. A query that fetches all relevant events required to calculate user TVL in the protocol at an hourly level.
 2. Code that uses the above query, fetches all the data, and converts it to a CSV file in the format given below.
+3. Token amounts should be raw token amounts. Please do not divide by decimals.
 
 Teams can refer to the example provided there to write the required code.
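For reference, the output format corresponds to the CSVRow shape defined in the connext diff later in this commit. An illustrative header plus one hypothetical data row (addresses and values are made up; note that token_balance stays in raw units, per item 3 above):

block_number,timestamp,user_address,token_address,token_balance,token_symbol,usd_price
19506984,1711429021,0x0000000000000000000000000000000000000001,0x0000000000000000000000000000000000000002,1000000000000000000,WETH,0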
1 change: 1 addition & 0 deletions adapters/celer/package.json
@@ -3,6 +3,7 @@
   "version": "1.0.0",
   "private": true,
   "dependencies": {
+    "csv-parser": "^3.0.0",
     "fast-csv": "^5.0.1"
   },
   "scripts": {
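Both CSV libraries now appear in this adapter's dependencies. As the src/index.ts diff below shows, csv-parser handles the read side (hourly_blocks.csv) and fast-csv the write side (outputData.csv); roughly:

import csv from 'csv-parser';     // streaming reader: fs.createReadStream(path).pipe(csv())
import { write } from 'fast-csv'; // streaming writer: write(rows, { headers: true }).pipe(ws)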
64 changes: 49 additions & 15 deletions adapters/celer/src/index.ts
@@ -1,5 +1,6 @@
-import { write } from "fast-csv";
-import fs from "fs";
+import csv from 'csv-parser';
+import fs from 'fs';
+import { write } from 'fast-csv';
 
 interface BlockData {
   blockNumber: number;
@@ -48,20 +49,53 @@ export const getUserTVLByBlock = async (data: BlockData) => {
   return csvRows;
 };
 
-const getData = async () => {
-  // Write the CSV output to a file
-  const dataList = await getUserTVLByBlock({
-    blockNumber: 19506984,
-    blockTimestamp: 1711429021,
-  });
-  const ws = fs.createWriteStream("outputData.csv");
-  write(dataList, { headers: true })
-    .pipe(ws)
-    .on("finish", () => {
-      console.log("CSV file has been written.");
-    });
-};
-
-getData().then(() => {
-  console.log("Done");
-});
+const readBlocksFromCSV = async (filePath: string): Promise<BlockData[]> => {
+  const blocks: BlockData[] = [];
+
+  await new Promise<void>((resolve, reject) => {
+    fs.createReadStream(filePath)
+      .pipe(csv()) // default csv-parser config: comma separator, first row as headers
+      .on('data', (row) => {
+        const blockNumber = parseInt(row.number, 10);
+        const blockTimestamp = parseInt(row.timestamp, 10);
+        if (!isNaN(blockNumber) && !isNaN(blockTimestamp)) {
+          blocks.push({ blockNumber, blockTimestamp });
+        }
+      })
+      .on('end', () => {
+        resolve();
+      })
+      .on('error', (err) => {
+        reject(err);
+      });
+  });
+
+  return blocks;
+};
+
+readBlocksFromCSV('hourly_blocks.csv').then(async (blocks: BlockData[]) => {
+  console.log(blocks);
+  const allCsvRows: any[] = []; // Accumulates CSV rows across all blocks
+
+  for (const block of blocks) {
+    try {
+      const result = await getUserTVLByBlock(block);
+      allCsvRows.push(...result);
+    } catch (error) {
+      console.error(`An error occurred for block ${block.blockNumber}:`, error);
+    }
+  }
+
+  await new Promise<void>((resolve) => {
+    const ws = fs.createWriteStream('outputData.csv', { flags: 'w' });
+    write(allCsvRows, { headers: true })
+      .pipe(ws)
+      .on('finish', () => {
+        console.log('CSV file has been written.');
+        resolve();
+      });
+  });
+}).catch((err) => {
+  console.error('Error reading CSV file:', err);
+});
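As used above, readBlocksFromCSV expects hourly_blocks.csv to carry number and timestamp column headers (the data handler reads row.number and row.timestamp). A minimal illustrative input file, reusing the block that the removed getData call hard-coded plus one hypothetical follow-up row:

number,timestamp
19506984,1711429021
19507284,1711432621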
1 change: 1 addition & 0 deletions adapters/connext/package.json
@@ -17,6 +17,7 @@
   "dependencies": {
     "@connext/nxtp-utils": "2.4.1",
     "fast-csv": "5.0.1",
+    "csv-parser": "^3.0.0",
     "viem": "2.8.16"
   },
   "devDependencies": {
106 changes: 92 additions & 14 deletions adapters/connext/src/index.ts
@@ -1,4 +1,7 @@
-import { getUserTVLByBlock } from "./utils";
+import { getUserTVLByBlock, writeCsv } from "./utils";
+import csv from 'csv-parser';
+import fs from 'fs';
+import { write } from 'fast-csv';
 
 export { getUserTVLByBlock };
 
@@ -7,17 +10,92 @@ export { getUserTVLByBlock };
 // blockTimestamp: 1711044141,
 // }
 
 // const fileName = 'output.csv';
 // console.log('Getting TVL at block:', input.blockNumber);
 
-// returns all user balances at the input block by looking at the latest
-// balance for each user and token on the subgraph, capped at given block.
-getUserTVLByBlock(input).then((data) => {
-  if (data.length === 0) {
-    console.log("no data to write to file");
-    return;
-  }
-  writeCsv(data, fileName).then(() => {
-    console.log('CSV written to file:', fileName);
-  })
-});
+interface BlockData {
+  blockNumber: number;
+  blockTimestamp: number;
+}
+
+interface CSVRow {
+  block_number: number;
+  timestamp: number;
+  user_address: string;
+  token_address: string;
+  token_balance: string;
+  token_symbol: string;
+  usd_price: number;
+}
+
+// // returns all user balances at the input block by looking at the latest
+// // balance for each user and token on the subgraph, capped at given block.
+// getUserTVLByBlock(input).then((data) => {
+//   if (data.length === 0) {
+//     console.log("no data to write to file");
+//     return;
+//   }
+//   writeCsv(data, fileName).then(() => {
+//     console.log('CSV written to file:', fileName);
+//   })
+// });
+
+const readBlocksFromCSV = async (filePath: string): Promise<BlockData[]> => {
+  const blocks: BlockData[] = [];
+
+  await new Promise<void>((resolve, reject) => {
+    fs.createReadStream(filePath)
+      .pipe(csv()) // default csv-parser config: comma separator, first row as headers
+      .on('data', (row) => {
+        const blockNumber = parseInt(row.number, 10);
+        const blockTimestamp = parseInt(row.timestamp, 10);
+        if (!isNaN(blockNumber) && !isNaN(blockTimestamp)) {
+          blocks.push({ blockNumber, blockTimestamp });
+        }
+      })
+      .on('end', () => {
+        resolve();
+      })
+      .on('error', (err) => {
+        reject(err);
+      });
+  });
+
+  return blocks;
+};
+
+readBlocksFromCSV('hourly_blocks.csv').then(async (blocks: BlockData[]) => {
+  console.log(blocks);
+  const allCsvRows: any[] = []; // Accumulates CSV rows across all blocks
+
+  for (const block of blocks) {
+    try {
+      const result = await getUserTVLByBlock(block);
+      allCsvRows.push(...result);
+    } catch (error) {
+      console.error(`An error occurred for block ${block.blockNumber}:`, error);
+    }
+  }
+
+  await new Promise<void>((resolve) => {
+    const ws = fs.createWriteStream('outputData.csv', { flags: 'w' });
+    write(allCsvRows, { headers: true })
+      .pipe(ws)
+      .on('finish', () => {
+        console.log('CSV file has been written.');
+        resolve();
+      });
+  });
+}).catch((err) => {
+  console.error('Error reading CSV file:', err);
+});
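Both adapters duplicate the promise-wrapped write at the end of their main loops. A small helper could factor it out; this is a sketch, not part of the commit, and writeCsvRows is a hypothetical name (it relies only on the fast-csv write API already used above):

import fs from 'fs';
import { write } from 'fast-csv';

// Hypothetical helper: resolves once the output stream has finished flushing.
const writeCsvRows = (rows: any[], path: string): Promise<void> =>
  new Promise<void>((resolve, reject) => {
    const ws = fs.createWriteStream(path, { flags: 'w' });
    write(rows, { headers: true })
      .pipe(ws)
      .on('finish', () => resolve())
      .on('error', reject);
  });

// Usage: await writeCsvRows(allCsvRows, 'outputData.csv');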

