export getUserTVLByBlock
notdian committed May 17, 2024
1 parent 9c237b5 commit c4cf000
Showing 2 changed files with 42 additions and 23 deletions.
1 change: 1 addition & 0 deletions adapters/xfai/package.json
@@ -17,6 +17,7 @@
"@ethersproject/providers": "^5.7.2",
"ethers": "^5.7.2",
"fast-csv": "^5.0.1",
"csv-parser": "^3.0.0",
"lodash": "^4.17.21",
"pg": "^8.11.5"
},
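The only substantive change to package.json is the new csv-parser dependency, a streaming CSV reader; none of the changed code in this diff uses it yet. A minimal sketch of reading the adapter's outputData.csv back with it, assuming the standard csv-parser stream API (this reader is not part of the commit, and the filename is taken from index.ts below):

import fs from "fs";
import csv from "csv-parser";

// Read the rows previously written by the adapter into memory.
const rows: Record<string, string>[] = [];
fs.createReadStream("outputData.csv")
  .pipe(csv())
  .on("data", (row) => rows.push(row))
  .on("end", () => {
    console.log(`Parsed ${rows.length} rows from outputData.csv`);
  });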
64 changes: 41 additions & 23 deletions adapters/xfai/src/index.ts
@@ -12,10 +12,10 @@ import { uniq } from "lodash";
import { multicall } from "./sdk/mutlicall";
import { IXfaiPool__factory } from "./sdk/factories/IXfaiPool__factory";
import { getCreate2Address } from "ethers/lib/utils";
import { Block, StaticJsonRpcProvider } from "@ethersproject/providers";
import { format, write } from "@fast-csv/format";
import { time } from "console";
import { createWriteStream } from "fs";
import { StaticJsonRpcProvider } from "@ethersproject/providers";
import fs from "fs";
import { write } from "fast-csv";

function getPoolAddressFromTokenAddress(tokenAddress: string): string {
return getCreate2Address(
XFAI_FACTORY,
@@ -59,7 +59,7 @@ type ChangedLiquidity = {
blockNumber: bigint;
};

async function getUserTVLByBlock(
export async function getUserTVLByBlock(
block: BlockData
): Promise<OutputDataSchemaRow[]> {
const client = await getDBConnection();
@@ -80,7 +80,6 @@ async function getUserTVLByBlock(
values: [block.blockNumber, WETH],
});
const pgSqlShutdown = client.end();

const liquiditiesRows = liquidities.rows.map((r) => ({
...r,
pool: getPoolAddressFromTokenAddress(r.token),
@@ -131,14 +130,7 @@ async function getUserTVLByBlock(
);

const result: OutputDataSchemaRow[] = liquiditiesRows.flatMap(
({
owner,
token,
pool: poolAddress,
liquidity,
blockNumber: block_number,
timestamp,
}) => {
({ owner, token, pool: poolAddress, liquidity, timestamp }) => {
const poolSupply = poolSupplies[poolAddress];
const poolReserve = poolRes[poolAddress];
const tokenBalance =
@@ -148,7 +140,7 @@ async function getUserTVLByBlock(
return [
// Token reserve
{
block_number: Number(block_number),
block_number: Number(block.blockNumber),
timestamp,
user_address: owner,
token_address: token,
@@ -158,7 +150,7 @@ async function getUserTVLByBlock(
},
// WETH Reserve
{
block_number: Number(block_number),
block_number: Number(block.blockNumber),
timestamp,
user_address: owner,
token_address: WETH,
@@ -174,15 +166,41 @@ async function getUserTVLByBlock(
return result;
}

const ws = createWriteStream("outputData.csv");
const getData = async (blocks: BlockData[]) => {
console.log(blocks);
const allCsvRows: any[] = []; // Array to accumulate CSV rows for all blocks

getUserTVLByBlock({ blockNumber: 1140957, blockTimestamp: 1140957 })
.then((r) => {
write(r, { headers: true })
for (const block of blocks) {
try {
const result = await getUserTVLByBlock(block);
// Accumulate CSV rows for all blocks
for (let i = 0; i < result.length; i++) {
allCsvRows.push(result[i]);
}
} catch (error) {
console.error(`An error occurred for block ${block}:`, error);
}
}
await new Promise((resolve, reject) => {
const ws = fs.createWriteStream(`outputData.csv`, { flags: "w" });
write(allCsvRows, { headers: true })
.pipe(ws)
.on("finish", () => {
ws.close();
console.log("CSV file has been written.");
console.log(`CSV file has been written.`);
resolve;
});
});
};

getData([
{
blockNumber: 3941531,
blockTimestamp: 1631062400,
},
])
.then(() => {
console.log("Done");
})
.catch((e) => console.error(e));
.catch((err) => {
console.error("Error reading CSV file:", err);
});
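With getUserTVLByBlock now exported, other modules in the repository can import it directly instead of relying on the file-level getData runner above. A minimal sketch of such a consumer, assuming it sits next to adapters/xfai/src/index.ts (the module path is hypothetical; the block values are the ones used in the commit's own getData call):

// hypothetical consumer, e.g. adapters/xfai/src/run.ts
import { getUserTVLByBlock } from "./index";

async function main() {
  // getUserTVLByBlock takes a BlockData with blockNumber and blockTimestamp
  const rows = await getUserTVLByBlock({
    blockNumber: 3941531,
    blockTimestamp: 1631062400,
  });
  console.log(`Fetched ${rows.length} TVL rows at block 3941531`);
}

main().catch((err) => console.error(err));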
