Skip to content

Commit

Permalink
csv fix
Browse files Browse the repository at this point in the history
  • Loading branch information
matt5346 committed Apr 30, 2024
1 parent 80107f6 commit de67ecb
Show file tree
Hide file tree
Showing 3 changed files with 122 additions and 110 deletions.
140 changes: 39 additions & 101 deletions adapters/overnight/src/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ import fs from 'fs';
import { write } from 'fast-csv';
import csv from 'csv-parser';

interface CSVRow {
export interface CSVRow {
block_number: number;
timestamp: number;
user_address: string;
Expand All @@ -19,80 +19,23 @@ interface CSVRow {
usd_price?: number;
}

// A single snapshot point: a block number paired with its on-chain timestamp.
// Consumed by getUserTVLByBlock and produced by readBlocksFromCSV.
export interface BlockData {
// Timestamp of the block — presumably Unix seconds from getTimestampAtBlock; TODO confirm units
blockTimestamp: number;
blockNumber: number
}

const getData = async () => {
const csvRows: CSVRow[] = [];
let csvRows: CSVRow[] = [];

for (let block of SNAPSHOTS_BLOCKS) {
const timestamp = await getTimestampAtBlock(block);
const positions = await getUserTVLByBlock({
blockNumber: block,
blockTimestamp: timestamp,
});

console.log("Positions: ", positions.length);
let lpValueByUsers = getLPValueByUserAndPoolFromPositions(positions);

lpValueByUsers.forEach((value, key) => {
value.forEach((lpValue) => {
const lpValueStr = lpValue.toString();
// Accumulate CSV row data
csvRows.push({
user_address: key,
token_address: LP_LYNEX,
token_symbol: LP_LYNEX_SYMBOL,
token_balance: BigInt(lpValueStr),
block_number: block,
timestamp,
usd_price: 0
});
})
});
}

// counting rebase by blocks range
// [0, 100, 200] -> gonna be counted like [0, 100] + [100, 200]
for (let block of SNAPSHOTS_BLOCKS) {
console.log(`Blocks: 0 -> ${block}`);

if (block === 0) continue;

const positionsRebaseUsd = await getRebaseForUsersByPoolAtBlock({
const list = await getUserTVLByBlock({
blockNumber: block,
token: OVN_CONTRACTS.USDPLUS
});

const positionsRebaseUsdt = await getRebaseForUsersByPoolAtBlock({
blockNumber: block,
token: OVN_CONTRACTS.USDTPLUS
blockTimestamp: timestamp,
});

console.log("positionsRebase: ", positionsRebaseUsd.size);

// all results are counted for the END block
const timestamp = await getTimestampAtBlock(block);

positionsRebaseUsd.forEach((value, key) => {
csvRows.push({
user_address: key,
token_symbol: USD_PLUS_SYMBOL,
token_balance: BigInt(value),
token_address: USD_PLUS_LINEA,
block_number: block,
timestamp,
usd_price: 0
});
});
positionsRebaseUsdt.forEach((value, key) => {
csvRows.push({
user_address: key,
token_symbol: USDT_PLUS_SYMBOL,
token_balance: BigInt(value),
token_address: USDT_PLUS_LINEA,
block_number: block,
timestamp,
usd_price: 0
});
});
csvRows.concat(list)
}

// Write the CSV output to a file
Expand All @@ -102,14 +45,9 @@ const getData = async () => {
});
};

// getData().then(() => {
// console.log("Done");
// });

// A single snapshot point: a block number paired with its on-chain timestamp.
export interface BlockData {
blockNumber: number;
// Timestamp of the block — presumably Unix seconds; TODO confirm units against getTimestampAtBlock
blockTimestamp: number;
}
// Kick off the snapshot export. A rejection handler is required here:
// without it a failed run rejects silently (unhandled promise rejection)
// instead of reporting the error and a failing exit code.
getData().then(() => {
  console.log("Done");
}).catch((err) => {
  console.error("getData failed:", err);
  process.exitCode = 1;
});

const readBlocksFromCSV = async (filePath: string): Promise<BlockData[]> => {
const blocks: BlockData[] = [];
Expand All @@ -135,29 +73,29 @@ const readBlocksFromCSV = async (filePath: string): Promise<BlockData[]> => {
return blocks;
};

readBlocksFromCSV('hourly_blocks.csv').then(async (blocks: BlockData[]) => {
console.log(blocks);
const allCsvRows: any[] = []; // Array to accumulate CSV rows for all blocks
const batchSize = 1000; // Size of batch to trigger writing to the file
let i = 0;

for (const block of blocks) {
try {
const result = await getUserTVLByBlock(block);
allCsvRows.push(...result);
} catch (error) {
console.error(`An error occurred for block ${block}:`, error);
}
}
await new Promise((resolve, reject) => {
const ws = fs.createWriteStream(`outputData.csv`, { flags: 'w' });
write(allCsvRows, { headers: true })
.pipe(ws)
.on("finish", () => {
console.log(`CSV file has been written.`);
resolve;
});
});
}).catch((err) => {
console.error('Error reading CSV file:', err);
});
// readBlocksFromCSV('hourly_blocks.csv').then(async (blocks: BlockData[]) => {
// console.log(blocks);
// const allCsvRows: any[] = []; // Array to accumulate CSV rows for all blocks
// const batchSize = 1000; // Size of batch to trigger writing to the file
// let i = 0;

// for (const block of blocks) {
// try {
// const result = await getUserTVLByBlock(block);
// allCsvRows.push(...result);
// } catch (error) {
// console.error(`An error occurred for block ${block}:`, error);
// }
// }
// await new Promise((resolve, reject) => {
// const ws = fs.createWriteStream(`outputData.csv`, { flags: 'w' });
// write(allCsvRows, { headers: true })
// .pipe(ws)
// .on("finish", () => {
// console.log(`CSV file has been written.`);
// resolve;
// });
// });
// }).catch((err) => {
// console.error('Error reading CSV file:', err);
// });
4 changes: 2 additions & 2 deletions adapters/overnight/src/sdk/config.ts
Original file line number Diff line number Diff line change
Expand Up @@ -55,11 +55,11 @@ export const RPC_URLS = {


export const SNAPSHOTS_BLOCKS = [
0, 100000
0, 1000000
];

export const CHUNKS_SPLIT = 20;
export const BLOCK_STEP = 5000;
export const BLOCK_STEP = 500;
export const LINEA_RPC = "https://lb.drpc.org/ogrpc?network=linea&dkey=AsCWb9aYukugqNphr9pEGw5L893HadYR7ooVbrjxQOzW"
export const LP_LYNEX_SYMBOL = "oLYNX";
export const LP_LYNEX = "0x63349BA5E1F71252eCD56E8F950D1A518B400b60"
Expand Down
88 changes: 81 additions & 7 deletions adapters/overnight/src/sdk/subgraphDetails.ts
Original file line number Diff line number Diff line change
@@ -1,9 +1,10 @@
import BN from "bignumber.js";
import { LINEA_RPC, CHAINS, OVN_CONTRACTS, PROTOCOLS, RPC_URLS, SUBGRAPH_URLS, ZERO_ADD, CHUNKS_SPLIT, BLOCK_STEP } from "./config";
import { LINEA_RPC, CHAINS, OVN_CONTRACTS, PROTOCOLS, RPC_URLS, SUBGRAPH_URLS, ZERO_ADD, CHUNKS_SPLIT, BLOCK_STEP, LP_LYNEX, LP_LYNEX_SYMBOL, USD_PLUS_SYMBOL, USD_PLUS_LINEA, USDT_PLUS_LINEA, USDT_PLUS_SYMBOL } from "./config";
import { createPublicClient, extractChain, http } from "viem";
import { linea } from "viem/chains";
import { ethers } from "ethers";
import { ERC20_ABI } from "./abi";
import { CSVRow } from "..";

export interface BlockData {
blockNumber: number;
Expand Down Expand Up @@ -275,14 +276,11 @@ export const getRebaseForUsersByPoolAtBlock = async ({

return listNetRebase;
}

// OVN pools
// 0x58aacbccaec30938cb2bb11653cad726e5c4194a usdc/usd+
// 0xc5f4c5c2077bbbac5a8381cf30ecdf18fde42a91 usdt+/usd+
export const getUserTVLByBlock = async ({
blockNumber,
blockTimestamp,
}: BlockData): Promise<Position[]> => {
const getPoolsData = async (blockNumber: number, blockTimestamp: number): Promise<CSVRow[]> => {
let whereQuery = blockNumber ? `where: { blockNumber_lt: ${blockNumber} }` : "";
const poolsData = SUBGRAPH_URLS[CHAINS.LINEA][PROTOCOLS.OVN]

Expand Down Expand Up @@ -333,7 +331,83 @@ export const getUserTVLByBlock = async ({
return result
}))

return allPoolsRes.flat(1);
const positions = allPoolsRes.flat(1);

console.log("Positions: ", positions.length);
let lpValueByUsers = getLPValueByUserAndPoolFromPositions(positions);
const csvRows: CSVRow[] = [];

lpValueByUsers.forEach((value, key) => {
value.forEach((lpValue) => {
const lpValueStr = lpValue.toString();
// Accumulate CSV row data
csvRows.push({
user_address: key,
token_address: LP_LYNEX,
token_symbol: LP_LYNEX_SYMBOL,
token_balance: BigInt(lpValueStr),
block_number: blockNumber,
timestamp: blockTimestamp,
usd_price: 0
});
})
});

return csvRows;
}

// counting rebase by blocks range
// [0, 100, 200] -> gonna be counted like [0, 100] + [100, 200]
// Collects the USD+ and USDT+ rebase accrued by every user from block 0 up to
// `block`, returned as CSV rows stamped with `block` / `blockTimestamp`.
const getRebaseData = async (block: number, blockTimestamp: number): Promise<CSVRow[]> => {
  console.log(`Blocks: 0 -> ${block}`);
  const csvRows: CSVRow[] = [];

  // The two token lookups are independent of each other — fetch them
  // concurrently instead of awaiting one after the other.
  const [positionsRebaseUsd, positionsRebaseUsdt] = await Promise.all([
    getRebaseForUsersByPoolAtBlock({
      blockNumber: block,
      token: OVN_CONTRACTS.USDPLUS
    }),
    getRebaseForUsersByPoolAtBlock({
      blockNumber: block,
      token: OVN_CONTRACTS.USDTPLUS
    }),
  ]);

  console.log("positionsRebase: ", positionsRebaseUsd.size);

  // One row per user per token; usd_price is left at 0 — presumably filled in
  // downstream by the CSV consumer; TODO confirm.
  positionsRebaseUsd.forEach((value, key) => {
    csvRows.push({
      user_address: key,
      token_symbol: USD_PLUS_SYMBOL,
      token_balance: BigInt(value),
      token_address: USD_PLUS_LINEA,
      block_number: block,
      timestamp: blockTimestamp,
      usd_price: 0
    });
  });
  positionsRebaseUsdt.forEach((value, key) => {
    csvRows.push({
      user_address: key,
      token_symbol: USDT_PLUS_SYMBOL,
      token_balance: BigInt(value),
      token_address: USDT_PLUS_LINEA,
      block_number: block,
      timestamp: blockTimestamp,
      usd_price: 0
    });
  });

  return csvRows;
}

// Full per-user CSV output for one snapshot block: pool-position (LP) rows
// plus rebase rows for both OVN tokens.
export const getUserTVLByBlock = async ({
  blockNumber,
  blockTimestamp,
}: BlockData): Promise<CSVRow[]> => {
  // The pool and rebase queries are independent — run them concurrently
  // rather than awaiting them sequentially.
  const [poolsCsv, rebaseCsv] = await Promise.all([
    getPoolsData(blockNumber, blockTimestamp),
    getRebaseData(blockNumber, blockTimestamp),
  ]);

  return poolsCsv.concat(rebaseCsv);
}

export const getLPValueByUserAndPoolFromPositions = (
Expand Down

0 comments on commit de67ecb

Please sign in to comment.