diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md
new file mode 100644
index 00000000..cfd38954
--- /dev/null
+++ b/.github/pull_request_template.md
@@ -0,0 +1,109 @@
+## Title:
+
+- [ ] PR title follows the pattern "protocol_name : protocol_category : other comments". Example: "uniswap : dex : tvl by user"
+
+## Checklist before requesting a review
+1. **index.ts file**
+
+   - [ ] **Contains function**
+
+     ```
+     export const getUserTVLByBlock = async (blocks: BlockData) => {
+       const { blockNumber, blockTimestamp } = blocks;
+       // Retrieve data using block number and timestamp
+       // YOUR LOGIC HERE
+
+       return csvRows;
+     };
+     ```
+   - [ ] **getUserTVLByBlock function takes input with this schema**
+
+     ```
+     interface BlockData {
+       blockNumber: number;
+       blockTimestamp: number;
+     }
+     ```
+   - [ ] **getUserTVLByBlock function returns output in this schema**
+
+     ```
+     const csvRows: OutputDataSchemaRow[] = [];
+
+     type OutputDataSchemaRow = {
+       block_number: number;  // block_number which was given as input
+       timestamp: number;     // block timestamp which was given as input, epoch format
+       user_address: string;  // wallet address, all lowercase
+       token_address: string; // token address, all lowercase
+       token_balance: bigint; // token balance, raw amount; please don't divide by decimals
+       token_symbol: string;  // token symbol; empty string if it is not available
+       usd_price: number;     // assign 0 if not available
+     };
+     ```
+   - [ ] **Contains function**
+
+     ```
+     const readBlocksFromCSV = async (filePath: string): Promise<BlockData[]> => {
+       const blocks: BlockData[] = [];
+
+       await new Promise<void>((resolve, reject) => {
+         fs.createReadStream(filePath)
+           .pipe(csv()) // hourly_blocks.csv is comma-separated; csv-parser defaults apply
+           .on('data', (row) => {
+             const blockNumber = parseInt(row.number, 10);
+             const blockTimestamp = parseInt(row.timestamp, 10);
+             if (!isNaN(blockNumber) && blockTimestamp) {
+               blocks.push({ blockNumber: blockNumber, blockTimestamp });
+             }
+           })
+           .on('end', () => {
+             resolve();
+           })
+           .on('error', (err) => {
+             reject(err);
+           });
+       });
+
+       return blocks;
+     };
+     ```
+   - [ ] **Has this code**
+
+     ```
+     readBlocksFromCSV('hourly_blocks.csv').then(async (blocks: any[]) => {
+       console.log(blocks);
+       const allCsvRows: any[] = [];
+
+       for (const block of blocks) {
+         try {
+           const result = await getUserTVLByBlock(block);
+           allCsvRows.push(...result);
+         } catch (error) {
+           console.error(`An error occurred for block ${block}:`, error);
+         }
+       }
+       await new Promise<void>((resolve, reject) => {
+         const ws = fs.createWriteStream(`outputData.csv`, { flags: 'w' });
+         write(allCsvRows, { headers: true })
+           .pipe(ws)
+           .on("finish", () => {
+             console.log(`CSV file has been written.`);
+             resolve();
+           });
+       });
+     }).catch((err) => {
+       console.error('Error reading CSV file:', err);
+     });
+     ```
2. **Output data**
+   - [ ] Data is returned for underlying tokens only, not for special tokens (LP/veTokens, etc.)
+   - [ ] Follows the exact field sequence of OutputDataSchemaRow. This is needed because we want the same column ordering in the output csv
+   - Value of each field is:
+     - [ ] block_number *is the same as the input block number. This signifies TVL is as of this block_number.*
+     - [ ] timestamp is the same as the input timestamp. This signifies TVL is as of this timestamp. It is in epoch format.
+     - [ ] user_address is in lowercase
+     - [ ] token_address is in lowercase
+     - [ ] token_balance is the raw amount; please don't divide by decimals
+     - [ ] token_symbol is the symbol if present, empty string if the value is not available
+     - [ ] usd_price is the price if the value is available, 0 if it is not available
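+
+   For reference, here is what a valid `outputData.csv` might begin with (illustrative addresses and values only):
+
+   ```
+   block_number,timestamp,user_address,token_address,token_balance,token_symbol,usd_price
+   4243360,1714773599,0x0000000000000000000000000000000000000001,0x0000000000000000000000000000000000000002,1000000000000000000,WETH,0
+   ```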
diff --git a/.github/workflows/deploy-prod.yml b/.github/workflows/deploy-prod.yml
new file mode 100644
index 00000000..fbbd09fb
--- /dev/null
+++ b/.github/workflows/deploy-prod.yml
@@ -0,0 +1,20 @@
+name: Trigger deployment to production
+
+on:
+  push:
+    branches:
+      - main
+    paths:
+      - adapters/**
+
+jobs:
+  deploy-docker-image:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Trigger workflow
+        uses: peter-evans/repository-dispatch@v2
+        with:
+          token: ${{ secrets.GH_PAT }}
+          repository: delta-hq/openblocklabs-airflow-mwaa
+          event-type: trigger-workflow
+          client-payload: '{"ref": "main", "ENV": "prod"}'
diff --git a/.idea/.gitignore b/.idea/.gitignore
new file mode 100644
index 00000000..13566b81
--- /dev/null
+++ b/.idea/.gitignore
@@ -0,0 +1,8 @@
+# Default ignored files
+/shelf/
+/workspace.xml
+# Editor-based HTTP Client requests
+/httpRequests/
+# Datasource local storage ignored files
+/dataSources/
+/dataSources.local.xml
diff --git a/.idea/l2-lxp-liquidity-reward.iml b/.idea/l2-lxp-liquidity-reward.iml
new file mode 100644
index 00000000..d6ebd480
--- /dev/null
+++ b/.idea/l2-lxp-liquidity-reward.iml
@@ -0,0 +1,9 @@
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/.idea/misc.xml b/.idea/misc.xml
new file mode 100644
index 00000000..639900d1
--- /dev/null
+++ b/.idea/misc.xml
@@ -0,0 +1,6 @@
+
+
+
+
+
+
\ No newline at end of file
diff --git a/.idea/modules.xml b/.idea/modules.xml
new file mode 100644
index 00000000..8baa8fef
--- /dev/null
+++ b/.idea/modules.xml
@@ -0,0 +1,8 @@
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/.idea/runConfigurations.xml b/.idea/runConfigurations.xml
new file mode 100644
index 00000000..797acea5
--- /dev/null
+++ b/.idea/runConfigurations.xml
@@ -0,0 +1,10 @@
+
+
+
+
+
\ No newline at end of file
diff --git a/.idea/vcs.xml b/.idea/vcs.xml
new file mode 100644
index 00000000..35eb1ddf
--- /dev/null
+++ b/.idea/vcs.xml
@@ -0,0 +1,6 @@
+
+
+
+
+
+
\ No newline at end of file
diff --git a/README.md b/README.md
index 304d2aa1..226eb872 100644
--- a/README.md
+++ b/README.md
@@ -178,7 +178,7 @@ And output a CSV file named `outputData.csv` with headers with the following col
 e.g. `adapters/renzo/src/index.ts`
 
 For testing the adapter code for a single hourly block, use the following `hourly_blocks.csv` file:
-```
+```
 number,timestamp
 4243360,1714773599
 ```
diff --git a/adapters/block_numbers.tsv b/adapters/block_numbers.tsv
deleted file mode 100644
index 3b7f0254..00000000
--- a/adapters/block_numbers.tsv
+++ /dev/null
@@ -1,3 +0,0 @@
-block_timestamp number
-1711023841 3041467
-1670496243 0
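The adapter packages in this PR (`adapters/clip_finance` below, for example) share the same layout, so a local test run, assuming `hourly_blocks.csv` has been copied into the package directory first, would look roughly like:

```sh
cd adapters/clip_finance
npm install       # csv-parser, fast-csv, viem, big.js, ...
npm run compile   # tsc, emits dist/
npm run start     # node dist/index.js: reads hourly_blocks.csv, writes outputData.csv
```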
diff --git a/adapters/clip_finance/package.json b/adapters/clip_finance/package.json
new file mode 100644
index 00000000..934e320c
--- /dev/null
+++ b/adapters/clip_finance/package.json
@@ -0,0 +1,32 @@
+{
+  "name": "clip-finance",
+  "version": "1.0.0",
+  "description": "",
+  "main": "index.js",
+  "scripts": {
+    "test": "echo \"Error: no test specified\" && exit 1",
+    "start": "node dist/index.js",
+    "compile": "tsc",
+    "watch": "tsc -w",
+    "clear": "rm -rf dist"
+  },
+  "keywords": [],
+  "author": "",
+  "license": "ISC",
+  "dependencies": {
+    "@types/big.js": "^6.2.2",
+    "big.js": "^6.2.1",
+    "bignumber.js": "^9.1.2",
+    "csv-parser": "^3.0.0",
+    "decimal.js-light": "^2.5.1",
+    "fast-csv": "^5.0.1",
+    "jsbi": "^4.3.0",
+    "tiny-invariant": "^1.3.1",
+    "toformat": "^2.0.0",
+    "viem": "^2.8.13"
+  },
+  "devDependencies": {
+    "@types/node": "^20.11.17",
+    "typescript": "^5.3.3"
+  }
+}
\ No newline at end of file
diff --git a/adapters/clip_finance/src/index.ts b/adapters/clip_finance/src/index.ts
new file mode 100644
index 00000000..dd5fb2b8
--- /dev/null
+++ b/adapters/clip_finance/src/index.ts
@@ -0,0 +1,100 @@
+import { getTimestampAtBlock, getUserBalanceSnapshotAtBlock } from "./sdk/subgraphDetails";
+import fs from 'fs';
+import csv from 'csv-parser';
+import { write } from 'fast-csv';
+
+interface CSVRow {
+  block_number: number;
+  timestamp: number;
+  user_address: string;
+  token_address: string;
+  token_balance: bigint;
+  token_symbol: string;
+  usd_price: number;
+}
+
+interface BlockData {
+  blockNumber: number;
+  blockTimestamp: number;
+}
+
+export const getUserTVLByBlock = async (blocks: BlockData) => {
+  const { blockNumber, blockTimestamp } = blocks;
+  const snapshotBlocks: number[] = [blockNumber];
+
+  const csvRows: CSVRow[] = [];
+
+  for (const block of snapshotBlocks) {
+    let snapshots = await getUserBalanceSnapshotAtBlock(block, "");
+
+    // const timestamp = await getTimestampAtBlock(block);
+
+    for (const snapshot of snapshots) {
+      const csvRow: CSVRow = {
+        block_number: block,
+        timestamp: blockTimestamp,
+        user_address: snapshot.id,
+        token_address: snapshot.token,
+        token_balance: BigInt(snapshot.balance.toString()),
+        token_symbol: snapshot.tokenSymbol,
+        usd_price: 0
+      };
+      csvRows.push(csvRow);
+    }
+  }
+
+  console.log("Total rows:", csvRows.length);
+
+  return csvRows;
+};
+
+const readBlocksFromCSV = async (filePath: string): Promise<BlockData[]> => {
+  const blocks: BlockData[] = [];
+
+  await new Promise<void>((resolve, reject) => {
+    fs.createReadStream(filePath)
+      .pipe(csv()) // hourly_blocks.csv is comma-separated; csv-parser defaults apply
+      .on('data', (row) => {
+        const blockNumber = parseInt(row.number, 10);
+        const blockTimestamp = parseInt(row.timestamp, 10);
+        if (!isNaN(blockNumber) && blockTimestamp) {
+          blocks.push({ blockNumber: blockNumber, blockTimestamp });
+        }
+      })
+      .on('end', () => {
+        resolve();
+      })
+      .on('error', (err) => {
+        reject(err);
+      });
+  });
+
+  return blocks;
+};
+
+readBlocksFromCSV('hourly_blocks.csv').then(async (blocks: any[]) => {
+  console.log(blocks);
+  const allCsvRows: any[] = [];
+
+  for (const block of blocks) {
+    try {
+      const result = await getUserTVLByBlock(block);
+      allCsvRows.push(...result);
+    } catch (error) {
+      console.error(`An error occurred for block ${block}:`, error);
+    }
+  }
+  await new Promise<void>((resolve, reject) => {
+    const ws = fs.createWriteStream(`outputData.csv`, { flags: 'w' });
+    write(allCsvRows, { headers: true })
+      .pipe(ws)
+      .on("finish", () => {
+        console.log(`CSV file has been written.`);
+        resolve();
+      });
+  });
+
+}).catch((err) => {
+  console.error('Error reading CSV file:', err);
+});
\ No newline at end of file
diff --git a/adapters/clip_finance/src/sdk/config.ts b/adapters/clip_finance/src/sdk/config.ts
new file mode 100644
index 00000000..0b2dbb2a
--- /dev/null
+++ b/adapters/clip_finance/src/sdk/config.ts
@@ -0,0 +1,19 @@
+export const enum CHAINS {
+  LINEA = 59144,
+}
+
+export const SUBGRAPH_URLS = {
+  [CHAINS.LINEA]:
+    "https://api.goldsky.com/api/public/project_cltzfe75l0y4u01s98n3c7fmu/subgraphs/clip-finance-shares-token/v2.4/gn",
+};
+
+export const RESERVE_SUBGRAPH_URLS = {
+  [CHAINS.LINEA]:
+    "https://api.goldsky.com/api/public/project_cltzfe75l0y4u01s98n3c7fmu/subgraphs/clip-finance-shares-token/v2.5/gn",
+}
+
+export const RPC_URLS = {
+  [CHAINS.LINEA]:
+    "https://rpc.linea.build",
+};
\ No newline at end of file
diff --git a/adapters/clip_finance/src/sdk/subgraphDetails.ts b/adapters/clip_finance/src/sdk/subgraphDetails.ts
new file mode 100644
index 00000000..4a9c2db8
--- /dev/null
+++ b/adapters/clip_finance/src/sdk/subgraphDetails.ts
@@ -0,0 +1,317 @@
+import { createPublicClient, extractChain, http } from "viem";
+import { linea } from "viem/chains";
+import { SUBGRAPH_URLS, CHAINS, RPC_URLS, RESERVE_SUBGRAPH_URLS } from "./config";
+import Big from "big.js";
+
+export interface UserBalanceSnapshot {
+  id: string;
+  balance: Big;
+  token: string;
+  tokenSymbol: string;
+}
+
+export interface User {
+  id: string;
+  balance: Big;
+  token: string;
+  tokenSymbol: string;
+}
+
+interface SharePricesSnapshot {
+  id: string;
+  price0: Big;
+  price01: Big;
+  price1: Big;
+  price10: Big;
+  token0: string;
+  token0Symbol: string;
+  token1: string;
+  token1Symbol: string;
+}
+
+interface UserSharesSnapshot {
+  id: string;
+  shares0: Big;
+  shares1: Big;
+}
+
+function delay(ms: number) {
+  return new Promise(resolve => setTimeout(resolve, ms));
+}
+
+export const getUserBalanceSnapshotAtBlock = async (
+  blockNumber: number,
+  address: string
+): Promise<UserBalanceSnapshot[]> => {
+  let subgraphUrl = SUBGRAPH_URLS[CHAINS.LINEA];
+  let blockQuery = blockNumber !== 0 ? ` block: {number: ${blockNumber}}` : ``;
+
+  let idQuery = address !== "" ? `id: "${address.toLowerCase()}"` : ``;
+  let showZeroBalances = false;
+  let balanceNotZeroQuery = showZeroBalances ?
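+  // Note on the fetch pattern used by every query in this function: results are
+  // paged 1,000 rows at a time (first:1000, skip += 1000) until a page comes
+  // back short of 1,000, and failed requests are retried every 15s for up to
+  // 10 attempts (the sharePrices query also falls back to RESERVE_SUBGRAPH_URLS
+  // after the first failure).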
"" : `balance_gt: 0`; + let whereQueries = [idQuery, balanceNotZeroQuery]; + + let skip = 0; + let fetchNext = true; + let result: UserBalanceSnapshot[] = []; + const sharePricesMap = new Map(); + while (fetchNext) { + const query = `{ + sharePrices( + ${blockQuery} + first:1000, skip:${skip} + ){ + id + price0 + price01 + price1 + price10 + token0 + token0Symbol + token1 + token1Symbol + } + } + `; + let count = 0; + let response; + do { + response = await fetch(subgraphUrl, { + method: "POST", + body: JSON.stringify({ query }), + headers: { "Content-Type": "application/json" }, + }); + if (response.status != 200) { + subgraphUrl = RESERVE_SUBGRAPH_URLS[CHAINS.LINEA]; + response = await fetch(subgraphUrl, { + method: "POST", + body: JSON.stringify({ query }), + headers: { "Content-Type": "application/json" }, + }); + } + if (response.status != 200) { + console.log("sharePrices fetching failed. Try again in 15 sec"); + await delay(15000); + } + ++count + } while ((response.status != 200) && (count < 10)) + + let data = await response.json(); + let snapshots = data.data.sharePrices; + for (const snapshot of snapshots) { + const sharePriceSnapshot: SharePricesSnapshot = { + id: snapshot.id, + price0: Big(snapshot.price0), + price01: Big(snapshot.price01), + price1 : Big(snapshot.price1), + price10: Big(snapshot.price10), + token0 : snapshot.token0, + token0Symbol: snapshot.token0Symbol, + token1 : snapshot.token1, + token1Symbol: snapshot.token1Symbol, + } + sharePricesMap.set(snapshot.id, sharePriceSnapshot); + } + if (snapshots.length < 1000) { + fetchNext = false; + } else { + skip += 1000; + } + } + + skip = 0; + fetchNext = true; + const balanceMap = new Map(); + const strategyRouterSharesMap = new Map(); + let strategyRouterBalance = new Map(); + const addBalance = (balance: UserBalanceSnapshot, share: UserBalanceSnapshot) => { + const user= share.id.substring(0, 42); + const key = user.concat(balance.token); + if (user == "0xa663f143055254a503467ff8b18aa9e70b9455b6") { + strategyRouterBalance.set(key.concat(balance.token), balance); + } else if (balance.balance.gt(0)) { + if (!balanceMap.has(key)) { + balanceMap.set(key, balance); + } else { + const oldUserBalanceSnapshot = balanceMap.get(key); + if (oldUserBalanceSnapshot) { + oldUserBalanceSnapshot.balance = oldUserBalanceSnapshot.balance.plus(balance.balance); + balanceMap.set(key, oldUserBalanceSnapshot); + } + } + } + }; + + while (fetchNext) { + const query = `{ + userShares( + ${blockQuery} + first:1000, skip:${skip} + ){ + id + shares0 + shares1 + } + } + `; + let count = 0; + let response; + do { + response = await fetch(subgraphUrl, { + method: "POST", + body: JSON.stringify({ query }), + headers: { "Content-Type": "application/json" }, + }); + if (response.status != 200) { + await delay(15000); + console.log("userShares fetching failed. 
Try again in 15 sec"); + } + ++count; + } while ((count < 10) && (response.status != 200)) { + + } + let data = await response.json(); + let snapshots = data.data.userShares; + for (const snapshot of snapshots) { + const contract = "0x".concat(snapshot.id.substring(42)); + const sharePrice = sharePricesMap.get(contract); + const user = snapshot.id.substring(0, 42); + if (sharePrice) { + let userBalanceSnapshot: UserBalanceSnapshot = { + id: "", + balance: Big(0), + token : "", + tokenSymbol: "", + + }; + if (sharePrice.price0.gt(0)) { + userBalanceSnapshot = { + id: user.toLowerCase(), + balance: Big(Math.round(Big(snapshot.shares0).mul(sharePrice.price0).div(1e18).toNumber())), + token : sharePrice.token0.toLowerCase(), + tokenSymbol: sharePrice.token0Symbol, + + } + addBalance(userBalanceSnapshot, snapshot); + } + if (sharePrice.price01.gt(0)) { + userBalanceSnapshot = { + id: user.toLowerCase(), + balance: Big(Math.round(Big(snapshot.shares0).mul(sharePrice.price01).div(1e18).toNumber())), + token : sharePrice.token1.toLowerCase(), + tokenSymbol: sharePrice.token1Symbol, + + } + addBalance(userBalanceSnapshot, snapshot); + } + if (sharePrice.price1.gt(0)) { + userBalanceSnapshot = { + id: user.toLowerCase(), + balance: Big(Math.round(Big(snapshot.shares1).mul(sharePrice.price1).div(1e18).toNumber())), + token : sharePrice.token1.toLowerCase(), + tokenSymbol: sharePrice.token1Symbol, + } + addBalance(userBalanceSnapshot, snapshot); + } + if (sharePrice.price10.gt(0)) { + userBalanceSnapshot = { + id: user.toLowerCase(), + balance: Big(Math.round(Big(snapshot.shares1).mul(sharePrice.price10).div(1e18).toNumber())), + token : sharePrice.token0.toLowerCase(), + tokenSymbol: sharePrice.token0Symbol, + + } + addBalance(userBalanceSnapshot, snapshot); + } + } else { + if (Big(snapshot.shares0).gt(0)) { + strategyRouterSharesMap.set(snapshot.id, snapshot); + } + } + } + if (snapshots.length < 1000) { + fetchNext = false; + } else { + skip += 1000; + } + } + + const query = `{ + sharesTokenSharesCounts ( + ${blockQuery} + ){ + id + total + } + } + `; + + let count = 0; + let response; + do { + response = await fetch(subgraphUrl, { + method: "POST", + body: JSON.stringify({ query }), + headers: { "Content-Type": "application/json" }, + }); + if (response.status != 200) { + console.log("sharesTokenSharesCounts fetching failed. 
Try again in 15 sec"); + await delay(15000) + } + ++count; + } while ((count < 10) && (response.status != 200)); + let data = await response.json(); + let snapshots = data.data.sharesTokenSharesCounts; + let strategyRouterTotalShares: Big = Big(0); + for (const snapshot of snapshots) { + strategyRouterTotalShares = Big(snapshot.total); + } + let countedTotalShares: Big = Big(0); + if (strategyRouterTotalShares.gt(0)) { + let checkBalance = Big(0); + strategyRouterSharesMap.forEach((share: UserSharesSnapshot, id: string)=> { + const user = share.id.substring(0, 42); + for (const srbKey of strategyRouterBalance.keys()) { + const balance = strategyRouterBalance.get(srbKey); + if (balance) { + countedTotalShares = countedTotalShares.plus(Big(share.shares0)); + const userBalance : UserBalanceSnapshot = { + id: user.toLowerCase(), + balance: Big(Math.round(Big(share.shares0).mul(balance.balance).div(strategyRouterTotalShares).toNumber())), + token : balance.token.toLowerCase(), + tokenSymbol: balance.tokenSymbol + } + + checkBalance = checkBalance.plus(userBalance.balance); + const key = user.concat(balance.token); + if (!balanceMap.has(key)) { + balanceMap.set(key, userBalance); + } else { + const oldUserBalance = balanceMap.get(key); + if (oldUserBalance) { + oldUserBalance.balance = oldUserBalance.balance.plus(userBalance.balance); + balanceMap.set(key, userBalance); + } + } + } + } + }); + } + + return Array.from(balanceMap.values()); +}; + +export const getTimestampAtBlock = async (blockNumber: number) => { + const publicClient = createPublicClient({ + chain: extractChain({ chains: [linea], id: linea.id }), + transport: http(RPC_URLS[CHAINS.LINEA], { + retryCount: 5, + timeout: 60_000, + }), + }); + + const block = await publicClient.getBlock({ + blockNumber: BigInt(blockNumber), + }); + return Number(block.timestamp * 1000n); +}; \ No newline at end of file diff --git a/adapters/clip_finance/tsconfig.json b/adapters/clip_finance/tsconfig.json new file mode 100644 index 00000000..55603863 --- /dev/null +++ b/adapters/clip_finance/tsconfig.json @@ -0,0 +1,109 @@ +{ + "compilerOptions": { + /* Visit https://aka.ms/tsconfig to read more about this file */ + + /* Projects */ + // "incremental": true, /* Save .tsbuildinfo files to allow for incremental compilation of projects. */ + // "composite": true, /* Enable constraints that allow a TypeScript project to be used with project references. */ + // "tsBuildInfoFile": "./.tsbuildinfo", /* Specify the path to .tsbuildinfo incremental compilation file. */ + // "disableSourceOfProjectReferenceRedirect": true, /* Disable preferring source files instead of declaration files when referencing composite projects. */ + // "disableSolutionSearching": true, /* Opt a project out of multi-project reference checking when editing. */ + // "disableReferencedProjectLoad": true, /* Reduce the number of projects loaded automatically by TypeScript. */ + + /* Language and Environment */ + "target": "es2022", /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */ + // "lib": [], /* Specify a set of bundled library declaration files that describe the target runtime environment. */ + // "jsx": "preserve", /* Specify what JSX code is generated. */ + // "experimentalDecorators": true, /* Enable experimental support for legacy experimental decorators. */ + // "emitDecoratorMetadata": true, /* Emit design-type metadata for decorated declarations in source files. 
*/ + // "jsxFactory": "", /* Specify the JSX factory function used when targeting React JSX emit, e.g. 'React.createElement' or 'h'. */ + // "jsxFragmentFactory": "", /* Specify the JSX Fragment reference used for fragments when targeting React JSX emit e.g. 'React.Fragment' or 'Fragment'. */ + // "jsxImportSource": "", /* Specify module specifier used to import the JSX factory functions when using 'jsx: react-jsx*'. */ + // "reactNamespace": "", /* Specify the object invoked for 'createElement'. This only applies when targeting 'react' JSX emit. */ + // "noLib": true, /* Disable including any library files, including the default lib.d.ts. */ + // "useDefineForClassFields": true, /* Emit ECMAScript-standard-compliant class fields. */ + // "moduleDetection": "auto", /* Control what method is used to detect module-format JS files. */ + + /* Modules */ + "module": "commonjs", /* Specify what module code is generated. */ + "rootDir": "src/", /* Specify the root folder within your source files. */ + // "moduleResolution": "node10", /* Specify how TypeScript looks up a file from a given module specifier. */ + // "baseUrl": "./", /* Specify the base directory to resolve non-relative module names. */ + // "paths": {}, /* Specify a set of entries that re-map imports to additional lookup locations. */ + // "rootDirs": [], /* Allow multiple folders to be treated as one when resolving modules. */ + // "typeRoots": [], /* Specify multiple folders that act like './node_modules/@types'. */ + // "types": [], /* Specify type package names to be included without being referenced in a source file. */ + // "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */ + // "moduleSuffixes": [], /* List of file name suffixes to search when resolving a module. */ + // "allowImportingTsExtensions": true, /* Allow imports to include TypeScript file extensions. Requires '--moduleResolution bundler' and either '--noEmit' or '--emitDeclarationOnly' to be set. */ + // "resolvePackageJsonExports": true, /* Use the package.json 'exports' field when resolving package imports. */ + // "resolvePackageJsonImports": true, /* Use the package.json 'imports' field when resolving imports. */ + // "customConditions": [], /* Conditions to set in addition to the resolver-specific defaults when resolving imports. */ + // "resolveJsonModule": true, /* Enable importing .json files. */ + // "allowArbitraryExtensions": true, /* Enable importing files with any extension, provided a declaration file is present. */ + // "noResolve": true, /* Disallow 'import's, 'require's or ''s from expanding the number of files TypeScript should add to a project. */ + + /* JavaScript Support */ + // "allowJs": true, /* Allow JavaScript files to be a part of your program. Use the 'checkJS' option to get errors from these files. */ + // "checkJs": true, /* Enable error reporting in type-checked JavaScript files. */ + // "maxNodeModuleJsDepth": 1, /* Specify the maximum folder depth used for checking JavaScript files from 'node_modules'. Only applicable with 'allowJs'. */ + + /* Emit */ + // "declaration": true, /* Generate .d.ts files from TypeScript and JavaScript files in your project. */ + // "declarationMap": true, /* Create sourcemaps for d.ts files. */ + // "emitDeclarationOnly": true, /* Only output d.ts files and not JavaScript files. */ + // "sourceMap": true, /* Create source map files for emitted JavaScript files. */ + // "inlineSourceMap": true, /* Include sourcemap files inside the emitted JavaScript. 
*/ + // "outFile": "./", /* Specify a file that bundles all outputs into one JavaScript file. If 'declaration' is true, also designates a file that bundles all .d.ts output. */ + "outDir": "dist/", /* Specify an output folder for all emitted files. */ + // "removeComments": true, /* Disable emitting comments. */ + // "noEmit": true, /* Disable emitting files from a compilation. */ + // "importHelpers": true, /* Allow importing helper functions from tslib once per project, instead of including them per-file. */ + // "importsNotUsedAsValues": "remove", /* Specify emit/checking behavior for imports that are only used for types. */ + // "downlevelIteration": true, /* Emit more compliant, but verbose and less performant JavaScript for iteration. */ + // "sourceRoot": "", /* Specify the root path for debuggers to find the reference source code. */ + // "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */ + // "inlineSources": true, /* Include source code in the sourcemaps inside the emitted JavaScript. */ + // "emitBOM": true, /* Emit a UTF-8 Byte Order Mark (BOM) in the beginning of output files. */ + // "newLine": "crlf", /* Set the newline character for emitting files. */ + // "stripInternal": true, /* Disable emitting declarations that have '@internal' in their JSDoc comments. */ + // "noEmitHelpers": true, /* Disable generating custom helper functions like '__extends' in compiled output. */ + // "noEmitOnError": true, /* Disable emitting files if any type checking errors are reported. */ + // "preserveConstEnums": true, /* Disable erasing 'const enum' declarations in generated code. */ + // "declarationDir": "./", /* Specify the output directory for generated declaration files. */ + // "preserveValueImports": true, /* Preserve unused imported values in the JavaScript output that would otherwise be removed. */ + + /* Interop Constraints */ + // "isolatedModules": true, /* Ensure that each file can be safely transpiled without relying on other imports. */ + // "verbatimModuleSyntax": true, /* Do not transform or elide any imports or exports not marked as type-only, ensuring they are written in the output file's format based on the 'module' setting. */ + // "allowSyntheticDefaultImports": true, /* Allow 'import x from y' when a module doesn't have a default export. */ + "esModuleInterop": true, /* Emit additional JavaScript to ease support for importing CommonJS modules. This enables 'allowSyntheticDefaultImports' for type compatibility. */ + // "preserveSymlinks": true, /* Disable resolving symlinks to their realpath. This correlates to the same flag in node. */ + "forceConsistentCasingInFileNames": true, /* Ensure that casing is correct in imports. */ + + /* Type Checking */ + "strict": true, /* Enable all strict type-checking options. */ + // "noImplicitAny": true, /* Enable error reporting for expressions and declarations with an implied 'any' type. */ + // "strictNullChecks": true, /* When type checking, take into account 'null' and 'undefined'. */ + // "strictFunctionTypes": true, /* When assigning functions, check to ensure parameters and the return values are subtype-compatible. */ + // "strictBindCallApply": true, /* Check that the arguments for 'bind', 'call', and 'apply' methods match the original function. */ + // "strictPropertyInitialization": true, /* Check for class properties that are declared but not set in the constructor. */ + // "noImplicitThis": true, /* Enable error reporting when 'this' is given the type 'any'. 
*/ + // "useUnknownInCatchVariables": true, /* Default catch clause variables as 'unknown' instead of 'any'. */ + // "alwaysStrict": true, /* Ensure 'use strict' is always emitted. */ + // "noUnusedLocals": true, /* Enable error reporting when local variables aren't read. */ + // "noUnusedParameters": true, /* Raise an error when a function parameter isn't read. */ + // "exactOptionalPropertyTypes": true, /* Interpret optional property types as written, rather than adding 'undefined'. */ + // "noImplicitReturns": true, /* Enable error reporting for codepaths that do not explicitly return in a function. */ + // "noFallthroughCasesInSwitch": true, /* Enable error reporting for fallthrough cases in switch statements. */ + // "noUncheckedIndexedAccess": true, /* Add 'undefined' to a type when accessed using an index. */ + // "noImplicitOverride": true, /* Ensure overriding members in derived classes are marked with an override modifier. */ + // "noPropertyAccessFromIndexSignature": true, /* Enforces using indexed accessors for keys declared using an indexed type. */ + // "allowUnusedLabels": true, /* Disable error reporting for unused labels. */ + // "allowUnreachableCode": true, /* Disable error reporting for unreachable code. */ + + /* Completeness */ + // "skipDefaultLibCheck": true, /* Skip type checking .d.ts files that are included with TypeScript. */ + "skipLibCheck": true /* Skip type checking all .d.ts files. */ + } +} \ No newline at end of file diff --git a/adapters/connext/src/utils/getUserTvlByBlock.ts b/adapters/connext/src/utils/getUserTvlByBlock.ts index e8b5fd55..f3ba22b5 100644 --- a/adapters/connext/src/utils/getUserTvlByBlock.ts +++ b/adapters/connext/src/utils/getUserTvlByBlock.ts @@ -1,4 +1,4 @@ -import { getCompositeBalances, getLpAccountBalanceAtBlock } from "./subgraph"; +import { getBlock, getCompositeBalances, getLpAccountBalanceAtBlock } from "./subgraph"; import { BlockData, OutputDataSchemaRow } from "./types"; export const getUserTVLByBlock = async (blocks: BlockData): Promise => { @@ -9,13 +9,16 @@ export const getUserTVLByBlock = async (blocks: BlockData): Promise { + composite.forEach(({ account, underlyingBalances, underlyingTokens }) => { results.push(...underlyingBalances.map((b, i) => { const formatted: OutputDataSchemaRow = { - timestamp: +modified, - block_number: +block, + timestamp: +timestamp.toString(), + block_number: blockNumber, user_address: account.id, token_address: underlyingTokens[i], token_balance: BigInt(b), diff --git a/adapters/connext/src/utils/subgraph.ts b/adapters/connext/src/utils/subgraph.ts index 2be6bc23..74e8b642 100644 --- a/adapters/connext/src/utils/subgraph.ts +++ b/adapters/connext/src/utils/subgraph.ts @@ -1,7 +1,7 @@ import { PoolInformation, getPoolInformationFromLpToken } from "./cartographer"; import { LpAccountBalanceHourly, SubgraphResult } from "./types"; import { linea } from "viem/chains"; -import { createPublicClient, formatUnits, http, parseUnits } from "viem"; +import { createPublicClient, http, parseUnits } from "viem"; export const CONNEXT_SUBGRAPH_QUERY_URL = "https://api.goldsky.com/api/public/project_clssc64y57n5r010yeoly05up/subgraphs/amarok-stableswap-analytics/1.0/gn"; export const LINEA_CHAIN_ID = 59144; @@ -209,3 +209,8 @@ const appendSubgraphData = (data: LpAccountBalanceHourly[], existing: Map { + const client = createPublicClient({ chain: linea, transport: http() }); + const block = await client.getBlock({ blockNumber: BigInt(blockNumber) }); + return block; +} \ No newline at end of file diff --git 
a/adapters/gravita/src/index.ts b/adapters/gravita/src/index.ts index dd7ae294..1cdd4b5a 100644 --- a/adapters/gravita/src/index.ts +++ b/adapters/gravita/src/index.ts @@ -5,7 +5,7 @@ import { write } from "fast-csv"; /** * The objective is to quantify: * - TVL on Linea (size of collateral minting GRAI on Linea) - * - GRAI stability pool deposits on Linea + * - GRAI stability pool deposits on Linea * * For that, we'll be querying an existing Gravita Subgraph deployed on TheGraph. */ @@ -30,6 +30,8 @@ const GRAI_ADDRESS = "0x894134a25a5faC1c2C26F1d8fBf05111a3CB9487"; const GRAVITA_SUBGRAPH_QUERY_URL = "https://api.studio.thegraph.com/query/54829/gravita-sp-lp-linea-v1/version/latest"; +const PAGE_SIZE = 1_000 + const post = async (url: string, data: any): Promise => { const response = await fetch(url, { method: "POST", @@ -44,15 +46,17 @@ const post = async (url: string, data: any): Promise => { const getStabilityPoolData = async ( blockNumber: number, - blockTimestamp: number + blockTimestamp: number, + lastId = '' ): Promise => { const GRAVITA_STABILITY_POOL_QUERY = ` query StabilityPoolQuery { poolDeposits( - first: 1000, - where: { poolName: "Gravita StabilityPool", withdrawTxHash: null }, + first: ${PAGE_SIZE}, + where: { poolName: "Gravita StabilityPool", withdrawTxHash: null, id_gt: "${lastId}" }, block: { number: ${blockNumber} } ) { + id user { id } @@ -75,20 +79,26 @@ const getStabilityPoolData = async ( usd_price: 0, }); } + if (responseJson.data.poolDeposits.length == PAGE_SIZE) { + const lastRecord = responseJson.data.poolDeposits[responseJson.data.poolDeposits.length - 1] as any + csvRows.push(...await getStabilityPoolData(blockNumber, blockTimestamp, lastRecord.id)) + } return csvRows; }; const getVesselDepositsData = async ( blockNumber: number, - blockTimestamp: number + blockTimestamp: number, + lastId = '' ): Promise => { const GRAVITA_VESSELS_QUERY = ` query VesselsQuery { vessels( - first: 1000, - where: { closeTimestamp: null }, + first: ${PAGE_SIZE}, + where: { closeTimestamp: null, id_gt: "${lastId}" }, block: { number: ${blockNumber} } ) { + id asset user { id @@ -119,6 +129,10 @@ const getVesselDepositsData = async ( usd_price: 0, }); } + if (responseJson.data.vessels.length == PAGE_SIZE) { + const lastRecord = responseJson.data.vessels[responseJson.data.vessels.length - 1] as any + csvRows.push(...await getVesselDepositsData(blockNumber, blockTimestamp, lastRecord.id)) + } return csvRows; }; diff --git a/adapters/interport/src/index.ts b/adapters/interport/src/index.ts index 34a81141..582eef2a 100644 --- a/adapters/interport/src/index.ts +++ b/adapters/interport/src/index.ts @@ -1,6 +1,10 @@ import {client} from "./utils/client"; import {searchStartBlock, stablecoinFarmAddress, vaultsAddresses, zeroAddress} from "./utils/constants"; import {vaultAbi} from "./utils/vault-abi" +import fs from "fs"; +import { write } from "fast-csv"; +import csv from 'csv-parser'; + interface BlockData { blockNumber: number; @@ -17,14 +21,14 @@ type OutputDataSchemaRow = { usd_price: number; //assign 0 if not available }; -const getBlockTimestamp = async (blockNumber: bigint) => { - const data = await client.getBlock({ - blockNumber: blockNumber - }) - return Number(data.timestamp); -} +// const getBlockTimestamp = async (blockNumber: bigint) => { +// const data = await client.getBlock({ +// blockNumber: blockNumber +// }) +// return Number(data.timestamp); +// } -const collectTransferEvents = async (events: any[], token_symbol: string) => { +const collectTransferEvents = async 
(events: any[], token_symbol: string, block_timestamp: number) => { const csvRows: OutputDataSchemaRow[] = []; for (let i = 0; i < events.length; i++) { const { @@ -32,7 +36,7 @@ const collectTransferEvents = async (events: any[], token_symbol: string) => { blockNumber, address: token_address } = events[i] - const timestamp = await getBlockTimestamp(blockNumber) + const timestamp = block_timestamp if(senderAddress_address !== stablecoinFarmAddress && senderAddress_address !== zeroAddress) { csvRows.push({ block_number: Number(blockNumber), @@ -62,7 +66,7 @@ const collectTransferEvents = async (events: any[], token_symbol: string) => { export const getUserTVLByBlock = async ( blocks: BlockData ): Promise => { - const {blockNumber} = blocks + const {blockNumber, blockTimestamp} = blocks const allCsvRows: OutputDataSchemaRow[] = []; for (let i = 0; i < vaultsAddresses.length; i++) { const {address, token_symbol} = vaultsAddresses[i]; @@ -76,10 +80,64 @@ export const getUserTVLByBlock = async ( fromBlock: BigInt(currentStartingBlock), toBlock: BigInt(endBlock), }); - const transferCsvRows = await collectTransferEvents(transferEvents, token_symbol); + const transferCsvRows = await collectTransferEvents(transferEvents, token_symbol, blockTimestamp); allCsvRows.push(...transferCsvRows) currentStartingBlock = endBlock } } return allCsvRows } + + +const readBlocksFromCSV = async (filePath: string): Promise => { + const blocks: BlockData[] = []; + + await new Promise((resolve, reject) => { + fs.createReadStream(filePath) + .pipe(csv()) // Specify the separator as '\t' for TSV files + .on('data', (row) => { + const blockNumber = parseInt(row.number, 10); + const blockTimestamp = parseInt(row.timestamp, 10); + if (!isNaN(blockNumber) && blockTimestamp) { + blocks.push({ blockNumber: blockNumber, blockTimestamp }); + } + }) + .on('end', () => { + resolve(); + }) + .on('error', (err) => { + reject(err); + }); + }); + + return blocks; + }; + + + readBlocksFromCSV('hourly_blocks.csv').then(async (blocks: any[]) => { + console.log(blocks); + const allCsvRows: any[] = []; // Array to accumulate CSV rows for all blocks + + for (const block of blocks) { + try { + const result = await getUserTVLByBlock(block); + for(let i = 0; i < result.length; i++){ + allCsvRows.push(result[i]) + } + } catch (error) { + console.error(`An error occurred for block ${block}:`, error); + } + } + await new Promise((resolve, reject) => { + const ws = fs.createWriteStream(`outputData.csv`, { flags: 'w' }); + write(allCsvRows, { headers: true }) + .pipe(ws) + .on("finish", () => { + console.log(`CSV file has been written.`); + resolve; + }); + }); + }).catch((err) => { + console.error('Error reading CSV file:', err); + }); + \ No newline at end of file diff --git a/adapters/lynex/src/index.ts b/adapters/lynex/src/index.ts index 4866091a..12aae4ce 100644 --- a/adapters/lynex/src/index.ts +++ b/adapters/lynex/src/index.ts @@ -1,9 +1,8 @@ import fs from "fs"; import { write } from "fast-csv"; -import csv from 'csv-parser'; +import csv from "csv-parser"; import { getTimestampAtBlock, getUserAddresses } from "./sdk/subgraphDetails"; import { - VE_LYNX_ADDRESS, LYNX_ADDRESS, fetchUserPools, fetchUserVotes, @@ -16,7 +15,7 @@ import { } from "./sdk/pools"; const getData = async () => { - const snapshotBlocks = [3460121]; + const snapshotBlocks = [4328548]; const csvRows: OutputSchemaRow[] = []; @@ -89,18 +88,40 @@ export const getUserStakedTVLByBlock = async ({ [userAddress: string]: { [tokenAddress: string]: BigNumber }; }; - const 
userPoolFetch = []; - const userVotesFetch = []; + let userPoolFetch = []; + let userVotesFetch = []; + + const batchSize = 400; + let position = 0; + let userFetchResult: any = []; + let userVotesResult: any = []; for (const user of userAddresses) { userPoolFetch.push( fetchUserPools(BigInt(blockNumber), user.id, user.pools) ); userVotesFetch.push(fetchUserVotes(BigInt(blockNumber), user.id)); + if (position % batchSize === 0) { + userFetchResult = [ + ...userFetchResult, + ...(await Promise.all(userPoolFetch)), + ]; + userPoolFetch = []; + userVotesResult = [ + ...userVotesResult, + ...(await Promise.all(userVotesFetch)), + ]; + userVotesFetch = []; + } + position++; } - const userFetchResult = await Promise.all(userPoolFetch); - const userVotesResult = await Promise.all(userVotesFetch); + userVotesResult = [ + ...userVotesResult, + ...(await Promise.all(userVotesFetch)), + ]; + + userFetchResult = [...userFetchResult, ...(await Promise.all(userPoolFetch))]; for (const userFetchedPools of userFetchResult) { for (const userPool of userFetchedPools) { @@ -208,17 +229,17 @@ const readBlocksFromCSV = async (filePath: string): Promise => { await new Promise((resolve, reject) => { fs.createReadStream(filePath) .pipe(csv()) // Specify the separator as '\t' for TSV files - .on('data', (row) => { + .on("data", (row) => { const blockNumber = parseInt(row.number, 10); const blockTimestamp = parseInt(row.timestamp, 10); if (!isNaN(blockNumber) && blockTimestamp) { blocks.push({ blockNumber: blockNumber, blockTimestamp }); } }) - .on('end', () => { + .on("end", () => { resolve(); }) - .on('error', (err) => { + .on("error", (err) => { reject(err); }); }); @@ -226,7 +247,6 @@ const readBlocksFromCSV = async (filePath: string): Promise => { return blocks; }; - readBlocksFromCSV('hourly_blocks.csv').then(async (blocks: any[]) => { console.log(blocks); const allCsvRows: any[] = []; // Array to accumulate CSV rows for all blocks diff --git a/adapters/lynex/src/sdk/lensDetails.ts b/adapters/lynex/src/sdk/lensDetails.ts index b1f92bf6..07c3d690 100644 --- a/adapters/lynex/src/sdk/lensDetails.ts +++ b/adapters/lynex/src/sdk/lensDetails.ts @@ -83,7 +83,12 @@ export const fetchUserPools = async ( calls, blockNumber )) as any; + const delay = (ms: number | undefined) => new Promise(resolve => setTimeout(resolve, ms)) + await delay(100) return res.map((r: any) => { + if (r.status !== 'success') { + throw new Error("RPC call error. 
Status: " + r.status); + } return { result: { ...r.result, userAddress } }; }) as LensResponseWithBlock[]; }; diff --git a/adapters/lyve/package.json b/adapters/lyve/package.json index adfa77fa..18f5f95f 100644 --- a/adapters/lyve/package.json +++ b/adapters/lyve/package.json @@ -14,11 +14,13 @@ "author": "", "license": "UNLICENSED", "dependencies": { - "fast-csv": "^5.0.1", "csv-parser": "^3.0.0", + "fast-csv": "^5.0.1", + "lodash": "^4.17.21", "node-fetch": "^3.3.2" }, "devDependencies": { + "@types/lodash": "^4.17.1", "@types/node": "^20.11.30", "typescript": "^5.4.3" } diff --git a/adapters/lyve/src/index.ts b/adapters/lyve/src/index.ts index 95b2b64d..bf5bdbd2 100644 --- a/adapters/lyve/src/index.ts +++ b/adapters/lyve/src/index.ts @@ -1,6 +1,7 @@ -import * as fs from "fs"; -import { write } from "fast-csv"; import csv from 'csv-parser'; +import fs from "fs"; +import { write } from "fast-csv"; +import { get } from "lodash"; type OutputDataSchemaRow = { @@ -13,131 +14,210 @@ type OutputDataSchemaRow = { usd_price: number; }; -const LINEA_RPC = "https://rpc.linea.build"; +interface BlockData { + blockNumber: number; + blockTimestamp: number; +} + const LYU_ADDRESS = "0xb20116eE399f15647BB1eEf9A74f6ef3b58bc951"; const LYVE_SUBGRAPH_QUERY_URL = "https://api.studio.thegraph.com/query/53783/lyve-lp-tvl/version/latest"; -const LYVE_STABILITY_POOL_QUERY = ` - query StabilityPoolQuery { - userDeposits(first: 1000,orderBy: _newDeposit, orderDirection: desc) { - _depositor, - _newDeposit - } - } -`; - -const _VESSELS_QUERY = ` - query VesselQuery { - vessels(first: 1000,where: { _status: 0 }) { - id - _borrower - _asset - updates { - _coll - blockTimestamp - } - } - } -`; - const post = async (url: string, data: any): Promise => { const response = await fetch(url, { - method: "POST", - headers: { - "Content-Type": "application/json", - Accept: "application/json", - }, - body: JSON.stringify(data), + method: "POST", + headers: { + "Content-Type": "application/json", + Accept: "application/json", + }, + body: JSON.stringify(data), }); return await response.json(); -}; + }; -const getStabilityPoolData = async (blockNumber: number, blockTimestamp: number): Promise => { - const csvRows: OutputDataSchemaRow[] = []; - const responseJson = await post(LYVE_SUBGRAPH_QUERY_URL, { query: LYVE_STABILITY_POOL_QUERY }); - for (const item of responseJson.data.userDeposits) { - csvRows.push({ - block_number: blockNumber, - timestamp: blockTimestamp, - user_address: item._depositor, - token_address: LYU_ADDRESS, - token_balance: item._newDeposit, - token_symbol: "LYU", - usd_price: 0 + const fetchAllData = async (query: string, variables: any, dataPath: string): Promise => { + let hasMore = true; + let skip = 0; + const allData: any[] = []; + + while (hasMore) { + const responseJson = await post(LYVE_SUBGRAPH_QUERY_URL, { + query: query, + variables: { ...variables, skip: skip } }); + + const responseData = get(responseJson, dataPath); + + if (!responseData || responseData.length === 0) { + hasMore = false; + } else { + allData.push(...responseData); + skip += variables.first; // Increase skip by 'first', assuming 'first' is the batch size + } } - return csvRows; + return allData; +}; + +const getStabilityPoolData = async ( + blockNumber: number, + blockTimestamp: number +): Promise => { + const LYVE_STABILITY_POOL_QUERY = ` + query StabilityPoolQuery($skip: Int!, $first: Int!) 
{ + userDeposits(skip: $skip, first: $first) { + depositor + updates { + blockNumber + blockTimestamp + newDeposit + } + } + } + `; + const csvRows: OutputDataSchemaRow[] = []; + const allDeposits = await fetchAllData(LYVE_STABILITY_POOL_QUERY, { first: 1000 }, 'data.userDeposits'); + + const targetBlockNumber = blockNumber; + + for (const item of allDeposits) { + try { + const sortedUpdates = item.updates + .filter((update: any) => update.blockNumber && update.blockNumber <= targetBlockNumber) + .sort((a: any, b: any) => b.blockNumber - a.blockNumber); + if( !sortedUpdates || !sortedUpdates[0]){ + continue; + } + if (sortedUpdates.length > 0) { + const mostRecentUpdate = sortedUpdates[0]; + csvRows.push({ + block_number: blockNumber, + timestamp: blockTimestamp, + user_address: item.depositor, + token_address: LYU_ADDRESS, + token_balance: mostRecentUpdate.newDeposit, + token_symbol: "LYU", + usd_price: 0 + }); + } + } catch (error) { + console.error(`getStabilityPoolData An error occurred for block ${blockNumber}:`, error); + } + + } + + return csvRows; }; -const getVesselDepositsData = async (blockNumber: number, blockTimestamp: number): Promise => { + + +const getVesselDepositsData = async ( + blockNumber: number, + blockTimestamp: number + ): Promise => { + const LYVE_VESSELS_QUERY = ` + query VesselQuery($skip: Int!, $first: Int!){ + vessels( + skip: $skip, first: $first) { + id + _borrower + _asset + updates { + _coll + blockNumber + blockTimestamp + } + } + } + `; const csvRows: OutputDataSchemaRow[] = []; - const responseJson = await post(LYVE_SUBGRAPH_QUERY_URL, { query: _VESSELS_QUERY }); - for (const item of responseJson.data.vessels) { - const sortedUpdates = item.updates.sort((a: any, b: any) => b.blockTimestamp - a.blockTimestamp); - const updatedAssetAmount = sortedUpdates[0]._coll; - csvRows.push({ - block_number: blockNumber, - timestamp: blockTimestamp, - user_address: item._borrower, - token_address: item._asset, - token_balance: updatedAssetAmount, - token_symbol: "", - usd_price: 0 - }); + const allDeposits = await fetchAllData(LYVE_VESSELS_QUERY, { first: 1000 }, 'data.vessels'); + const targetBlockNumber = blockNumber; + + for (const item of allDeposits) { + try { + const sortedUpdates = item.updates + .filter((update: any) => update.blockNumber && update.blockNumber <= targetBlockNumber) + .sort((a: any, b: any) => b.blockNumber - a.blockNumber); + if( !sortedUpdates || !sortedUpdates[0]){ + continue; + } + const updatedAssetAmount = sortedUpdates[0]._coll; + csvRows.push({ + block_number: blockNumber, + timestamp: blockTimestamp, + user_address: item._borrower, + token_address: item._asset, + token_balance: updatedAssetAmount, + token_symbol: "", + usd_price: 0 + }); + } catch (error) { + console.error(`getVesselDepositsData An error occurred for block ${blockNumber}:`, error); + } } return csvRows; -}; -interface BlockData { - blockNumber: number; - blockTimestamp: number; -} + }; + + export const main = async (blocks: BlockData[]) => { const allCsvRows: any[] = []; // Array to accumulate CSV rows for all blocks const batchSize = 10; // Size of batch to trigger writing to the file let i = 0; - + for (const { blockNumber, blockTimestamp } of blocks) { - try { - // Retrieve data using block number and timestamp - const csvRowsStabilityPool = await getStabilityPoolData(blockNumber, blockTimestamp); - const csvRowsVessels = await getVesselDepositsData(blockNumber, blockTimestamp); - const csvRows = csvRowsStabilityPool.concat(csvRowsVessels); - - // Accumulate CSV 
rows for all blocks - allCsvRows.push(...csvRows); - - i++; - console.log(`Processed block ${i}`); - - // Write to file when batch size is reached or at the end of loop - if (i % batchSize === 0 || i === blocks.length) { - const ws = fs.createWriteStream(`outputData.csv`, { flags: i === batchSize ? 'w' : 'a' }); - write(allCsvRows, { headers: i === batchSize ? true : false }) - .pipe(ws) - .on("finish", () => { - console.log(`CSV file has been written.`); - }); - - // Clear the accumulated CSV rows - allCsvRows.length = 0; - } - } catch (error) { - console.error(`An error occurred for block ${blockNumber}:`, error); + try { + // Retrieve data using block number and timestamp + const csvRowsStabilityPool = await getStabilityPoolData( + blockNumber, + blockTimestamp + ); + const csvRowsVessels = await getVesselDepositsData( + blockNumber, + blockTimestamp + ); + const csvRows = csvRowsStabilityPool.concat(csvRowsVessels); + + // Accumulate CSV rows for all blocks + allCsvRows.push(...csvRows); + + i++; + console.log(`Processed block ${i}`); + + // Write to file when batch size is reached or at the end of loop + if (i % batchSize === 0 || i === blocks.length) { + const ws = fs.createWriteStream(`outputData.csv`, { + flags: i === batchSize ? "w" : "a", + }); + write(allCsvRows, { headers: i === batchSize ? true : false }) + .pipe(ws) + .on("finish", () => { + console.log(`CSV file has been written.`); + }); + + // Clear the accumulated CSV rows + allCsvRows.length = 0; } + } catch (error) { + console.error(`An error occurred for block ${blockNumber}:`, error); + } } -}; - -export const getUserTVLByBlock = async (blocks: BlockData) => { - const { blockNumber, blockTimestamp } = blocks + }; + + export const getUserTVLByBlock = async (blocks: BlockData) => { + const { blockNumber, blockTimestamp } = blocks; // Retrieve data using block number and timestamp - const csvRowsStabilityPool = await getStabilityPoolData(blockNumber, blockTimestamp); - const csvRowsVessels = await getVesselDepositsData(blockNumber, blockTimestamp); + const csvRowsStabilityPool = await getStabilityPoolData( + blockNumber, + blockTimestamp + ); + const csvRowsVessels = await getVesselDepositsData( + blockNumber, + blockTimestamp + ); const csvRows = csvRowsStabilityPool.concat(csvRowsVessels); - return csvRows -}; - - -const readBlocksFromCSV = async (filePath: string): Promise => { + return csvRows; + }; + + const readBlocksFromCSV = async (filePath: string): Promise => { const blocks: BlockData[] = []; await new Promise((resolve, reject) => { @@ -159,34 +239,37 @@ const readBlocksFromCSV = async (filePath: string): Promise => { }); return blocks; -}; - - -readBlocksFromCSV('hourly_blocks.csv').then(async (blocks: any[]) => { + }; + + readBlocksFromCSV('hourly_blocks.csv').then(async (blocks: any[]) => { console.log(blocks); const allCsvRows: any[] = []; // Array to accumulate CSV rows for all blocks const batchSize = 1000; // Size of batch to trigger writing to the file let i = 0; - + for (const block of blocks) { try { const result = await getUserTVLByBlock(block); - // Accumulate CSV rows for all blocks allCsvRows.push(...result); } catch (error) { console.error(`An error occurred for block ${block}:`, error); } } await new Promise((resolve, reject) => { - const ws = fs.createWriteStream(`outputData.csv`, { flags: 'w' }); - write(allCsvRows, { headers: true }) - .pipe(ws) - .on("finish", () => { - console.log(`CSV file has been written.`); - resolve; - }); + // const randomTime = Math.random() * 1000; + // 
setTimeout(resolve, randomTime); + const ws = fs.createWriteStream(`outputData.csv`, { flags: 'w' }); + write(allCsvRows, { headers: true }) + .pipe(ws) + .on("finish", () => { + console.log(`CSV file has been written.`); + resolve; + }); }); - -}).catch((err) => { + + // Clear the accumulated CSV rows + // allCsvRows.length = 0; + + }).catch((err) => { console.error('Error reading CSV file:', err); -}); + }); \ No newline at end of file diff --git a/adapters/lyve/tsconfig.json b/adapters/lyve/tsconfig.json index a1736e1c..d790b7f8 100644 --- a/adapters/lyve/tsconfig.json +++ b/adapters/lyve/tsconfig.json @@ -1,109 +1,14 @@ { "compilerOptions": { - /* Visit https://aka.ms/tsconfig to read more about this file */ - - /* Projects */ - // "incremental": true, /* Save .tsbuildinfo files to allow for incremental compilation of projects. */ - // "composite": true, /* Enable constraints that allow a TypeScript project to be used with project references. */ - // "tsBuildInfoFile": "./.tsbuildinfo", /* Specify the path to .tsbuildinfo incremental compilation file. */ - // "disableSourceOfProjectReferenceRedirect": true, /* Disable preferring source files instead of declaration files when referencing composite projects. */ - // "disableSolutionSearching": true, /* Opt a project out of multi-project reference checking when editing. */ - // "disableReferencedProjectLoad": true, /* Reduce the number of projects loaded automatically by TypeScript. */ - - /* Language and Environment */ - "target": "es2022", /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */ - // "lib": [], /* Specify a set of bundled library declaration files that describe the target runtime environment. */ - // "jsx": "preserve", /* Specify what JSX code is generated. */ - // "experimentalDecorators": true, /* Enable experimental support for legacy experimental decorators. */ - // "emitDecoratorMetadata": true, /* Emit design-type metadata for decorated declarations in source files. */ - // "jsxFactory": "", /* Specify the JSX factory function used when targeting React JSX emit, e.g. 'React.createElement' or 'h'. */ - // "jsxFragmentFactory": "", /* Specify the JSX Fragment reference used for fragments when targeting React JSX emit e.g. 'React.Fragment' or 'Fragment'. */ - // "jsxImportSource": "", /* Specify module specifier used to import the JSX factory functions when using 'jsx: react-jsx*'. */ - // "reactNamespace": "", /* Specify the object invoked for 'createElement'. This only applies when targeting 'react' JSX emit. */ - // "noLib": true, /* Disable including any library files, including the default lib.d.ts. */ - // "useDefineForClassFields": true, /* Emit ECMAScript-standard-compliant class fields. */ - // "moduleDetection": "auto", /* Control what method is used to detect module-format JS files. */ - - /* Modules */ - "module": "commonjs", /* Specify what module code is generated. */ - "rootDir": "src/", /* Specify the root folder within your source files. */ - // "moduleResolution": "node10", /* Specify how TypeScript looks up a file from a given module specifier. */ - // "baseUrl": "./", /* Specify the base directory to resolve non-relative module names. */ - // "paths": {}, /* Specify a set of entries that re-map imports to additional lookup locations. */ - // "rootDirs": [], /* Allow multiple folders to be treated as one when resolving modules. */ - // "typeRoots": [], /* Specify multiple folders that act like './node_modules/@types'. 
*/ - // "types": [], /* Specify type package names to be included without being referenced in a source file. */ - // "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */ - // "moduleSuffixes": [], /* List of file name suffixes to search when resolving a module. */ - // "allowImportingTsExtensions": true, /* Allow imports to include TypeScript file extensions. Requires '--moduleResolution bundler' and either '--noEmit' or '--emitDeclarationOnly' to be set. */ - // "resolvePackageJsonExports": true, /* Use the package.json 'exports' field when resolving package imports. */ - // "resolvePackageJsonImports": true, /* Use the package.json 'imports' field when resolving imports. */ - // "customConditions": [], /* Conditions to set in addition to the resolver-specific defaults when resolving imports. */ - // "resolveJsonModule": true, /* Enable importing .json files. */ - // "allowArbitraryExtensions": true, /* Enable importing files with any extension, provided a declaration file is present. */ - // "noResolve": true, /* Disallow 'import's, 'require's or ''s from expanding the number of files TypeScript should add to a project. */ - - /* JavaScript Support */ - // "allowJs": true, /* Allow JavaScript files to be a part of your program. Use the 'checkJS' option to get errors from these files. */ - // "checkJs": true, /* Enable error reporting in type-checked JavaScript files. */ - // "maxNodeModuleJsDepth": 1, /* Specify the maximum folder depth used for checking JavaScript files from 'node_modules'. Only applicable with 'allowJs'. */ - - /* Emit */ - // "declaration": true, /* Generate .d.ts files from TypeScript and JavaScript files in your project. */ - // "declarationMap": true, /* Create sourcemaps for d.ts files. */ - // "emitDeclarationOnly": true, /* Only output d.ts files and not JavaScript files. */ - // "sourceMap": true, /* Create source map files for emitted JavaScript files. */ - // "inlineSourceMap": true, /* Include sourcemap files inside the emitted JavaScript. */ - // "outFile": "./", /* Specify a file that bundles all outputs into one JavaScript file. If 'declaration' is true, also designates a file that bundles all .d.ts output. */ - "outDir": "dist/", /* Specify an output folder for all emitted files. */ - // "removeComments": true, /* Disable emitting comments. */ - // "noEmit": true, /* Disable emitting files from a compilation. */ - // "importHelpers": true, /* Allow importing helper functions from tslib once per project, instead of including them per-file. */ - // "importsNotUsedAsValues": "remove", /* Specify emit/checking behavior for imports that are only used for types. */ - // "downlevelIteration": true, /* Emit more compliant, but verbose and less performant JavaScript for iteration. */ - // "sourceRoot": "", /* Specify the root path for debuggers to find the reference source code. */ - // "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */ - // "inlineSources": true, /* Include source code in the sourcemaps inside the emitted JavaScript. */ - // "emitBOM": true, /* Emit a UTF-8 Byte Order Mark (BOM) in the beginning of output files. */ - // "newLine": "crlf", /* Set the newline character for emitting files. */ - // "stripInternal": true, /* Disable emitting declarations that have '@internal' in their JSDoc comments. */ - // "noEmitHelpers": true, /* Disable generating custom helper functions like '__extends' in compiled output. 
*/ - // "noEmitOnError": true, /* Disable emitting files if any type checking errors are reported. */ - // "preserveConstEnums": true, /* Disable erasing 'const enum' declarations in generated code. */ - // "declarationDir": "./", /* Specify the output directory for generated declaration files. */ - // "preserveValueImports": true, /* Preserve unused imported values in the JavaScript output that would otherwise be removed. */ - - /* Interop Constraints */ - // "isolatedModules": true, /* Ensure that each file can be safely transpiled without relying on other imports. */ - // "verbatimModuleSyntax": true, /* Do not transform or elide any imports or exports not marked as type-only, ensuring they are written in the output file's format based on the 'module' setting. */ - // "allowSyntheticDefaultImports": true, /* Allow 'import x from y' when a module doesn't have a default export. */ - "esModuleInterop": true, /* Emit additional JavaScript to ease support for importing CommonJS modules. This enables 'allowSyntheticDefaultImports' for type compatibility. */ - // "preserveSymlinks": true, /* Disable resolving symlinks to their realpath. This correlates to the same flag in node. */ - "forceConsistentCasingInFileNames": true, /* Ensure that casing is correct in imports. */ - - /* Type Checking */ - "strict": true, /* Enable all strict type-checking options. */ - // "noImplicitAny": true, /* Enable error reporting for expressions and declarations with an implied 'any' type. */ - // "strictNullChecks": true, /* When type checking, take into account 'null' and 'undefined'. */ - // "strictFunctionTypes": true, /* When assigning functions, check to ensure parameters and the return values are subtype-compatible. */ - // "strictBindCallApply": true, /* Check that the arguments for 'bind', 'call', and 'apply' methods match the original function. */ - // "strictPropertyInitialization": true, /* Check for class properties that are declared but not set in the constructor. */ - // "noImplicitThis": true, /* Enable error reporting when 'this' is given the type 'any'. */ - // "useUnknownInCatchVariables": true, /* Default catch clause variables as 'unknown' instead of 'any'. */ - // "alwaysStrict": true, /* Ensure 'use strict' is always emitted. */ - // "noUnusedLocals": true, /* Enable error reporting when local variables aren't read. */ - // "noUnusedParameters": true, /* Raise an error when a function parameter isn't read. */ - // "exactOptionalPropertyTypes": true, /* Interpret optional property types as written, rather than adding 'undefined'. */ - // "noImplicitReturns": true, /* Enable error reporting for codepaths that do not explicitly return in a function. */ - // "noFallthroughCasesInSwitch": true, /* Enable error reporting for fallthrough cases in switch statements. */ - // "noUncheckedIndexedAccess": true, /* Add 'undefined' to a type when accessed using an index. */ - // "noImplicitOverride": true, /* Ensure overriding members in derived classes are marked with an override modifier. */ - // "noPropertyAccessFromIndexSignature": true, /* Enforces using indexed accessors for keys declared using an indexed type. */ - // "allowUnusedLabels": true, /* Disable error reporting for unused labels. */ - // "allowUnreachableCode": true, /* Disable error reporting for unreachable code. */ - - /* Completeness */ - // "skipDefaultLibCheck": true, /* Skip type checking .d.ts files that are included with TypeScript. */ - "skipLibCheck": true /* Skip type checking all .d.ts files. 
*/ - } -} + "target": "es6", + "module": "commonjs", + "strict": true, + "esModuleInterop": true, + "skipLibCheck": true, + "forceConsistentCasingInFileNames": true, + "outDir": "dist/" + }, + "include": [ + "./src/*.ts" + ] +} \ No newline at end of file diff --git a/adapters/mendi/src/index.ts b/adapters/mendi/src/index.ts index 7c612c2b..c3c6402f 100644 --- a/adapters/mendi/src/index.ts +++ b/adapters/mendi/src/index.ts @@ -13,6 +13,7 @@ import csv from "csv-parser"; import { write } from "fast-csv"; import { getMarketInfos, updateBorrowBalances } from "./sdk/marketDetails"; import { bigMath } from "./sdk/abi/helpers"; +import { exit } from "process"; interface BlockData { blockNumber: number; @@ -37,13 +38,16 @@ export const getUserTVLByBlock = async (blocks: BlockData) => { const csvRows: OutputDataSchemaRow[] = []; const block = blocks.blockNumber; - const states = await getAccountStatesForAddressByPoolAtBlock( + let states = await getAccountStatesForAddressByPoolAtBlock( block, "", "", CHAINS.LINEA, PROTOCOLS.MENDI ); + states = states.filter( + (s) => marketInfos.findIndex((mi) => mi.address == s.account) == -1 + ); console.log(`Block: ${block}`); console.log("States: ", states.length); diff --git a/adapters/mendi/src/sdk/marketDetails.ts b/adapters/mendi/src/sdk/marketDetails.ts index 7b4e9b61..0a05a5de 100644 --- a/adapters/mendi/src/sdk/marketDetails.ts +++ b/adapters/mendi/src/sdk/marketDetails.ts @@ -116,8 +116,12 @@ export const updateBorrowBalances = async ( states = states.filter((x) => x.borrowAmount > 0); + console.log(`Will update all borrow balances for ${states.length} states`); for (var i = 0; i < states.length; i += 500) { - var subStates = states.slice(i, 500); + const start = i; + const end = i + 500; + var subStates = states.slice(start, end); + console.log(`Updating borrow balances for ${start} - ${end}`); const borrowBalanceResults = await publicClient.multicall({ contracts: subStates diff --git a/adapters/mendi/src/sdk/subgraphDetails.ts b/adapters/mendi/src/sdk/subgraphDetails.ts index acd28012..41b32192 100644 --- a/adapters/mendi/src/sdk/subgraphDetails.ts +++ b/adapters/mendi/src/sdk/subgraphDetails.ts @@ -89,6 +89,7 @@ export const getAccountStatesForAddressByPoolAtBlock = async ( (mi) => mi.underlyingAddress == state.token.toLowerCase() ); if (!marketInfo) { + console.log(`${state.token} not found`); return undefined; } diff --git a/adapters/nile/src/index.ts b/adapters/nile/src/index.ts index c97b3b3f..3cb9c7d7 100644 --- a/adapters/nile/src/index.ts +++ b/adapters/nile/src/index.ts @@ -13,7 +13,7 @@ import BigNumber from "bignumber.js"; const NILE_ADDRESS = "0xAAAac83751090C6ea42379626435f805DDF54DC8".toLowerCase(); const getData = async () => { - const snapshotBlocks = [3753501]; + const snapshotBlocks = [4469972]; const csvRows: OutputSchemaRow[] = []; diff --git a/adapters/nile/src/sdk/config.ts b/adapters/nile/src/sdk/config.ts index f92dfc37..9edc9576 100644 --- a/adapters/nile/src/sdk/config.ts +++ b/adapters/nile/src/sdk/config.ts @@ -6,5 +6,8 @@ export const SUBGRAPH_URL = export const client = createPublicClient({ chain: linea, - transport: http("https://rpc.linea.build"), + transport: http("https://rpc.linea.build", { + retryCount: 5, + timeout: 60_000, + }), }); diff --git a/adapters/package.json b/adapters/package.json index 92ded735..ec94e15a 100644 --- a/adapters/package.json +++ b/adapters/package.json @@ -13,6 +13,7 @@ "license": "UNLICENSED", "dependencies": { "csv-parser": "^3.0.0", - "fast-csv": "^5.0.1" + "fast-csv": "^5.0.1", + 
"viem": "^2.9.20" } } diff --git a/adapters/renzo/package.json b/adapters/renzo/package.json index e48ca196..d0b7b1a0 100644 --- a/adapters/renzo/package.json +++ b/adapters/renzo/package.json @@ -15,8 +15,9 @@ "author": "", "license": "UNLICENSED", "dependencies": { - "fast-csv": "^5.0.1", - "csv-parser": "^3.0.0" + "csv-parser": "^3.0.0", + "dotenv": "^16.4.5", + "fast-csv": "^5.0.1" }, "devDependencies": { "@types/node": "^20.12.5", diff --git a/adapters/renzo/src/index.ts b/adapters/renzo/src/index.ts index c9bff1f2..bf8dab94 100644 --- a/adapters/renzo/src/index.ts +++ b/adapters/renzo/src/index.ts @@ -1,182 +1,178 @@ -import * as fs from 'fs'; -import { write } from "fast-csv"; -import csv from 'csv-parser'; +import * as fs from "fs"; +import { format, write } from "fast-csv"; +import csv from "csv-parser"; +require("dotenv").config(); + +const excludedAddresses = new Set([ + "0x8a90d208666deec08123444f67bf5b1836074a67", // Mendi + "0x0684fc172a0b8e6a65cf4684edb2082272fe9050", // Zerolend + "0x76b0d13428eb01f12f132aa58707d254c42df568", // Nilev2 + "0xa9a1fb9f6664a0b6bfb1f52724fd7b23842248c5", // Nilev2 + "0x6ba5ccc757541851d610ecc8f8ac3714b5f95314", // Nile v3 + "0x2c88A441418E06b9F3e565c2f866Fcb03c9409E2", // Layerbank + "0x057819bbc15121c923620c27303b2Ed58b87cF86", // Lynex + "0x7160570BB153Edd0Ea1775EC2b2Ac9b65F1aB61B", // Syncswap + "0xfDe733b5DE5B5a06C68353e01E4c1D3415C89560", // Pancakeswap + "0xa05eF29e9aC8C75c530c2795Fa6A800e188dE0a9", // Connext + "0x62cE247f34dc316f93D3830e4Bf10959FCe630f8", // ZkLink +]); type OutputDataSchemaRow = { - block_number: number; - timestamp: number; - user_address: string; - token_address: string; - token_balance: bigint; - token_symbol: string; - usd_price: number; + block_number: number; + timestamp: number; + user_address: string; + token_address: string; + token_balance: bigint; + token_symbol: string; + usd_price: number; }; interface BlockData { - blockNumber: number; - blockTimestamp: number; + blockNumber: number; + blockTimestamp: number; } -const querySize = 1000; +const querySize = 500000; const EZ_ETH_ADDRESS = "0x2416092f143378750bb29b79eD961ab195CcEea5"; const TOKEN_SYMBOL = "EZETH"; -const SUBGRAPH_QUERY_URL = "https://api.goldsky.com/api/public/project_clsxzkxi8dh7o01zx5kyxdga4/subgraphs/renzo-linea-indexer/v0.11/gn"; -const USER_BALANCES_QUERY = ` -query BalanceQuery { - balances(where: {block_lte: $blockNum}, first: ${querySize}, skip: $skipCount, orderBy: value, orderDirection: desc) { - id - user - value - block - blockTimestamp - } - } -`; - -const post = async (url: string, data: any) => { - const response = await fetch(url, { - method: "POST", - headers: { - "Content-Type": "application/json", - Accept: "application/json", +const RENZO_INDEXER_INTERFACE = + "https://app.sentio.xyz/api/v1/analytics/renzo/ezeth-points-linea/sql/execute"; +const API_KEY = process.env.RENZO_API_KEY || ""; + +export const getUserTVLByBlock = async ( + blocks: BlockData +): Promise => { + const { blockNumber, blockTimestamp } = blocks; + try { + const response = await fetch(RENZO_INDEXER_INTERFACE, { + method: "POST", + headers: { + "Content-Type": "application/json", + "api-key": API_KEY, + }, + body: JSON.stringify({ + sqlQuery: { + sql: `WITH RankedByBlockNumber AS ( + SELECT *, + ROW_NUMBER() OVER (PARTITION BY account ORDER BY block_number DESC) AS row_num + FROM \`point_update\` + WHERE block_number <= ${blockNumber} + ) + SELECT account, newEzETHBalance, block_number, newTimestampMilli, address + FROM RankedByBlockNumber + WHERE row_num 
+ `, + size: querySize, }, - body: JSON.stringify(data), + }), }); - return await response.json(); -}; - - -export const getUserTVLByBlock = async (blocks: BlockData) => { - const { blockNumber, blockTimestamp } = blocks - const csvRows: OutputDataSchemaRow[] = []; - let skipIndex = 0; - let latestBalances: Record<string, [string, string]> = {}; - while (true) { - const responseJson = await post(SUBGRAPH_QUERY_URL, { query: USER_BALANCES_QUERY.replace("$skipCount", skipIndex.toString()).replace("$blockNum", blockNumber.toString()) }); - let rowCount = 0; - for (const item of responseJson.data.balances) { - let userAddress = item.user.toString(); - if (latestBalances[userAddress]) { - if (latestBalances[userAddress][0] < item.block) { - latestBalances[userAddress] = [item.block.toString(), item.value.toString()]; - } - } else { - latestBalances[userAddress] = [item.block.toString(), item.value.toString()]; - } - rowCount++; - } - if (rowCount < querySize) { - break; - } - skipIndex += rowCount; - } - console.log(`Fetched ${skipIndex} records`); - - for (let key in latestBalances) { - let value = latestBalances[key]; - if (value[1] != "0") { - csvRows.push({ - block_number: blockNumber, - timestamp: blockTimestamp, - user_address: key, - token_address: EZ_ETH_ADDRESS, - token_balance: BigInt(value[1]), - token_symbol: TOKEN_SYMBOL, - usd_price: 0 - }); - } + const data = await response.json(); + if (!data.result || !data.result.rows) { + console.error(`No data found for block ${blockNumber}`); + return []; } + const csvRows: OutputDataSchemaRow[] = data.result.rows + .filter((row: any) => !excludedAddresses.has(row.address.toLowerCase())) + .map((row: any) => ({ + block_number: blockNumber, + timestamp: blockTimestamp, + user_address: row.account.toLowerCase(), + token_address: row.address.toLowerCase(), + token_balance: BigInt(row.newEzETHBalance), + token_symbol: TOKEN_SYMBOL, + usd_price: 0, // 0 as default + })); return csvRows; + } catch (error) { + console.error(`An error occurred for block ${blockNumber}:`, error); + return []; + } }; -export const main = async (blocks: BlockData[]) => { - const allCsvRows: any[] = []; // Array to accumulate CSV rows for all blocks - const batchSize = 10; // Size of batch to trigger writing to the file - let i = 0; - - for (const block of blocks) { - try { - // Retrieve data using block number and timestamp - const csvRows = await getUserTVLByBlock(block) - - // Accumulate CSV rows for all blocks - allCsvRows.push(...csvRows); - - i++; - console.log(`Processed block ${i}`); - - // Write to file when batch size is reached or at the end of loop - if (i % batchSize === 0 || i === blocks.length) { - const ws = fs.createWriteStream(`outputData.csv`, { flags: i === batchSize ? 'w' : 'a' }); - write(allCsvRows, { headers: i === batchSize ?
true : false }) - .pipe(ws) - .on("finish", () => { - console.log(`CSV file has been written.`); - }); - - // Clear the accumulated CSV rows - allCsvRows.length = 0; - } - } catch (error) { - console.error(`An error occurred for block ${block.blockNumber}:`, error); - } - } -}; +const readBlocksFromCSV = async (filePath: string): Promise<BlockData[]> => { + const blocks: BlockData[] = []; + + await new Promise((resolve, reject) => { + fs.createReadStream(filePath) + .pipe(csv()) // Specify the separator as '\t' for TSV files + .on("data", (row) => { + const blockNumber = parseInt(row.number, 10); + const blockTimestamp = parseInt(row.timestamp, 10); + if (!isNaN(blockNumber) && blockTimestamp) { + blocks.push({ blockNumber: blockNumber, blockTimestamp }); + } + }) + .on("end", () => { + resolve(); + }) + .on("error", (err) => { + reject(err); + }); + }); + + return blocks; }; +readBlocksFromCSV("hourly_blocks.csv") + .then(async (blocks: any[]) => { + console.log(blocks); + // const allCsvRows: any[] = []; + + // for (const block of blocks) { + // try { + // const result = await getUserTVLByBlock(block); + // allCsvRows.push(...result); + // } catch (error) { + // console.error(`An error occurred for block ${block}:`, error); + // } + // } + // await new Promise((resolve, reject) => { + // const ws = fs.createWriteStream(`outputData.csv`, { flags: "w" }); + // write(allCsvRows, { headers: true }) + // .pipe(ws) + // .on("finish", () => { + // console.log(`CSV file has been written.`); + // resolve; + // }); + // }); + await main(blocks); + }) + .catch((err) => { + console.error("Error reading CSV file:", err); + }); -//main([{blockNumber: 3825017, blockTimestamp: 123456}]); +export const main = async (blocks: BlockData[]) => { + // Open a write stream for the unified output file. + const writeStream = fs.createWriteStream("outputData.csv", { + flags: "w", // 'w' to create a new file or overwrite the existing one.
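+ // fast-csv's format() below returns a row-formatting transform stream; + // piping it into this write stream flushes rows per block instead of + // buffering every row in memory before a single final write.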
+ }); + const csvFormat = format({ + headers: true, + includeEndRowDelimiter: true, + writeHeaders: true, + }); + csvFormat.pipe(writeStream); -const readBlocksFromCSV = async (filePath: string): Promise => { - const blocks: BlockData[] = []; - - await new Promise((resolve, reject) => { - fs.createReadStream(filePath) - .pipe(csv()) // Specify the separator as '\t' for TSV files - .on('data', (row) => { - const blockNumber = parseInt(row.number, 10); - const blockTimestamp = parseInt(row.timestamp, 10); - if (!isNaN(blockNumber) && blockTimestamp) { - blocks.push({ blockNumber: blockNumber, blockTimestamp }); - } - }) - .on('end', () => { - resolve(); - }) - .on('error', (err) => { - reject(err); - }); - }); - - return blocks; - }; - -readBlocksFromCSV('hourly_blocks.csv').then(async (blocks: BlockData[]) => { - console.log(blocks); - const allCsvRows: any[] = []; // Array to accumulate CSV rows for all blocks - - for (const block of blocks) { - try { - const result = await getUserTVLByBlock(block); - for(let i = 0; i < result.length; i++){ - allCsvRows.push(result[i]) - } - } catch (error) { - console.error(`An error occurred for block ${block}:`, error); - } - } - - await new Promise((resolve, reject) => { - const ws = fs.createWriteStream(`outputData.csv`, { flags: 'w' }); - write(allCsvRows, { headers: true }) - .pipe(ws) - .on("finish", () => { - console.log(`CSV file has been written.`); - resolve; - }); + for (const block of blocks) { + const csvRows = await getUserTVLByBlock(block); + console.log(`Processing block: ${block.blockNumber}`); + + // Writing each row to the CSV format stream + csvRows.forEach((row) => { + csvFormat.write(row); }); - }).catch((err) => { - console.error('Error reading CSV file:', err); + } + + csvFormat.end(); + + writeStream.on("finish", () => { + console.log("CSV file has been written."); }); - \ No newline at end of file +}; + +// main([ +// { blockNumber: 4452354, blockTimestamp: 123456 }, +// { blockNumber: 3452355, blockTimestamp: 123457 }, +// ]); diff --git a/adapters/satori/src/sdk/config.ts b/adapters/satori/src/sdk/config.ts index 17e3dd9b..3418af5f 100644 --- a/adapters/satori/src/sdk/config.ts +++ b/adapters/satori/src/sdk/config.ts @@ -1,6 +1,7 @@ -export const SUBGRAPH_URL = "https://api.goldsky.com/api/public/project_cltzggx5i40gi01p3ezjv94bc/subgraphs/satori-linea/v0.0.1/gn" +export const SUBGRAPH_URL = "https://api.goldsky.com/api/private/project_cltzggx5i40gi01p3ezjv94bc/subgraphs/satori-linea/v0.0.1/gn" export const ASSET = "0x176211869cA2b568f2A7D4EE941E073a821EE1ff" export const SYMBOL = "USDC" +export const KEY = "Bearer clvyurggiicnn01ywgl4s4ys9" diff --git a/adapters/satori/src/sdk/subgraphDetails.ts b/adapters/satori/src/sdk/subgraphDetails.ts index 6cfb57ea..c7516ecc 100644 --- a/adapters/satori/src/sdk/subgraphDetails.ts +++ b/adapters/satori/src/sdk/subgraphDetails.ts @@ -1,4 +1,4 @@ -import { ASSET, SYMBOL, SUBGRAPH_URL } from "./config"; +import { ASSET, SYMBOL, SUBGRAPH_URL,KEY } from "./config"; export interface OutputDataSchemaRow { block_number:number @@ -37,7 +37,7 @@ export const getUserTVLByBlock = async ( let response = await fetch(subgraphUrl, { method: "POST", body: JSON.stringify({ query }), - headers: { "Content-Type": "application/json" }, + headers: { "Content-Type": "application/json","Authorization":KEY}, }); let data = await response.json(); let snapshots = data.data.userLpSnapshots diff --git a/adapters/satori/src/sdk/subgraphDetailsV2.ts b/adapters/satori/src/sdk/subgraphDetailsV2.ts index 
782a406d..d00fd56a 100644 --- a/adapters/satori/src/sdk/subgraphDetailsV2.ts +++ b/adapters/satori/src/sdk/subgraphDetailsV2.ts @@ -1,4 +1,4 @@ -import { ASSET, SYMBOL, SUBGRAPH_URL } from "./config"; +import { ASSET, SYMBOL, SUBGRAPH_URL,KEY } from "./config"; export interface OutputDataSchemaRow { block_number:number @@ -31,8 +31,13 @@ export const getUserTVLByBlock = async ( let response = await fetch(subgraphUrl, { method: "POST", body: JSON.stringify({ query }), - headers: { "Content-Type": "application/json" }, + headers: { "Content-Type": "application/json","Authorization":KEY}, }); + if (response.ok === false) { + console.error("Response status:", response.statusText); + console.error("Skip value: ", skip); + throw new Error("Error fetching data from subgraph \nStatus code: " + response.status + "\nStatus Text: " + response.statusText); + } let data = await response.json(); let accounts = data.data.accounts for (const account of accounts) { diff --git a/adapters/secta/src/sdk/config.ts b/adapters/secta/src/sdk/config.ts index 58233af7..a50db2c2 100644 --- a/adapters/secta/src/sdk/config.ts +++ b/adapters/secta/src/sdk/config.ts @@ -14,11 +14,11 @@ export const SUBGRAPH_URLS = { [CHAINS.LINEA]: { [PROTOCOLS.SECTA]: { [AMM_TYPES.SECTAV3]: - "https://api.studio.thegraph.com/query/66239/secta-linea-exchange-v3/version/latest", - // "https://gateway-arbitrum.network.thegraph.com/api/3700f7806f624898da7631bb01f5253f/subgraphs/id/DQz9g5ZRSiprkXXCRwRSTjh6J5gsRMuhr8TymEo1pZe6", + //"https://api.studio.thegraph.com/query/66239/secta-linea-exchange-v3/version/latest", + "https://gateway-arbitrum.network.thegraph.com/api/3700f7806f624898da7631bb01f5253f/subgraphs/id/DQz9g5ZRSiprkXXCRwRSTjh6J5gsRMuhr8TymEo1pZe6", [AMM_TYPES.SECTAV2]: - "https://api.studio.thegraph.com/query/66239/secta-linea-exchange-v2/version/latest", - // "https://gateway-arbitrum.network.thegraph.com/api/3700f7806f624898da7631bb01f5253f/subgraphs/id/4YKqZQ3pH5wZ3seW2ojc1o5HxoJVYQ6UBdunW8ovJCBz", + //"https://api.studio.thegraph.com/query/66239/secta-linea-exchange-v2/version/latest", + "https://gateway-arbitrum.network.thegraph.com/api/3700f7806f624898da7631bb01f5253f/subgraphs/id/4YKqZQ3pH5wZ3seW2ojc1o5HxoJVYQ6UBdunW8ovJCBz", }, }, }; diff --git a/adapters/secta/src/sdk/poolDetails.ts b/adapters/secta/src/sdk/poolDetails.ts index b8496343..1e68e043 100644 --- a/adapters/secta/src/sdk/poolDetails.ts +++ b/adapters/secta/src/sdk/poolDetails.ts @@ -36,6 +36,8 @@ export const getV2LpValue = async (rpc: string, pairs: V2Pair[], mintedAddresses ); const userLpBalance = new BigNumber(userLpBalanceBigInt.toString()); + if (pair.totalSupply == 0) continue; + const totalSupply = new BigNumber(pair.totalSupply); const userShare = userLpBalance.dividedBy(totalSupply); diff --git a/adapters/sparta/src/index.ts b/adapters/sparta/src/index.ts index 53ce6e8d..267fb661 100644 --- a/adapters/sparta/src/index.ts +++ b/adapters/sparta/src/index.ts @@ -1,6 +1,6 @@ import { createObjectCsvWriter } from "csv-writer"; import { write } from "fast-csv"; -import csv from 'csv-parser'; +import csv from "csv-parser"; import { client, PROTOCOL_DEPLOY_BLOCK, @@ -118,8 +118,8 @@ function calculateUserReservePortion( const total = totalSupply[contractId]; const share = userPosition / total; - const reserve0 = parseInt(reserves[contractId].reserve0.toString()); - const reserve1 = parseInt(reserves[contractId].reserve1.toString()); + const reserve0 = reserves[contractId].reserve0; + const reserve1 = reserves[contractId].reserve1; const token0 = 
POOL_TOKENS[contractId].token0; const token1 = POOL_TOKENS[contractId].token1; @@ -128,8 +128,8 @@ function calculateUserReservePortion( } userReserves[user][contractId] = { - amount0: BigInt(share * reserve0), - amount1: BigInt(share * reserve1), + amount0: BigInt(Math.floor(share * reserve0)), + amount1: BigInt(Math.floor(share * reserve1)), token0: token0, token1: token1, }; @@ -152,46 +152,48 @@ function processTransactions(transactions: Transaction[]): { const toAddress = transaction.to.toLowerCase(); const contractId = transaction.contractId_.toLowerCase(); - // Skip transactions where 'from' or 'to' match the contract ID, or both 'from' and 'to' are zero addresses + // Skip internal lp txs if ( - fromAddress === contractId || - toAddress === contractId || - (fromAddress === "0x0000000000000000000000000000000000000000" && - toAddress === "0x0000000000000000000000000000000000000000") + (fromAddress === contractId && + toAddress === "0x0000000000000000000000000000000000000000") || + (toAddress === contractId && + fromAddress === "0x0000000000000000000000000000000000000000") ) { return; } - // Initialize cumulativePositions if not already set + // Initialize userPositions and cumulativePositions if not already set + if (!userPositions[contractId]) { + userPositions[contractId] = {}; + } if (!cumulativePositions[contractId]) { cumulativePositions[contractId] = 0; } // Convert the transaction value from string to integer. - let value = parseInt(transaction.value.toString()); - - // Process transactions that increase liquidity (to address isn't zero) - if (toAddress !== "0x0000000000000000000000000000000000000000") { - if (!userPositions[contractId]) { - userPositions[contractId] = {}; - } - if (!userPositions[contractId][toAddress]) { - userPositions[contractId][toAddress] = 0; - } - userPositions[contractId][toAddress] += value; - cumulativePositions[contractId] += value; - } + const value = parseInt(transaction.value.toString(), 10); - // Process transactions that decrease liquidity (from address isn't zero) + // Decrease liquidity from the sender if the from address is not zero if (fromAddress !== "0x0000000000000000000000000000000000000000") { - if (!userPositions[contractId]) { - userPositions[contractId] = {}; - } if (!userPositions[contractId][fromAddress]) { userPositions[contractId][fromAddress] = 0; } userPositions[contractId][fromAddress] -= value; cumulativePositions[contractId] -= value; + + // Remove the sender from userPositions if their balance is zero + if (userPositions[contractId][fromAddress] === 0) { + delete userPositions[contractId][fromAddress]; + } + } + + // Increase liquidity for the receiver if the to address is not zero + if (toAddress !== "0x0000000000000000000000000000000000000000") { + if (!userPositions[contractId][toAddress]) { + userPositions[contractId][toAddress] = 0; + } + userPositions[contractId][toAddress] += value; + cumulativePositions[contractId] += value; } }); @@ -199,12 +201,34 @@ function processTransactions(transactions: Transaction[]): { } async function fetchTransfers(blockNumber: number) { - const { data } = await client.query({ - query: TRANSFERS_QUERY, - variables: { blockNumber }, - fetchPolicy: "no-cache", - }); - return data.transfers; + const allTransfers = []; + const pageSize = 1000; + let skip = 0; + let hasMore = true; + + while (hasMore) { + try { + const { data } = await client.query({ + query: TRANSFERS_QUERY, + variables: { blockNumber, first: pageSize, skip }, + fetchPolicy: "no-cache", + }); + + const transfers = 
data.transfers; + allTransfers.push(...transfers); + + if (transfers.length < pageSize) { + hasMore = false; + } else { + skip += pageSize; + } + } catch (error) { + console.error("Error fetching transfers:", error); + break; + } + } + + return allTransfers; } async function fetchReservesForPools(blockNumber: number): Promise { @@ -217,14 +241,12 @@ variables: { blockNumber, contractId: pool }, fetchPolicy: "no-cache", }); - reserves[pool] = { reserve0: data.syncs[0].reserve0, reserve1: data.syncs[0].reserve1, }; }) ); - return reserves; } @@ -235,8 +257,8 @@ function convertToOutputDataSchema( return { block_number: userPosition.block_number, timestamp: userPosition.timestamp, - user_address: userPosition.user, - token_address: userPosition.token, + user_address: userPosition.user.toLowerCase(), + token_address: userPosition.token.toLowerCase(), token_balance: BigInt(userPosition.balance), // Ensure balance is treated as bigint token_symbol: "", // You may want to fill this based on additional token info you might have usd_price: 0, // Adjust if you need to calculate this value or pull from another source @@ -292,32 +314,8 @@ export const getUserTVLByBlock = async (blocks: BlockData) => { return convertToOutputDataSchema(data); }; -async function main() { - console.log(`Starting data fetching process mode: ${FIRST_TIME}`); - const blocks = await getBlockRangesToFetch(); - - let lastblock = 0; - try { - for (const block of blocks) { - lastblock = block; - const blockData = await getUserTVLByBlock({ - blockNumber: block, - blockTimestamp: 0, - }); - console.log("Processed block", block); - await saveToCSV(blockData); - } - } catch (error: any) { - console.error("Error processing block", lastblock, error.message); - } finally { - saveLastProcessedBlock(lastblock); - } -} - // IMPORTANT: config::FIRST_TIME is set to true by default // after initial fetch set it to false -// main().catch(console.error); - const readBlocksFromCSV = async (filePath: string): Promise<BlockData[]> => { const blocks: BlockData[] = []; @@ -325,17 +323,17 @@ const readBlocksFromCSV = async (filePath: string): Promise<BlockData[]> => { await new Promise((resolve, reject) => { fs.createReadStream(filePath) .pipe(csv()) // Specify the separator as '\t' for TSV files - .on('data', (row) => { + .on("data", (row) => { const blockNumber = parseInt(row.number, 10); const blockTimestamp = parseInt(row.timestamp, 10); if (!isNaN(blockNumber) && blockTimestamp) { blocks.push({ blockNumber: blockNumber, blockTimestamp }); } }) - .on('end', () => { + .on("end", () => { resolve(); }) - .on('error', (err) => { + .on("error", (err) => { reject(err); }); }); @@ -343,33 +341,34 @@ const readBlocksFromCSV = async (filePath: string): Promise<BlockData[]> => { return blocks; }; +readBlocksFromCSV("hourly_blocks.csv") + .then(async (blocks: any[]) => { + console.log(blocks); + const allCsvRows: any[] = []; // Array to accumulate CSV rows for all blocks + const batchSize = 1000; // Size of batch to trigger writing to the file + let i = 0; -readBlocksFromCSV('hourly_blocks.csv').then(async (blocks: any[]) => { - console.log(blocks); - const allCsvRows: any[] = []; // Array to accumulate CSV rows for all blocks - const batchSize = 1000; // Size of batch to trigger writing to the file - let i = 0; - - for (const block of blocks) { + for (const block of blocks) { try { - const result = await
getUserTVLByBlock(block); + // Accumulate CSV rows for all blocks + allCsvRows.push(...result); } catch (error) { - console.error(`An error occurred for block ${block}:`, error); + console.error(`An error occurred for block ${block}:`, error); } - } - await new Promise((resolve, reject) => { - // const randomTime = Math.random() * 1000; - // setTimeout(resolve, randomTime); - const ws = fs.createWriteStream(`outputData.csv`, { flags: 'w' }); - write(allCsvRows, { headers: true }) + } + await new Promise((resolve, reject) => { + // const randomTime = Math.random() * 1000; + // setTimeout(resolve, randomTime); + const ws = fs.createWriteStream(`outputData.csv`, { flags: "w" }); + write(allCsvRows, { headers: true }) .pipe(ws) .on("finish", () => { - console.log(`CSV file has been written.`); - resolve; + console.log(`CSV file has been written.`); + resolve; }); + }); + }) + .catch((err) => { + console.error("Error reading CSV file:", err); }); -}).catch((err) => { - console.error('Error reading CSV file:', err); -}); diff --git a/adapters/sparta/src/sdk/queries.ts b/adapters/sparta/src/sdk/queries.ts index bcae14e3..c0a3ec43 100644 --- a/adapters/sparta/src/sdk/queries.ts +++ b/adapters/sparta/src/sdk/queries.ts @@ -1,8 +1,12 @@ import { gql } from "@apollo/client"; export const TRANSFERS_QUERY = gql` - query GetLiquidityTransfers($blockNumber: Int!) { - transfers(where: { block_number_lte: $blockNumber }) { + query GetLiquidityTransfers($blockNumber: Int!, $first: Int!, $skip: Int!) { + transfers( + first: $first + skip: $skip + where: { block_number_lte: $blockNumber } + ) { from to value diff --git a/adapters/stargate/src/index.ts b/adapters/stargate/src/index.ts index e204c0d4..a64e01a3 100644 --- a/adapters/stargate/src/index.ts +++ b/adapters/stargate/src/index.ts @@ -29,6 +29,8 @@ const getData = async () => { }); }; +const WHITELISTED_TOKEN_ADDRESS = '0xeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee' + export const getUserTVLByBlock = async ({ blockNumber, blockTimestamp, @@ -39,7 +41,7 @@ export const getUserTVLByBlock = async ({ block_number: blockNumber, timestamp: blockTimestamp, user_address: position.user, - token_address: position.lpToken, + token_address: WHITELISTED_TOKEN_ADDRESS, token_balance: BigInt(position.balance), token_symbol: "", usd_price: 0, diff --git a/adapters/syncswap/src/sdk/config.ts b/adapters/syncswap/src/sdk/config.ts index c3d4c672..594e2b01 100644 --- a/adapters/syncswap/src/sdk/config.ts +++ b/adapters/syncswap/src/sdk/config.ts @@ -1,7 +1,10 @@ import { createPublicClient, http } from "viem"; import { linea } from "viem/chains" -export const V2_SUBGRAPH_URL = "https://gateway-arbitrum.network.thegraph.com/api/ce0ba3625ebbbd3c4b5a2af394dc8e47/subgraphs/id/3xpZFx5YNWzqemwdtRhyaTXVidKNnjY19XAWoHtvR6Lh" +export const V2_SUBGRAPH_URL = "https://gateway-arbitrum.network.thegraph.com/api/06a21853c18afff683a7ff52c764d434/subgraphs/id/3xpZFx5YNWzqemwdtRhyaTXVidKNnjY19XAWoHtvR6Lh" + +export const V2_SUBGRAPH_URL_AFTER_4515693 = "https://gateway-arbitrum.network.thegraph.com/api/06a21853c18afff683a7ff52c764d434/subgraphs/id/9R6uvVYXn9V1iAxkTLXL1Ajka75aD7mmHRj86DbXnyYQ" + export const client = createPublicClient({ chain: linea, transport: http("https://rpc.linea.build") diff --git a/adapters/syncswap/src/sdk/lib.ts b/adapters/syncswap/src/sdk/lib.ts index 56e380e2..7de0e0ce 100644 --- a/adapters/syncswap/src/sdk/lib.ts +++ b/adapters/syncswap/src/sdk/lib.ts @@ -1,4 +1,4 @@ -import { V2_SUBGRAPH_URL, client } from "./config" +import {V2_SUBGRAPH_URL, client, 
V2_SUBGRAPH_URL_AFTER_4515693} from "./config" import { UserPosition } from "./types" import {Decimal} from 'decimal.js' @@ -79,12 +79,16 @@ export const getV2UserPositionsAtBlock = async (blockNumber: number): Promise<UserPosition[]> => { - const response = await fetch(V2_SUBGRAPH_URL, { + const response = await fetch(blockNumber >= 4515693 ? V2_SUBGRAPH_URL_AFTER_4515693 : V2_SUBGRAPH_URL, { method: "POST", body: JSON.stringify({ query }), headers: { "Content-Type": "application/json" }, }) - const { data: { liquidityPositions } } = await response.json(); + const jsonData = await response.json(); + const liquidityPositions: V2Position[] = []; + if(jsonData.data.hasOwnProperty('liquidityPositions')) { + liquidityPositions.push(...jsonData.data.liquidityPositions) + } result.push(...liquidityPositions.map((position: V2Position) => { const { reserve0, reserve1 } = getV2PositionReserves(position) return { diff --git a/adapters/teahouse/src/index.ts b/adapters/teahouse/src/index.ts index 633d06be..b9b1d66d 100644 --- a/adapters/teahouse/src/index.ts +++ b/adapters/teahouse/src/index.ts @@ -39,7 +39,7 @@ const pipeline = promisify(stream.pipeline); const getData = async () => { const blocks = [ - 4174101 + 4368847 ]; //await readBlocksFromCSV('src/sdk/mode_chain_daily_blocks.csv'); const csvRows: OutputDataSchemaRow[] = []; diff --git a/adapters/teahouse/src/sdk/config.ts b/adapters/teahouse/src/sdk/config.ts index e211e2e3..14b7184d 100644 --- a/adapters/teahouse/src/sdk/config.ts +++ b/adapters/teahouse/src/sdk/config.ts @@ -2,7 +2,7 @@ import { createPublicClient, http } from "viem"; import { linea } from "viem/chains" -export const V3_SUBGRAPH_URL = "https://api.goldsky.com/api/public/project_clu5ow773st3501un98cv0861/subgraphs/TeavaultV3PairLinea-linea/1.0/gn"; +export const V3_SUBGRAPH_URL = "https://api.goldsky.com/api/public/project_clu5ow773st3501un98cv0861/subgraphs/TeavaultV3PairLinea-linea/surge/gn"; export const client = createPublicClient({ chain: linea, diff --git a/adapters/teahouse/src/sdk/vaults.ts b/adapters/teahouse/src/sdk/vaults.ts index fc40802e..a04ff07b 100644 --- a/adapters/teahouse/src/sdk/vaults.ts +++ b/adapters/teahouse/src/sdk/vaults.ts @@ -5,5 +5,6 @@ export const VAULT_ADDRESS = [ "0x07811284e36fDc45f65cd56FC7c6929855d6A0cc", "0x73d9ccd3017b41e9b29f1e4a49d5468b52bd17c6", "0x7d372Cc969211502D5C3a5721a85fc382f83bC8F", - "0x172Dba015dDfA642a3E3e0e8BaB040468D8D9879" + "0x172Dba015dDfA642a3E3e0e8BaB040468D8D9879", + "0x1adC5E10933b696FA5311DB5339F9a15E959e2B5" ]; \ No newline at end of file diff --git a/adapters/zerolend/package.json b/adapters/zerolend/package.json index 1c94c611..149ad500 100644 --- a/adapters/zerolend/package.json +++ b/adapters/zerolend/package.json @@ -15,6 +15,7 @@ "author": "", "license": "UNLICENSED", "dependencies": { + "csv-parser": "^3.0.0", "fast-csv": "^5.0.1" }, "devDependencies": { diff --git a/adapters/zerolend/src/index.ts b/adapters/zerolend/src/index.ts index 6dd19851..ed58aaa1 100644 --- a/adapters/zerolend/src/index.ts +++ b/adapters/zerolend/src/index.ts @@ -1,165 +1,120 @@ import { write } from "fast-csv"; import fs from "fs"; - -interface IResponse { - data: { - userReserves: IData[]; - }; -} - -interface IData { - user: { - id: string; - }; - currentTotalDebt: string; - currentATokenBalance: string; - reserve: { - underlyingAsset: string; - symbol: string; - name: string; - }; - liquidityRate: "0"; -} - -type OutputDataSchemaRow = { - block_number: number; - timestamp: number; - user_address: string; - token_address: string; - token_balance: number; - token_symbol: string; - usd_price: number; +import csv from "csv-parser"; +import { BlockData, getUserTVLByBlock } from "./sdk";
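+// NOTE: the subgraph-querying logic for zerolend now lives in ./sdk +// (getUserTVLByBlock); this file only re-exports it and wires it to the +// hourly_blocks.csv runner below.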
+ +// const readBlocksFromCSV = async (filePath: string): Promise<BlockData[]> => { +// const blocks: BlockData[] = []; + +// await new Promise((resolve, reject) => { +// fs.createReadStream(filePath) +// .pipe(csv()) // Specify the separator as '\t' for TSV files +// .on("data", (row: any) => { +// const blockNumber = parseInt(row.number, 10); +// const blockTimestamp = parseInt(row.block_timestamp, 10); +// if (!isNaN(blockNumber) && blockTimestamp) { +// blocks.push({ blockNumber: blockNumber, blockTimestamp }); +// } +// }) +// .on("end", resolve) +// .on("error", reject); +// }); + +// return blocks; +// }; + +// readBlocksFromCSV("hourly_blocks.csv") +// .then(async (blocks) => { +// const allCsvRows: any[] = []; // Array to accumulate CSV rows for all blocks +// const batchSize = 10; // Size of batch to trigger writing to the file +// let i = 0; +// console.log("block number received") +// for (const block of blocks) { +// try { +// const result = await getUserTVLByBlock(block); + +// // Accumulate CSV rows for all blocks +// allCsvRows.push(...result); + +// i++; +// console.log(`Processed block ${i}`); + +// // Write to file when batch size is reached or at the end of loop +// if (i % batchSize === 0 || i === blocks.length) { +// const ws = fs.createWriteStream(`outputData.csv`, { +// flags: i === batchSize ? "w" : "a", +// }); +// write(allCsvRows, { headers: i === batchSize ? true : false }) +// .pipe(ws) +// .on("finish", () => { +// console.log(`CSV file has been written.`); +// }); + +// // Clear the accumulated CSV rows +// allCsvRows.length = 0; +// } +// } catch (error) { +// console.error(`An error occurred for block ${block}:`, error); +// } +// } +// }) +// .catch((err) => { +// console.error("Error reading CSV file:", err); +// }); + +module.exports = { + getUserTVLByBlock, }; -interface BlockData { - blockNumber: number; - blockTimestamp: number; -} - -const queryURL = - "https://api.goldsky.com/api/public/project_clsk1wzatdsls01wchl2e4n0y/subgraphs/zerolend-linea/1.0.0/gn"; - -const getBlockNumber = async () => { - const data = { - jsonrpc: "2.0", - method: "eth_blockNumber", - params: [], - id: 83, - }; - - const res = await fetch("https://rpc.linea.build", { - method: "POST", - body: JSON.stringify(data), - headers: { "Content-Type": "application/json" }, +const readBlocksFromCSV = async (filePath: string): Promise<BlockData[]> => { + const blocks: BlockData[] = []; + + await new Promise((resolve, reject) => { + fs.createReadStream(filePath) + .pipe(csv()) // Specify the separator as '\t' for TSV files + .on('data', (row) => { + const blockNumber = parseInt(row.number, 10); + const blockTimestamp = parseInt(row.timestamp, 10); + if (!isNaN(blockNumber) && blockTimestamp) { + blocks.push({ blockNumber: blockNumber, blockTimestamp }); + } + }) + .on('end', () => { + resolve(); + }) + .on('error', (err) => { + reject(err); + }); }); - const json = await res.json(); - return Number(json.result); + return blocks; }; -export const main = async (): Promise<OutputDataSchemaRow[]> => { - const timestamp = Date.now(); - const first = 1000; - const rows: OutputDataSchemaRow[] = []; - const blockNumber = await getBlockNumber(); - - let lastAddress = "0x0000000000000000000000000000000000000000"; - - do { - const query = `{ - userReserves( - where: {and: [{or: [{currentTotalDebt_gt: 0}, {currentATokenBalance_gt: 0}]}, {user_gt: "${lastAddress}"}]} - first: ${first} - ) { - user { - id - } - currentTotalDebt - currentATokenBalance - reserve { - underlyingAsset - symbol - name - } -
liquidityRate +readBlocksFromCSV('hourly_blocks.csv').then(async (blocks: BlockData[]) => { + console.log(blocks); + const allCsvRows: any[] = []; // Array to accumulate CSV rows for all blocks + const batchSize = 1000; // Size of batch to trigger writing to the file + let i = 0; + + for (const block of blocks) { + try { + const result = await getUserTVLByBlock(block); + for (let i = 0; i < result.length; i++) { + allCsvRows.push(result[i]) } - }`; - - const response = await fetch(queryURL, { - method: "POST", - body: JSON.stringify({ query }), - headers: { "Content-Type": "application/json" }, - }); - const batch: IResponse = await response.json(); - - if (batch.data.userReserves.length <= 2) break; - - batch.data.userReserves.forEach((data: IData) => { - const balance = - BigInt(data.currentATokenBalance) - BigInt(data.currentTotalDebt); - - if (balance !== 0n) - rows.push({ - block_number: blockNumber, - timestamp, - user_address: data.user.id, - token_address: data.reserve.underlyingAsset, - token_balance: Number(balance), - token_symbol: data.reserve.symbol, - usd_price: 0, - }); - - lastAddress = data.user.id; - }); - - console.log( - `Processed ${rows.length} rows. Last address is ${lastAddress}` - ); - } while (true); - - return rows; -}; - -export const writeCSV = async (data: OutputDataSchemaRow[]) => { - // File path where the CSV will be saved - const filePath = "outputData.csv"; - const headers = [ - "block_number", - "timestamp", - "user_address", - "token_address", - "token_balance", - "token_symbol", - ]; - - // Create a write stream - const fileStream = fs.createWriteStream(filePath); - - // Create a CSV writer - const csvStream = write([]); - - csvStream.pipe(fileStream); - csvStream.write(headers); - data.forEach((row) => { - csvStream.write([ - row.block_number, - row.timestamp, - row.user_address, - row.token_address, - row.token_balance, - row.token_symbol, - ]); - }); - - csvStream.on("finish", () => { - console.log("CSV file has been written successfully."); - csvStream.end(); + } catch (error) { + console.error(`An error occurred for block ${block}:`, error); + } + } + await new Promise((resolve, reject) => { + const ws = fs.createWriteStream(`outputData.csv`, { flags: 'w' }); + write(allCsvRows, { headers: true }) + .pipe(ws) + .on("finish", () => { + console.log(`CSV file has been written.`); + resolve; + }); }); -}; - -export const getUserTVLByBlock = async (_blocks: BlockData) => await main(); - -main().then(async (data) => { - console.log("Done", data); - await writeCSV(data); -}); +}).catch((err) => { + console.error('Error reading CSV file:', err); +}); \ No newline at end of file diff --git a/adapters/zerolend/src/sdk.ts b/adapters/zerolend/src/sdk.ts new file mode 100644 index 00000000..efb596db --- /dev/null +++ b/adapters/zerolend/src/sdk.ts @@ -0,0 +1,102 @@ +interface IResponse { + data: { + userReserves: IData[]; + }; +} + +interface IData { + user: { + id: string; + }; + currentTotalDebt: string; + currentATokenBalance: string; + reserve: { + underlyingAsset: string; + symbol: string; + name: string; + }; + liquidityRate: "0"; +} + +type OutputDataSchemaRow = { + block_number: number; + timestamp: number; + user_address: string; + token_address: string; + token_balance: number; + token_symbol: string; + usd_price: number; +}; + +export interface BlockData { + blockNumber: number; + blockTimestamp: number; +} + +const queryURL = + "https://api.goldsky.com/api/public/project_clsk1wzatdsls01wchl2e4n0y/subgraphs/zerolend-linea/1.0.0/gn"; + +export const 
getUserTVLByBlock = async ( + blocks: BlockData +): Promise<OutputDataSchemaRow[]> => { + const timestamp = blocks.blockTimestamp; + const first = 1000; + const rows: OutputDataSchemaRow[] = []; + + let lastAddress = "0x0000000000000000000000000000000000000000"; + + do { + const query = `{ + userReserves( + block: {number: ${blocks.blockNumber}} + where: {and: [{or: [{currentTotalDebt_gt: 0}, {currentATokenBalance_gt: 0}]}, {user_gt: "${lastAddress}"}]} + first: ${first} + ) { + user { + id + } + currentTotalDebt + currentATokenBalance + reserve { + underlyingAsset + symbol + name + } + liquidityRate + } + }`; + + const response = await fetch(queryURL, { + method: "POST", + body: JSON.stringify({ query }), + headers: { "Content-Type": "application/json" }, + }); + const batch: IResponse = await response.json(); + + if (!batch.data || batch.data.userReserves.length == 0) break; + + batch.data.userReserves.forEach((data: IData) => { + const balance = + BigInt(data.currentATokenBalance) - BigInt(data.currentTotalDebt); + + if (balance !== 0n) + rows.push({ + block_number: blocks.blockNumber, + timestamp, + user_address: data.user.id, + token_address: data.reserve.underlyingAsset, + token_balance: Number(balance), + token_symbol: data.reserve.symbol, + usd_price: 0, + }); + + lastAddress = data.user.id; + }); + + console.log( + `Processed ${rows.length} rows. Last address is ${lastAddress}` + ); + } while (true); + + return rows; +}; diff --git a/adapters/zerolend/yarn.lock b/adapters/zerolend/yarn.lock index 7c7347f1..1f64dc83 100644 --- a/adapters/zerolend/yarn.lock +++ b/adapters/zerolend/yarn.lock @@ -3,89 +3,101 @@ "@fast-csv/format@5.0.0": - "integrity" "sha512-IyMpHwYIOGa2f0BJi6Wk55UF0oBA5urdIydoEDYxPo88LFbeb3Yr4rgpu98OAO1glUWheSnNtUgS80LE+/dqmw==" - "resolved" "https://registry.npmjs.org/@fast-csv/format/-/format-5.0.0.tgz" - "version" "5.0.0" + version "5.0.0" + resolved "https://registry.npmjs.org/@fast-csv/format/-/format-5.0.0.tgz" + integrity sha512-IyMpHwYIOGa2f0BJi6Wk55UF0oBA5urdIydoEDYxPo88LFbeb3Yr4rgpu98OAO1glUWheSnNtUgS80LE+/dqmw== dependencies: - "lodash.escaperegexp" "^4.1.2" - "lodash.isboolean" "^3.0.3" - "lodash.isequal" "^4.5.0" - "lodash.isfunction" "^3.0.9" - "lodash.isnil" "^4.0.0" + lodash.escaperegexp "^4.1.2" + lodash.isboolean "^3.0.3" + lodash.isequal "^4.5.0" + lodash.isfunction "^3.0.9" + lodash.isnil "^4.0.0" "@fast-csv/parse@5.0.0": - "integrity" "sha512-ecF8tCm3jVxeRjEB6VPzmA+1wGaJ5JgaUX2uesOXdXD6qQp0B3EdshOIed4yT1Xlj/F2f8v4zHSo0Oi31L697g==" - "resolved" "https://registry.npmjs.org/@fast-csv/parse/-/parse-5.0.0.tgz" - "version" "5.0.0" + version "5.0.0" + resolved "https://registry.npmjs.org/@fast-csv/parse/-/parse-5.0.0.tgz" + integrity sha512-ecF8tCm3jVxeRjEB6VPzmA+1wGaJ5JgaUX2uesOXdXD6qQp0B3EdshOIed4yT1Xlj/F2f8v4zHSo0Oi31L697g== dependencies: - "lodash.escaperegexp" "^4.1.2" - "lodash.groupby" "^4.6.0" - "lodash.isfunction" "^3.0.9" - "lodash.isnil" "^4.0.0" - "lodash.isundefined" "^3.0.1" - "lodash.uniq" "^4.5.0" + lodash.escaperegexp "^4.1.2" + lodash.groupby "^4.6.0" + lodash.isfunction "^3.0.9" + lodash.isnil "^4.0.0" + lodash.isundefined "^3.0.1" + lodash.uniq "^4.5.0" "@types/node@^20.11.17": - "integrity" "sha512-wq0cICSkRLVaf3UGLMGItu/PtdY7oaXaI/RVU+xliKVOtRna3PRY57ZDfztpDL0n11vfymMUnXv8QwYCO7L1wg==" - "resolved" "https://registry.npmjs.org/@types/node/-/node-20.12.7.tgz" - "version" "20.12.7" + version "20.12.7" + resolved "https://registry.npmjs.org/@types/node/-/node-20.12.7.tgz" + integrity
sha512-wq0cICSkRLVaf3UGLMGItu/PtdY7oaXaI/RVU+xliKVOtRna3PRY57ZDfztpDL0n11vfymMUnXv8QwYCO7L1wg== dependencies: - "undici-types" "~5.26.4" + undici-types "~5.26.4" -"fast-csv@^5.0.1": - "integrity" "sha512-Q43zC4NdQD5MAWOVQOF8KA+D6ddvTJjX2ib8zqysm74jZhtk6+dc8C75/OqRV6Y9CLc4kgvbC3PLG8YL4YZfgw==" - "resolved" "https://registry.npmjs.org/fast-csv/-/fast-csv-5.0.1.tgz" - "version" "5.0.1" +csv-parser@^3.0.0: + version "3.0.0" + resolved "https://registry.npmjs.org/csv-parser/-/csv-parser-3.0.0.tgz" + integrity sha512-s6OYSXAK3IdKqYO33y09jhypG/bSDHPuyCme/IdEHfWpLf/jKcpitVFyOC6UemgGk8v7Q5u2XE0vvwmanxhGlQ== + dependencies: + minimist "^1.2.0" + +fast-csv@^5.0.1: + version "5.0.1" + resolved "https://registry.npmjs.org/fast-csv/-/fast-csv-5.0.1.tgz" + integrity sha512-Q43zC4NdQD5MAWOVQOF8KA+D6ddvTJjX2ib8zqysm74jZhtk6+dc8C75/OqRV6Y9CLc4kgvbC3PLG8YL4YZfgw== dependencies: "@fast-csv/format" "5.0.0" "@fast-csv/parse" "5.0.0" -"lodash.escaperegexp@^4.1.2": - "integrity" "sha512-TM9YBvyC84ZxE3rgfefxUWiQKLilstD6k7PTGt6wfbtXF8ixIJLOL3VYyV/z+ZiPLsVxAsKAFVwWlWeb2Y8Yyw==" - "resolved" "https://registry.npmjs.org/lodash.escaperegexp/-/lodash.escaperegexp-4.1.2.tgz" - "version" "4.1.2" - -"lodash.groupby@^4.6.0": - "integrity" "sha512-5dcWxm23+VAoz+awKmBaiBvzox8+RqMgFhi7UvX9DHZr2HdxHXM/Wrf8cfKpsW37RNrvtPn6hSwNqurSILbmJw==" - "resolved" "https://registry.npmjs.org/lodash.groupby/-/lodash.groupby-4.6.0.tgz" - "version" "4.6.0" - -"lodash.isboolean@^3.0.3": - "integrity" "sha512-Bz5mupy2SVbPHURB98VAcw+aHh4vRV5IPNhILUCsOzRmsTmSQ17jIuqopAentWoehktxGd9e/hbIXq980/1QJg==" - "resolved" "https://registry.npmjs.org/lodash.isboolean/-/lodash.isboolean-3.0.3.tgz" - "version" "3.0.3" - -"lodash.isequal@^4.5.0": - "integrity" "sha512-pDo3lu8Jhfjqls6GkMgpahsF9kCyayhgykjyLMNFTKWrpVdAQtYyB4muAMWozBB4ig/dtWAmsMxLEI8wuz+DYQ==" - "resolved" "https://registry.npmjs.org/lodash.isequal/-/lodash.isequal-4.5.0.tgz" - "version" "4.5.0" - -"lodash.isfunction@^3.0.9": - "integrity" "sha512-AirXNj15uRIMMPihnkInB4i3NHeb4iBtNg9WRWuK2o31S+ePwwNmDPaTL3o7dTJ+VXNZim7rFs4rxN4YU1oUJw==" - "resolved" "https://registry.npmjs.org/lodash.isfunction/-/lodash.isfunction-3.0.9.tgz" - "version" "3.0.9" - -"lodash.isnil@^4.0.0": - "integrity" "sha512-up2Mzq3545mwVnMhTDMdfoG1OurpA/s5t88JmQX809eH3C8491iu2sfKhTfhQtKY78oPNhiaHJUpT/dUDAAtng==" - "resolved" "https://registry.npmjs.org/lodash.isnil/-/lodash.isnil-4.0.0.tgz" - "version" "4.0.0" - -"lodash.isundefined@^3.0.1": - "integrity" "sha512-MXB1is3s899/cD8jheYYE2V9qTHwKvt+npCwpD+1Sxm3Q3cECXCiYHjeHWXNwr6Q0SOBPrYUDxendrO6goVTEA==" - "resolved" "https://registry.npmjs.org/lodash.isundefined/-/lodash.isundefined-3.0.1.tgz" - "version" "3.0.1" - -"lodash.uniq@^4.5.0": - "integrity" "sha512-xfBaXQd9ryd9dlSDvnvI0lvxfLJlYAZzXomUYzLKtUeOQvOP5piqAWuGtrhWeqaXK9hhoM/iyJc5AV+XfsX3HQ==" - "resolved" "https://registry.npmjs.org/lodash.uniq/-/lodash.uniq-4.5.0.tgz" - "version" "4.5.0" - -"typescript@^5.3.3": - "integrity" "sha512-vcI4UpRgg81oIRUFwR0WSIHKt11nJ7SAVlYNIu+QpqeyXP+gpQJy/Z4+F0aGxSE4MqwjyXvW/TzgkLAx2AGHwQ==" - "resolved" "https://registry.npmjs.org/typescript/-/typescript-5.4.5.tgz" - "version" "5.4.5" - -"undici-types@~5.26.4": - "integrity" "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==" - "resolved" "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz" - "version" "5.26.5" +lodash.escaperegexp@^4.1.2: + version "4.1.2" + resolved "https://registry.npmjs.org/lodash.escaperegexp/-/lodash.escaperegexp-4.1.2.tgz" + integrity 
sha512-TM9YBvyC84ZxE3rgfefxUWiQKLilstD6k7PTGt6wfbtXF8ixIJLOL3VYyV/z+ZiPLsVxAsKAFVwWlWeb2Y8Yyw== + +lodash.groupby@^4.6.0: + version "4.6.0" + resolved "https://registry.npmjs.org/lodash.groupby/-/lodash.groupby-4.6.0.tgz" + integrity sha512-5dcWxm23+VAoz+awKmBaiBvzox8+RqMgFhi7UvX9DHZr2HdxHXM/Wrf8cfKpsW37RNrvtPn6hSwNqurSILbmJw== + +lodash.isboolean@^3.0.3: + version "3.0.3" + resolved "https://registry.npmjs.org/lodash.isboolean/-/lodash.isboolean-3.0.3.tgz" + integrity sha512-Bz5mupy2SVbPHURB98VAcw+aHh4vRV5IPNhILUCsOzRmsTmSQ17jIuqopAentWoehktxGd9e/hbIXq980/1QJg== + +lodash.isequal@^4.5.0: + version "4.5.0" + resolved "https://registry.npmjs.org/lodash.isequal/-/lodash.isequal-4.5.0.tgz" + integrity sha512-pDo3lu8Jhfjqls6GkMgpahsF9kCyayhgykjyLMNFTKWrpVdAQtYyB4muAMWozBB4ig/dtWAmsMxLEI8wuz+DYQ== + +lodash.isfunction@^3.0.9: + version "3.0.9" + resolved "https://registry.npmjs.org/lodash.isfunction/-/lodash.isfunction-3.0.9.tgz" + integrity sha512-AirXNj15uRIMMPihnkInB4i3NHeb4iBtNg9WRWuK2o31S+ePwwNmDPaTL3o7dTJ+VXNZim7rFs4rxN4YU1oUJw== + +lodash.isnil@^4.0.0: + version "4.0.0" + resolved "https://registry.npmjs.org/lodash.isnil/-/lodash.isnil-4.0.0.tgz" + integrity sha512-up2Mzq3545mwVnMhTDMdfoG1OurpA/s5t88JmQX809eH3C8491iu2sfKhTfhQtKY78oPNhiaHJUpT/dUDAAtng== + +lodash.isundefined@^3.0.1: + version "3.0.1" + resolved "https://registry.npmjs.org/lodash.isundefined/-/lodash.isundefined-3.0.1.tgz" + integrity sha512-MXB1is3s899/cD8jheYYE2V9qTHwKvt+npCwpD+1Sxm3Q3cECXCiYHjeHWXNwr6Q0SOBPrYUDxendrO6goVTEA== + +lodash.uniq@^4.5.0: + version "4.5.0" + resolved "https://registry.npmjs.org/lodash.uniq/-/lodash.uniq-4.5.0.tgz" + integrity sha512-xfBaXQd9ryd9dlSDvnvI0lvxfLJlYAZzXomUYzLKtUeOQvOP5piqAWuGtrhWeqaXK9hhoM/iyJc5AV+XfsX3HQ== + +minimist@^1.2.0: + version "1.2.8" + resolved "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz" + integrity sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA== + +typescript@^5.3.3: + version "5.4.5" + resolved "https://registry.npmjs.org/typescript/-/typescript-5.4.5.tgz" + integrity sha512-vcI4UpRgg81oIRUFwR0WSIHKt11nJ7SAVlYNIu+QpqeyXP+gpQJy/Z4+F0aGxSE4MqwjyXvW/TzgkLAx2AGHwQ== + +undici-types@~5.26.4: + version "5.26.5" + resolved "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz" + integrity sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==
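
One editorial note on the pattern the new `adapters/zerolend/src/sdk.ts` introduces above: instead of paging with `skip`/`first` (which graph-node endpoints typically cap), it pins each query to the snapshot block and advances an address cursor via `user_gt`, relying on entities coming back in ascending id order (graph-node's default). Below is a minimal, self-contained sketch of that keyset-pagination pattern; `SUBGRAPH_URL` is a placeholder and the entity is trimmed to the single field the cursor needs, so this is an illustration of the technique, not the real zerolend endpoint or schema.

```
// Keyset pagination against a block-pinned subgraph query (sketch).
// SUBGRAPH_URL and the reduced entity shape are illustrative assumptions.
const SUBGRAPH_URL = "https://example.com/subgraphs/some-protocol";

interface ReserveRow {
  user: { id: string };
}

export const fetchAllUserReservesAtBlock = async (
  blockNumber: number
): Promise<ReserveRow[]> => {
  const first = 1000;
  const rows: ReserveRow[] = [];
  // Start below every real address; each page moves the cursor to the last
  // address seen, so no skip offset is needed and pages cannot drift.
  let lastAddress = "0x0000000000000000000000000000000000000000";
  while (true) {
    const query = `{
      userReserves(
        block: {number: ${blockNumber}}
        where: {user_gt: "${lastAddress}"}
        first: ${first}
      ) { user { id } }
    }`;
    const response = await fetch(SUBGRAPH_URL, {
      method: "POST",
      body: JSON.stringify({ query }),
      headers: { "Content-Type": "application/json" },
    });
    const batch = (await response.json()).data?.userReserves ?? [];
    if (batch.length === 0) break; // cursor passed the highest address
    rows.push(...batch);
    lastAddress = batch[batch.length - 1].user.id;
  }
  return rows;
};
```

The same shape explains why the sdk.ts loop terminates: once the cursor passes the highest user address the query returns an empty page, which is exactly the `userReserves.length == 0` exit condition the adapter checks after pinning to `blocks.blockNumber`.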