Metavault : dex : tvl by user #143

Merged · 3 commits · May 22, 2024
9 changes: 2 additions & 7 deletions adapters/metavault/package.json
@@ -3,6 +3,7 @@
"version": "1.0.0",
"description": "",
"main": "index.js",
"type": "commonjs",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1",
"start": "node dist/index.js",
@@ -12,17 +13,11 @@
},
"keywords": [],
"author": "",
"license": "ISC",
"license": "UNLICENSED",
"dependencies": {
"@types/big.js": "^6.2.2",
"big.js": "^6.2.1",
"bignumber.js": "^9.1.2",
"csv-parser": "^3.0.0",
"decimal.js-light": "^2.5.1",
"fast-csv": "^5.0.1",
"jsbi": "^4.3.0",
"tiny-invariant": "^1.3.1",
"toformat": "^2.0.0",
"viem": "^2.8.13"
},
"devDependencies": {
221 changes: 91 additions & 130 deletions adapters/metavault/src/index.ts
@@ -1,145 +1,106 @@
import { CHAINS, PROTOCOLS, AMM_TYPES } from "./sdk/config";
import { getPositionDetailsFromPosition, getPositionsForAddressByPoolAtBlock, getTimestampAtBlock, getTradeLiquidityForAddressByPoolAtBlock } from "./sdk/subgraphDetails";
(BigInt.prototype as any).toJSON = function () {
return this.toString();
};
import { write } from 'fast-csv';
import csv from 'csv-parser';
import fs from 'fs';
import { write } from 'fast-csv';

interface BlockData {
blockNumber: number;
blockTimestamp: number;
}

type OutputDataSchemaRow = {
block_number: number;
timestamp: number;
user_address: string;
token_address: string;
token_balance: number;
token_symbol?: string;
usd_price: number;
};

import { BlockData, OutputSchemaRow } from './sdk/types';
import { getTradeLiquidityForAddressByPoolAtBlock, getV2UserPositionsAtBlock, getV3UserPositionsAtBlock } from './sdk/lib';

export const getUserTVLByBlock = async ({
blockNumber,
blockTimestamp,
}: BlockData): Promise<OutputDataSchemaRow[]> => {
return await getPoolData({ blockNumber, blockTimestamp });
}

export const getPoolData = async ({
blockNumber,
blockTimestamp,
}: BlockData): Promise<OutputDataSchemaRow[]> => {
const allCsvRows: OutputDataSchemaRow[] = []; // Array to accumulate CSV rows for all blocks
try {
// const blockTimestamp = new Date(await getTimestampAtBlock(blockNumber)).toISOString();
const positions = await getPositionsForAddressByPoolAtBlock(
blockNumber, "", "", CHAINS.L2_CHAIN_ID, PROTOCOLS.METAVAULT, AMM_TYPES.UNISWAPV3
);
const readBlocksFromCSV = async (filePath: string): Promise<BlockData[]> => {
const blocks: BlockData[] = [];

console.log(`Block: ${blockNumber}`);
console.log("Positions: ", positions.length);
await new Promise<void>((resolve, reject) => {
fs.createReadStream(filePath)
.pipe(csv()) // csv-parser defaults to comma-separated input
.on('data', (row) => {
const blockNumber = parseInt(row.number, 10);
const blockTimestamp = parseInt(row.timestamp, 10);
if (!isNaN(blockNumber) && blockTimestamp) {
blocks.push({ blockNumber: blockNumber, blockTimestamp });
}
})
.on('end', () => {
resolve();
})
.on('error', (err) => {
reject(err);
});
});

// Assuming this part of the logic remains the same
let positionsWithUSDValue = positions.map(getPositionDetailsFromPosition);
// let lpValueByUsers = getLPValueByUserAndPoolFromPositions(positionsWithUSDValue);
return blocks;
};
readBlocksFromCSV('hourly_blocks.csv').then(async (blocks: BlockData[]) => {
console.log(blocks);
const allCsvRows: any[] = []; // Array to accumulate CSV rows for all blocks
const batchSize = 1000; // Size of batch to trigger writing to the file
let i = 0;

positionsWithUSDValue.forEach((value, key) => {
// Accumulate CSV row data
if (value.token0DecimalValue > 0) {
allCsvRows.push({
block_number: blockNumber,
timestamp: blockTimestamp,
user_address: value.owner,
token_address: value.token0.id,
token_balance: value.token0DecimalValue,
usd_price: 0,
});
}
if (value.token1DecimalValue > 0) {
allCsvRows.push({
block_number: blockNumber,
timestamp: blockTimestamp,
user_address: value.owner,
token_address: value.token1.id,
token_balance: value.token1DecimalValue,
usd_price: 0,
});
}
});
const liquidities = await getTradeLiquidityForAddressByPoolAtBlock(
blockNumber, "", "", CHAINS.L2_CHAIN_ID, PROTOCOLS.METAVAULT, AMM_TYPES.TRADE
);
liquidities.forEach((value, key) => {
if (value.amount > 0) {
allCsvRows.push({
block_number: blockNumber,
timestamp: blockTimestamp,
user_address: value.user,
token_address: value.asset,
token_balance: value.amount,
usd_price: 0,
});
}
for (const block of blocks) {
try {
const result = await getUserTVLByBlock(block);
allCsvRows.push(...result);
} catch (error) {
console.error(`An error occurred for block ${block}:`, error);
}
}
await new Promise((resolve, reject) => {
const ws = fs.createWriteStream(`outputData.csv`, { flags: 'w' });
write(allCsvRows, { headers: true })
.pipe(ws)
.on("finish", () => {
console.log(`CSV file has been written.`);
resolve();
});
});
}).catch((err) => {
console.error('Error reading CSV file:', err);
});
export const getUserTVLByBlock = async ({ blockNumber, blockTimestamp }: BlockData): Promise<OutputSchemaRow[]> => {
const result: OutputSchemaRow[] = []

} catch (error) {
console.error(`An error occurred for block ${blockNumber}:`, error);
}
return allCsvRows
}
const [v2Positions, v3Positions, tradeLiquidities] = await Promise.all([
getV2UserPositionsAtBlock(blockNumber),
getV3UserPositionsAtBlock(blockNumber),
getTradeLiquidityForAddressByPoolAtBlock(blockNumber)
])

const readBlocksFromCSV = async (filePath: string): Promise<BlockData[]> => {
const blocks: BlockData[] = [];
// combine v2 & v3
const combinedPositions = [...v2Positions, ...v3Positions]
const balances: Record<string, Record<string, bigint>> = {}
for (const position of combinedPositions) {
balances[position.user] = balances[position.user] || {}

await new Promise<void>((resolve, reject) => {
fs.createReadStream(filePath)
.pipe(csv()) // csv-parser defaults to comma-separated input
.on('data', (row) => {
const blockNumber = parseInt(row.number, 10);
const blockTimestamp = parseInt(row.timestamp, 10);
if (!isNaN(blockNumber) && blockTimestamp) {
blocks.push({ blockNumber: blockNumber, blockTimestamp });
}
})
.on('end', () => {
resolve();
})
.on('error', (err) => {
reject(err);
});
});
if (position.token0.balance > 0n)
balances[position.user][position.token0.address] =
(balances?.[position.user]?.[position.token0.address] ?? 0n)
+ position.token0.balance

return blocks;
};
if (position.token1.balance > 0n)
balances[position.user][position.token1.address] =
(balances?.[position.user]?.[position.token1.address] ?? 0n)
+ position.token1.balance
}
for (const position of tradeLiquidities) {
balances[position.user] = balances[position.user] || {}

readBlocksFromCSV('hourly_blocks.csv').then(async (blocks: BlockData[]) => {
console.log(blocks);
const allCsvRows: any[] = []; // Array to accumulate CSV rows for all blocks
if (position.amount > 0n)
balances[position.user][position.asset] =
(balances?.[position.user]?.[position.asset] ?? 0n)
+ position.amount
}
for (const [user, tokenBalances] of Object.entries(balances)) {
for (const [token, balance] of Object.entries(tokenBalances)) {
result.push({
block_number: blockNumber,
timestamp: blockTimestamp,
user_address: user,
token_address: token,
token_balance: balance,
token_symbol: "",
usd_price: 0
})
}
}

for (const block of blocks) {
try {
const result = await getUserTVLByBlock(block);
for(let i = 0; i < result.length; i++){
allCsvRows.push(result[i])
}
} catch (error) {
console.error(`An error occurred for block ${block}:`, error);
}
}
await new Promise((resolve, reject) => {
const ws = fs.createWriteStream(`outputData.csv`, { flags: 'w' });
write(allCsvRows, { headers: true })
.pipe(ws)
.on("finish", () => {
console.log(`CSV file has been written.`);
resolve();
});
});
}).catch((err) => {
console.error('Error reading CSV file:', err);
});
return result
};
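
For reference, a minimal usage sketch of the new `getUserTVLByBlock` entry point; the block number and timestamp below are illustrative values, not taken from this PR. When the module is run directly it instead reads `hourly_blocks.csv` (columns `number,timestamp`) and writes the aggregated rows to `outputData.csv`, as in the diff above.

```ts
import { getUserTVLByBlock } from './index';

// Illustrative Linea block; any { blockNumber, blockTimestamp } pair works here.
const block = { blockNumber: 4457449, blockTimestamp: 1716336000 };

getUserTVLByBlock(block).then((rows) => {
  // Each row is one user's balance of one token at that block,
  // aggregated across v2 positions, v3 positions, and trade liquidity.
  for (const row of rows.slice(0, 5)) {
    console.log(row.user_address, row.token_address, row.token_balance.toString());
  }
});
```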
35 changes: 0 additions & 35 deletions adapters/metavault/src/sdk/blockApi.ts

This file was deleted.

31 changes: 9 additions & 22 deletions adapters/metavault/src/sdk/config.ts
@@ -1,24 +1,11 @@
export const enum CHAINS{
L2_CHAIN_ID = 59144,
}
export const enum PROTOCOLS{
METAVAULT = 0,
}
import { createPublicClient, http } from "viem";
import { linea } from "viem/chains"

export const enum AMM_TYPES{
UNISWAPV3 = 0,
TRADE = 1
}
export const V2_SUBGRAPH_URL = "https://api.studio.thegraph.com/query/55804/linea-v2/version/latest"
export const V3_SUBGRAPH_URL = "https://api.studio.thegraph.com/query/55804/linea-v3/version/latest"
export const TRADE_SUBGRAPH_URL = "https://api.studio.thegraph.com/query/55804/linea-trade/version/latest"

export const SUBGRAPH_URLS = {
[CHAINS.L2_CHAIN_ID]: {
[PROTOCOLS.METAVAULT]: {
[AMM_TYPES.UNISWAPV3]: "https://api.studio.thegraph.com/query/55804/linea-v3/version/latest",
[AMM_TYPES.TRADE]: "https://api.studio.thegraph.com/query/55804/linea-trade/version/latest"
}
},

}
export const RPC_URLS = {
[CHAINS.L2_CHAIN_ID]: "https://rpc.linea.build"
}
export const client = createPublicClient({
chain: linea,
transport: http("https://rpc.linea.build")
})
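
The new `client` export is a plain viem public client for Linea. As an aside, here is a sketch of how a helper could use it to recover a block's timestamp; `getBlock` is viem's standard public action, and the helper name only mirrors the `getTimestampAtBlock` import the old index.ts used, so treat it as illustrative.

```ts
import { client } from "./sdk/config";

// Hypothetical helper: fetch a block's timestamp via the shared Linea RPC client.
async function getTimestampAtBlock(blockNumber: number): Promise<number> {
  const block = await client.getBlock({ blockNumber: BigInt(blockNumber) });
  return Number(block.timestamp); // viem returns the timestamp as a bigint (seconds)
}
```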
24 changes: 0 additions & 24 deletions adapters/metavault/src/sdk/entities/positions.ts

This file was deleted.
