correct read block from csv added
nitish-91 committed May 9, 2024
1 parent e9d3315 commit 26dcec8
Showing 3 changed files with 98 additions and 48 deletions.
adapters/block_numbers.tsv: 0 additions & 3 deletions

This file was deleted.

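For context, deleting this file matches the reader change shown below: the adapter now takes its input from hourly_blocks.csv, which the updated index.ts parses as comma-separated with "number" and "timestamp" columns. A minimal sketch of the expected input file, with illustrative values only:

number,timestamp
4457308,1715212800
4457400,1715216400
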
adapters/zerolend/src/index.ts: 96 additions & 43 deletions
@@ -3,65 +3,118 @@ import fs from "fs";
 import csv from "csv-parser";
 import { BlockData, getUserTVLByBlock } from "./sdk";
 
+// const readBlocksFromCSV = async (filePath: string): Promise<BlockData[]> => {
+//   const blocks: BlockData[] = [];
+
+//   await new Promise<void>((resolve, reject) => {
+//     fs.createReadStream(filePath)
+//       .pipe(csv()) // Specify the separator as '\t' for TSV files
+//       .on("data", (row: any) => {
+//         const blockNumber = parseInt(row.number, 10);
+//         const blockTimestamp = parseInt(row.block_timestamp, 10);
+//         if (!isNaN(blockNumber) && blockTimestamp) {
+//           blocks.push({ blockNumber: blockNumber, blockTimestamp });
+//         }
+//       })
+//       .on("end", resolve)
+//       .on("error", reject);
+//   });
+
+//   return blocks;
+// };
+
+// readBlocksFromCSV("hourly_blocks.csv")
+//   .then(async (blocks) => {
+//     const allCsvRows: any[] = []; // Array to accumulate CSV rows for all blocks
+//     const batchSize = 10; // Size of batch to trigger writing to the file
+//     let i = 0;
+//     console.log("block number received")
+//     for (const block of blocks) {
+//       try {
+//         const result = await getUserTVLByBlock(block);
+
+//         // Accumulate CSV rows for all blocks
+//         allCsvRows.push(...result);
+
+//         i++;
+//         console.log(`Processed block ${i}`);
+
+//         // Write to file when batch size is reached or at the end of loop
+//         if (i % batchSize === 0 || i === blocks.length) {
+//           const ws = fs.createWriteStream(`outputData.csv`, {
+//             flags: i === batchSize ? "w" : "a",
+//           });
+//           write(allCsvRows, { headers: i === batchSize ? true : false })
+//             .pipe(ws)
+//             .on("finish", () => {
+//               console.log(`CSV file has been written.`);
+//             });
+
+//           // Clear the accumulated CSV rows
+//           allCsvRows.length = 0;
+//         }
+//       } catch (error) {
+//         console.error(`An error occurred for block ${block}:`, error);
+//       }
+//     }
+//   })
+//   .catch((err) => {
+//     console.error("Error reading CSV file:", err);
+//   });
+
+module.exports = {
+  getUserTVLByBlock,
+};
+
 const readBlocksFromCSV = async (filePath: string): Promise<BlockData[]> => {
   const blocks: BlockData[] = [];
 
   await new Promise<void>((resolve, reject) => {
     fs.createReadStream(filePath)
-      .pipe(csv({ separator: "\t" })) // Specify the separator as '\t' for TSV files
-      .on("data", (row: any) => {
+      .pipe(csv()) // Specify the separator as '\t' for TSV files
+      .on('data', (row) => {
         const blockNumber = parseInt(row.number, 10);
-        const blockTimestamp = parseInt(row.block_timestamp, 10);
+        const blockTimestamp = parseInt(row.timestamp, 10);
         if (!isNaN(blockNumber) && blockTimestamp) {
           blocks.push({ blockNumber: blockNumber, blockTimestamp });
         }
       })
-      .on("end", resolve)
-      .on("error", reject);
+      .on('end', () => {
+        resolve();
+      })
+      .on('error', (err) => {
+        reject(err);
+      });
   });
 
   return blocks;
 };
 
-readBlocksFromCSV("block_numbers.tsv")
-  .then(async (blocks) => {
-    const allCsvRows: any[] = []; // Array to accumulate CSV rows for all blocks
-    const batchSize = 10; // Size of batch to trigger writing to the file
-    let i = 0;
-
-    for (const block of blocks) {
-      try {
-        const result = await getUserTVLByBlock(block);
-
-        // Accumulate CSV rows for all blocks
-        allCsvRows.push(...result);
-
-        i++;
-        console.log(`Processed block ${i}`);
-
-        // Write to file when batch size is reached or at the end of loop
-        if (i % batchSize === 0 || i === blocks.length) {
-          const ws = fs.createWriteStream(`outputData.csv`, {
-            flags: i === batchSize ? "w" : "a",
-          });
-          write(allCsvRows, { headers: i === batchSize ? true : false })
-            .pipe(ws)
-            .on("finish", () => {
-              console.log(`CSV file has been written.`);
-            });
-
-          // Clear the accumulated CSV rows
-          allCsvRows.length = 0;
-        }
-      } catch (error) {
-        console.error(`An error occurred for block ${block}:`, error);
-      }
-    }
-  })
-  .catch((err) => {
-    console.error("Error reading CSV file:", err);
-  });
-
-module.exports = {
-  getUserTVLByBlock,
-};
+readBlocksFromCSV('hourly_blocks.csv').then(async (blocks: BlockData[]) => {
+  console.log(blocks);
+  const allCsvRows: any[] = []; // Array to accumulate CSV rows for all blocks
+  const batchSize = 1000; // Size of batch to trigger writing to the file
+  let i = 0;
+
+  for (const block of blocks) {
+    try {
+      const result = await getUserTVLByBlock(block);
+      for (let i = 0; i < result.length; i++) {
+        allCsvRows.push(result[i])
+      }
+    } catch (error) {
+      console.error(`An error occurred for block ${block}:`, error);
+    }
+  }
+  await new Promise((resolve, reject) => {
+    const ws = fs.createWriteStream(`outputData.csv`, { flags: 'w' });
+    write(allCsvRows, { headers: true })
+      .pipe(ws)
+      .on("finish", () => {
+        console.log(`CSV file has been written.`);
+        resolve;
+      });
+  });
+}).catch((err) => {
+  console.error('Error reading CSV file:', err);
});
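
Two caveats in the added code are worth flagging. First, the comment on .pipe(csv()) still says "Specify the separator as '\t' for TSV files", but with the separator option removed, csv-parser falls back to its default comma separator, which is what the new .csv input needs. Second, in the final write block the bare statement "resolve;" references the resolver without calling it, so the awaited promise never settles; the output file is still written, but any code placed after the await would never run, and write errors are never surfaced. A corrected sketch of just that block (same fast-csv write call; only the resolve call and an error handler change):

await new Promise<void>((resolve, reject) => {
  const ws = fs.createWriteStream("outputData.csv", { flags: "w" });
  write(allCsvRows, { headers: true })
    .pipe(ws)
    .on("finish", () => {
      console.log("CSV file has been written.");
      resolve(); // actually invoke the resolver so execution continues past the await
    })
    .on("error", reject); // fail the promise on write errors instead of hanging

});
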
adapters/zerolend/yarn.lock: 2 additions & 2 deletions
@@ -34,7 +34,7 @@
 
 csv-parser@^3.0.0:
   version "3.0.0"
-  resolved "https://registry.yarnpkg.com/csv-parser/-/csv-parser-3.0.0.tgz#b88a6256d79e090a97a1b56451f9327b01d710e7"
+  resolved "https://registry.npmjs.org/csv-parser/-/csv-parser-3.0.0.tgz"
   integrity sha512-s6OYSXAK3IdKqYO33y09jhypG/bSDHPuyCme/IdEHfWpLf/jKcpitVFyOC6UemgGk8v7Q5u2XE0vvwmanxhGlQ==
   dependencies:
     minimist "^1.2.0"
@@ -89,7 +89,7 @@ lodash.uniq@^4.5.0:
 
 minimist@^1.2.0:
   version "1.2.8"
-  resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.8.tgz#c1a464e7693302e082a075cee0c057741ac4772c"
+  resolved "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz"
   integrity sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==
 
 typescript@^5.3.3:
