From 39bdc56d226f3545af94e26e5b3bf94d3e9d508d Mon Sep 17 00:00:00 2001 From: bitbeckers Date: Wed, 29 May 2024 14:45:28 +0200 Subject: [PATCH 01/12] feat(attestation): attestation data zod, test, docs --- src/fetching/fetchAttestationData.ts | 96 +++++++++++----------- src/utils/validation.ts | 14 ++++ test/fetching/fetchAttestationData.test.ts | 94 +++++++++++++++++++++ 3 files changed, 156 insertions(+), 48 deletions(-) create mode 100644 src/utils/validation.ts create mode 100644 test/fetching/fetchAttestationData.test.ts diff --git a/src/fetching/fetchAttestationData.ts b/src/fetching/fetchAttestationData.ts index b041839..891abc4 100644 --- a/src/fetching/fetchAttestationData.ts +++ b/src/fetching/fetchAttestationData.ts @@ -3,6 +3,38 @@ import { client } from "@/clients/evmClient"; import easAbi from "@/abis/eas.json"; import { Address, Hex, isAddress } from "viem"; import { ParsedAttestedEvent } from "@/parsing/attestedEvent"; +import * as z from "zod"; +import { messages } from "@/utils/validation"; + +/** + * Asynchronously fetches attestation data from a contract. + * + * This function fetches the attestation data as stored at the provided UID on the contract. + * It first checks if the attestedEvent and its UID are defined. If not, it logs an error and returns. + * Then, it tries to read the contract using the client, with the provided address, abi, function name, and arguments. + * If the contract read is successful, it parses the attestation data using the AttestationSchema. + * If the parsing is successful, it returns the attestedEvent with the attestation data attached. + * If an error occurs during the contract read, it logs the error and returns. + * + * @param {Object} attestedEvent - The EAS Attested event data. + * @returns {Promise} - The event data with the attestation data attached, or undefined if an error occurs. 
+ * + * @example + * ```js + * const attestedEvent = { + * recipient: "0x1234...5678", + * attester: "0x1234...5678", + * uid: "0x1234...5678", + * schema: "0x1234...5678", + * refUID: "0x1234...5678", + * time: BigInt(1633027200), + * expirationTime: BigInt(1733027200), + * revocationTime: BigInt(0), + * revocable: true, + * data: "0x1234...5678", + * }; + * const attestation: Attestation | undefined = await fetchAttestationData({ attestedEvent }); + **/ //https://github.com/ethereum-attestation-service/eas-sdk/blob/master/src/eas.ts#L87 export interface Attestation { @@ -18,25 +50,21 @@ export interface Attestation { data: Hex; } -/* - * This function fetches the attestation data as stored at the provided UID on the contract. - * - * @param attestation - The EAS Attested event data. - * @returns {Attestation} - The event data with the attestation data attached - * - * @example - * ```js - * - * const easData = { - * recipient: "0x1234...5678", - * attester: "0x1234...5678", - * uid: "0x1234...5678", - * schema: "0x1234...5678", - * }; - * - * const attestation: Attestation = await fetchAttestationData(easData); - * ``` - */ +// Zod validation of Attestation +export const AttestationSchema = z.object({ + uid: z.string(), + schema: z.string(), + refUID: z.string(), + time: z.bigint(), + expirationTime: z.bigint(), + revocationTime: z.bigint(), + recipient: z + .string() + .refine(isAddress, { message: messages.INVALID_ADDRESS }), + revocable: z.boolean(), + attester: z.string().refine(isAddress, { message: messages.INVALID_ADDRESS }), + data: z.string(), +}); interface FetchAttestationData { attestedEvent?: ParsedAttestedEvent; @@ -63,13 +91,7 @@ export const fetchAttestationData = async ({ args: [uid], }); - if (!_attestationData || !isAttestation(_attestationData)) { - console.error( - "[FetchAttestationData] Invalid attestation data", - _attestationData, - ); - return; - } + AttestationSchema.parse(_attestationData); return { ...attestedEvent, attestation: 
_attestationData }; } catch (e) { @@ -80,25 +102,3 @@ export const fetchAttestationData = async ({ return; } }; - -export const isAttestation = (data: unknown): data is Attestation => { - return ( - typeof data === "object" && - data !== null && - "uid" in data && - "schema" in data && - "refUID" in data && - "time" in data && - "expirationTime" in data && - "revocationTime" in data && - "recipient" in data && - typeof data.recipient === "string" && - isAddress(data.recipient) && - "revocable" in data && - typeof data.revocable === "boolean" && - "attester" in data && - typeof data.attester === "string" && - isAddress(data.attester) && - "data" in data - ); -}; diff --git a/src/utils/validation.ts b/src/utils/validation.ts new file mode 100644 index 0000000..558632d --- /dev/null +++ b/src/utils/validation.ts @@ -0,0 +1,14 @@ +import { isAddress } from "viem"; +import { z } from "zod"; + +export const isEthAddress = z + .string() + .refine((value) => isAddress(value, { strict: false }), { + message: + "Provided address is invalid. Please insure you have typed correctly.", + }); + +export const messages = { + INVALID_ADDRESS: + "Provided address is invalid. 
Please insure you have typed correctly.", +}; diff --git a/test/fetching/fetchAttestationData.test.ts b/test/fetching/fetchAttestationData.test.ts new file mode 100644 index 0000000..fe918cc --- /dev/null +++ b/test/fetching/fetchAttestationData.test.ts @@ -0,0 +1,94 @@ +import { afterAll, afterEach, describe, test } from "vitest"; +import { + Attestation, + fetchAttestationData, +} from "@/fetching/fetchAttestationData"; +import { client } from "@/clients/evmClient"; +import { faker } from "@faker-js/faker"; +import sinon from "sinon"; +import { getAddress } from "viem"; + +describe("fetchAttestationData", () => { + afterEach(() => { + sinon.restore(); + }); + + afterAll(() => { + sinon.restore(); + }); + + test("returns undefined when attestedEvent is not provided", async ({ + expect, + }) => { + const result = await fetchAttestationData({}); + expect(result).toBeUndefined(); + }); + + test("returns undefined when attestedEvent.uid is not provided", async ({ + expect, + }) => { + const result = await fetchAttestationData({ attestedEvent: {} }); + expect(result).toBeUndefined(); + }); + + test("returns attestation data when attestedEvent and uid are provided", async ({ + expect, + }) => { + const recipient = getAddress(faker.finance.ethereumAddress()); + const attester = getAddress(faker.finance.ethereumAddress()); + + const attestedEvent = { + recipient, + attester, + uid: "0x1234", + block_timestamp: BigInt(1234), + }; + + const mockAttestationData: Attestation = { + uid: "0x1234", + schema: "0x1234", + refUID: "0x1234", + time: BigInt(1234), + expirationTime: BigInt(1234), + revocationTime: BigInt(1234), + recipient, + revocable: true, + attester, + data: "0x1234", + }; + const readSpy = sinon.stub(client, "readContract"); + readSpy.resolves(mockAttestationData); + + const result = await fetchAttestationData({ + attestedEvent, + }); + + expect(result).toEqual({ + ...attestedEvent, + attestation: mockAttestationData, + }); + }); + + test("returns undefined when 
an error occurs during contract read", async ({ + expect, + }) => { + const recipient = getAddress(faker.finance.ethereumAddress()); + const attester = getAddress(faker.finance.ethereumAddress()); + + const attestedEvent = { + recipient, + attester, + uid: "0x1234", + block_timestamp: BigInt(1234), + }; + + const readSpy = sinon.stub(client, "readContract"); + readSpy.throws(); + + const result = await fetchAttestationData({ + attestedEvent, + }); + + expect(result).toBeUndefined(); + }); +}); From 8a90e80a48d9c4a9482963aecb3bbae5bd55aa7f Mon Sep 17 00:00:00 2001 From: bitbeckers Date: Wed, 29 May 2024 14:47:16 +0200 Subject: [PATCH 02/12] feat(attestation): typing in test --- test/fetching/fetchAttestationData.test.ts | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/test/fetching/fetchAttestationData.test.ts b/test/fetching/fetchAttestationData.test.ts index fe918cc..9c8d9bf 100644 --- a/test/fetching/fetchAttestationData.test.ts +++ b/test/fetching/fetchAttestationData.test.ts @@ -7,6 +7,7 @@ import { client } from "@/clients/evmClient"; import { faker } from "@faker-js/faker"; import sinon from "sinon"; import { getAddress } from "viem"; +import { ParsedAttestedEvent } from "../../src/parsing/attestedEvent"; describe("fetchAttestationData", () => { afterEach(() => { @@ -27,7 +28,9 @@ describe("fetchAttestationData", () => { test("returns undefined when attestedEvent.uid is not provided", async ({ expect, }) => { - const result = await fetchAttestationData({ attestedEvent: {} }); + const result = await fetchAttestationData({ + attestedEvent: {} as unknown as ParsedAttestedEvent, + }); expect(result).toBeUndefined(); }); From cb6e00b2c9d816576c1d72017a28abad66ec9485 Mon Sep 17 00:00:00 2001 From: bitbeckers Date: Thu, 30 May 2024 11:23:58 +0200 Subject: [PATCH 03/12] feat(easSchema): zod, test, docs --- src/clients/evmClient.ts | 8 +-- src/fetching/fetchAllowlistFromUri.ts | 29 +++------ src/fetching/fetchAttestationData.ts | 2 +- 
src/fetching/fetchMetadataFromUri.ts | 2 +- src/fetching/fetchSchemaData.ts | 90 ++++++++++----------------- src/fetching/fetchTokenUnits.ts | 26 -------- test/fetching/fetchSchemaData.test.ts | 71 +++++++++++++++++++++ 7 files changed, 121 insertions(+), 107 deletions(-) delete mode 100644 src/fetching/fetchTokenUnits.ts create mode 100644 test/fetching/fetchSchemaData.test.ts diff --git a/src/clients/evmClient.ts b/src/clients/evmClient.ts index 09dd94f..43b8815 100644 --- a/src/clients/evmClient.ts +++ b/src/clients/evmClient.ts @@ -13,12 +13,12 @@ const selectedNetwork = () => { } }; -const alchemyUrl = (apiKey: string) => { +const alchemyUrl = () => { switch (chainId) { case 84532: - return `https://base-sepolia.g.alchemy.com/v2/${apiKey}`; + return `https://base-sepolia.g.alchemy.com/v2/${alchemyApiKey}`; case 11155111: - return `https://eth-sepolia.g.alchemy.com/v2/${apiKey}`; + return `https://eth-sepolia.g.alchemy.com/v2/${alchemyApiKey}`; default: throw new Error(`Unsupported chain ID: ${chainId}`); } @@ -28,7 +28,7 @@ const alchemyUrl = (apiKey: string) => { export const client = createPublicClient({ cacheTime: 10_000, chain: selectedNetwork(), - transport: http(alchemyUrl(alchemyApiKey)), + transport: http(alchemyUrl()), batch: { multicall: { wait: 32, diff --git a/src/fetching/fetchAllowlistFromUri.ts b/src/fetching/fetchAllowlistFromUri.ts index d1a731c..955b22b 100644 --- a/src/fetching/fetchAllowlistFromUri.ts +++ b/src/fetching/fetchAllowlistFromUri.ts @@ -1,27 +1,18 @@ -import { fetchFromHTTPS, fetchFromIPFS } from "@/utils"; import { StandardMerkleTree } from "@openzeppelin/merkle-tree"; import { fetchFromHttpsOrIpfs } from "@/utils/fetchFromHttpsOrIpfs"; -/* - * This function fetches the metadata of a claim from the uri as stored in the claim on the contract. +/** + * This function fetches an allow list from a given URI. 
* - * Because the uri can be an IPFS URI, an HTTPS URI, or a CID, this function tries to fetch the metadata from the - * different sources in that order. If the metadata is found, it is validated and returned. + * The URI can be an IPFS URI, an HTTPS URI, or a CID. The function tries to fetch the allow list from the + * different sources in that order. If the allow list is found, it is validated and returned. * - * @param claim - The claim data. - * @returns The metadata of the claim. + * @param uri - The URI where the allow list is located. + * @returns The allow list as an OpenZeppelin Merkle tree if found and valid, otherwise undefined. * * @example - * ```js - * - * const claim: Claim = { - * contract_address: "0x1234...5678", - * claim_id: 1n, - * uri: "ipfs://QmXZj9Pm4g7Hv3Z6K4Vw2vW" - * total_units: 100n, - * }; - * - * const metadata = await fetchMetadataFromUri(claim); + * ```typescript + * const allowList = await fetchAllowListFromUri({ uri: "ipfs://QmXZj9Pm4g7Hv3Z6K4Vw2vW" }); * ``` */ @@ -49,7 +40,7 @@ export const fetchAllowListFromUri = async ({ uri }: FetchAllowListFromUri) => { JSON.parse(fetchResult as string), ); } catch (error) { - console.error( + console.warn( `[FetchAllowListFromUri] Allow list at ${uri} is not a valid OZ Merkle tree`, error, ); @@ -62,7 +53,7 @@ export const fetchAllowListFromUri = async ({ uri }: FetchAllowListFromUri) => { ); return StandardMerkleTree.load<[string, bigint]>(fetchResult as never); } catch (error) { - console.error( + console.warn( `[FetchAllowListFromUri] Allow list at ${uri} is not a valid OZ Merkle tree`, error, ); diff --git a/src/fetching/fetchAttestationData.ts b/src/fetching/fetchAttestationData.ts index 891abc4..dc0e452 100644 --- a/src/fetching/fetchAttestationData.ts +++ b/src/fetching/fetchAttestationData.ts @@ -93,7 +93,7 @@ export const fetchAttestationData = async ({ AttestationSchema.parse(_attestationData); - return { ...attestedEvent, attestation: _attestationData }; + return { 
...attestedEvent, attestation: _attestationData as Attestation }; } catch (e) { console.error( `[FetchAttestationData] Error fetching attestation data for UID ${uid} on contract ${easAddress}:`, diff --git a/src/fetching/fetchMetadataFromUri.ts b/src/fetching/fetchMetadataFromUri.ts index 3e310e9..f2a913e 100644 --- a/src/fetching/fetchMetadataFromUri.ts +++ b/src/fetching/fetchMetadataFromUri.ts @@ -56,7 +56,7 @@ export const fetchMetadataFromUri = async ({ uri }: FetchMetadataFromUri) => { description: _metadata.description, external_url: _metadata.external_url, image: _metadata.image, - properties: fetchResult.properties, + properties: _metadata.properties, contributors: _metadata.hypercert?.contributors.value, impact_scope: _metadata.hypercert?.impact_scope.value, impact_timeframe_from: _metadata.hypercert?.impact_timeframe?.value?.[0], diff --git a/src/fetching/fetchSchemaData.ts b/src/fetching/fetchSchemaData.ts index f4b302c..6cbb1c1 100644 --- a/src/fetching/fetchSchemaData.ts +++ b/src/fetching/fetchSchemaData.ts @@ -3,9 +3,23 @@ import { client } from "@/clients/evmClient"; import schemaRegistryAbi from "@/abis/schemaRegistry.json"; import { Hex, isAddress } from "viem"; import { Tables } from "@/types/database.types"; +import { z } from "zod"; +import { messages } from "@/utils/validation"; -//github.com/ethereum-attestation-service/eas-contracts/blob/master/contracts/ISchemaRegistry.sol +/** + * Fetches schema data from a contract using the provided schema's EAS ID. + * + * @param schema - An optional object of type Tables<"supported_schemas">. If provided, it should contain a property `eas_schema_id` which is used to fetch the schema data from the contract. + * + * @returns If successful, it returns an object of type SchemaRecord containing the fetched schema data. If the schema is not provided, or if the schema does not contain an `eas_schema_id`, or if there is an error during the contract read operation, it returns undefined. 
+ * + * @example + * ```typescript + * const schemaData = await fetchSchemaData({ schema: { eas_schema_id: '0x1234...5678' } }); + * ``` + */ +//github.com/ethereum-attestation-service/eas-contracts/blob/master/contracts/ISchemaRegistry.sol export interface SchemaRecord { uid: Hex; revocable: boolean; @@ -13,29 +27,26 @@ export interface SchemaRecord { schema: string; } -/* - * This function fetches the attestation data as stored at the provided UID on the contract. - * - * @param attestation - The EAS Attested event data. - * @returns - The event data with the attestation data attached - * - * @example - * ```js - * - * const easData: EASData = { - * recipient: "0x1234...5678", - * attester: "0x1234...5678", - * uid: "0x1234...5678", - * schema: "0x1234...5678", - * }; - * - * const attestation = await fetchAttestationData(easData); - * ``` - */ +export const createSchemaRecordSchema = (schema_uid: string) => + z.object({ + uid: z.string().refine((uid) => uid === schema_uid, { + message: `Schema data does not match schema UID ${schema_uid}`, + }), + revocable: z.boolean(), + resolver: z.string().refine(isAddress, { + message: messages.INVALID_ADDRESS, + }), + schema: z.string(), + }); + +export interface FetchSchemaDataArgs { + schema?: Pick, "eas_schema_id">; +} + export const fetchSchemaData = async ({ schema, }: { - schema?: Tables<"supported_schemas">; + schema?: Pick, "eas_schema_id">; }) => { if (!schema || !schema.eas_schema_id) { console.error(`Could not find EAS ID for schema`, schema); @@ -44,6 +55,7 @@ export const fetchSchemaData = async ({ const { schemaRegistryAddress } = getDeployment(); const { eas_schema_id } = schema; + const validationSchema = createSchemaRecordSchema(eas_schema_id); try { const _schemaData = await client.readContract({ @@ -53,25 +65,7 @@ export const fetchSchemaData = async ({ args: [eas_schema_id], }); - if (!_schemaData || !isSchemaRecord(_schemaData)) { - console.error("Invalid schema data", _schemaData); - return; - } - - 
if (_schemaData.uid != eas_schema_id) { - console.error( - `Schema data UID ${_schemaData.uid} does not match schema UID ${eas_schema_id}`, - ); - return; - } - - const _schema = schema; - - _schema.schema = _schemaData.schema; - _schema.resolver = _schemaData.resolver; - _schema.revocable = _schemaData.revocable; - - return _schema; + return validationSchema.parse(_schemaData); } catch (e) { console.error( `Error fetching data for schema ${eas_schema_id} on contract ${schemaRegistryAddress}:`, @@ -80,19 +74,3 @@ export const fetchSchemaData = async ({ return; } }; - -const isSchemaRecord = (data: unknown): data is SchemaRecord => { - return ( - typeof data === "object" && - data !== null && - "uid" in data && - typeof data.uid === "string" && - "revocable" in data && - typeof data.revocable === "boolean" && - "resolver" in data && - typeof data.resolver === "string" && - isAddress(data.resolver) && - "schema" in data && - typeof data.schema === "string" - ); -}; diff --git a/src/fetching/fetchTokenUnits.ts b/src/fetching/fetchTokenUnits.ts deleted file mode 100644 index f3b002c..0000000 --- a/src/fetching/fetchTokenUnits.ts +++ /dev/null @@ -1,26 +0,0 @@ -import { Hex, parseAbiItem } from "viem"; -import { client } from "@/clients/evmClient"; - -interface FetchTokenUnits { - contractAddress: Hex; - tokenId: bigint; - blockNumber: bigint; -} - -export const fetchTokenUnits = async ({ - contractAddress, - tokenId, - blockNumber, -}: FetchTokenUnits) => { - return await client.readContract({ - address: contractAddress, - abi: [ - parseAbiItem( - "function unitsOf(uint256 tokenID) view returns(uint256 units)", - ), - ], - functionName: "unitsOf", - args: [tokenId], - blockNumber, - }); -}; diff --git a/test/fetching/fetchSchemaData.test.ts b/test/fetching/fetchSchemaData.test.ts new file mode 100644 index 0000000..358893e --- /dev/null +++ b/test/fetching/fetchSchemaData.test.ts @@ -0,0 +1,71 @@ +import { afterAll, afterEach, describe, test } from "vitest"; +import { 
+ fetchSchemaData, + FetchSchemaDataArgs, + SchemaRecord, +} from "@/fetching/fetchSchemaData"; +import { client } from "@/clients/evmClient"; +import { faker } from "@faker-js/faker"; +import sinon from "sinon"; +import { getAddress } from "viem"; + +describe("fetchSchemaData", () => { + afterEach(() => { + sinon.restore(); + }); + + afterAll(() => { + sinon.restore(); + }); + + test("returns undefined when schema is not provided", async ({ expect }) => { + const result = await fetchSchemaData({}); + expect(result).toBeUndefined(); + }); + + test("returns undefined when schema.eas_schema_id is not provided", async ({ + expect, + }) => { + const result = await fetchSchemaData({} as unknown as FetchSchemaDataArgs); + expect(result).toBeUndefined(); + }); + + test("returns schema data when schema and eas_schema_id are provided", async ({ + expect, + }) => { + const resolver = getAddress(faker.finance.ethereumAddress()); + + const schema = { eas_schema_id: "0x5678" }; + const readSpy = sinon.stub(client, "readContract"); + + const mockSchemaData: SchemaRecord = { + uid: "0x5678", + revocable: true, + resolver, + schema: "schema", + }; + + readSpy.resolves(mockSchemaData); + + const result = await fetchSchemaData({ + schema, + }); + + expect(result).toEqual(mockSchemaData); + }); + + test("returns undefined when an error occurs during contract read", async ({ + expect, + }) => { + const schema = { eas_schema_id: "0x5678" }; + const readSpy = sinon.stub(client, "readContract"); + + readSpy.throws(); + + const result = await fetchSchemaData({ + schema, + }); + + expect(result).toBeUndefined(); + }); +}); From a4940e0c561816ab24a8505493751875a1c9fe52 Mon Sep 17 00:00:00 2001 From: bitbeckers Date: Thu, 30 May 2024 13:45:44 +0200 Subject: [PATCH 04/12] feat(monitorEas): zod, test, docs --- .env.test | 13 +++ src/monitoring/eas.ts | 102 +++++++++++--------- src/parsing/attestationData.ts | 58 +++++------ src/utils/getDeployment.ts | 17 +++- test/monitoring/eas.test.ts | 139 
+++++++++++++++++++++++++++ test/resources/mockAttestedEvents.ts | 45 +++++++++ test/utils/getDeployment.test.ts | 21 ++++ vitest.config.ts | 7 +- 8 files changed, 319 insertions(+), 83 deletions(-) create mode 100644 .env.test create mode 100644 test/monitoring/eas.test.ts create mode 100644 test/resources/mockAttestedEvents.ts create mode 100644 test/utils/getDeployment.test.ts diff --git a/.env.test b/.env.test new file mode 100644 index 0000000..ea4daab --- /dev/null +++ b/.env.test @@ -0,0 +1,13 @@ +# The port the server will run on +PORT=4001 + +# The chain ID of the +CHAIN_ID=11155111 +ALCHEMY_API_KEY=mock_alchemy_api_key +SUPABASE_DB_URL=http://localhost:54321 +SUPABASE_SERVICE_API_KEY=test +BATCH_SIZE=10000 +DELAY=5000 +SENTRY_DSN="test" +SENTRY_ENVIRONMENT=testing +SENTRY_AUTH_TOKEN="test" \ No newline at end of file diff --git a/src/monitoring/eas.ts b/src/monitoring/eas.ts index 034d141..239f918 100644 --- a/src/monitoring/eas.ts +++ b/src/monitoring/eas.ts @@ -3,67 +3,79 @@ import { isAddress, parseAbiItem } from "viem"; import { getDeployment } from "@/utils/getDeployment"; import { Tables } from "@/types/database.types"; -/* - This function fetches the logs of the Attested event from the EAS contract. - - @param schema - The EAS schema ID as stored in the database. - @param fromBlock - The block number from which to start fetching logs. - @param batchSize - The number of blocks to fetch logs from. - @returns The logs and the block numbers from which to start fetching logs. - - @example - ```js - const supportedSchemas = await getSupportedSchemas({ chainId: 1 }); - - const schema = supportedSchemas[0]; - - const logs = await getAttestationsForSchema({ schema, fromBlock: 1337n, batchSize: 100n }); - ``` +/** + * Fetches the logs of the Attested event from the EAS contract for a specific schema. + * + * @param schema - The EAS schema ID as stored in the database. + * @param fromBlock - The block number from which to start fetching logs. Defaults to 0. 
+ * @param batchSize - The number of blocks to fetch logs from. + * + * @returns An object containing the fetched logs and the block numbers from which to start fetching logs. + * If the EAS address is not available or not a valid address, or if the schema or EAS schema ID is not found, it returns undefined. + * + * @example + * ```typescript + * const supportedSchemas = await getSupportedSchemas({ chainId: 1 }); + * const schema = supportedSchemas[0]; + * const logs = await getAttestationsForSchema({ schema, fromBlock: 1337n, batchSize: 100n }); + ``` */ + export const getAttestationsForSchema = async ({ schema, fromBlock = 0n, batchSize, }: { - schema: Tables<"supported_schemas">; + schema: Pick, "eas_schema_id">; fromBlock?: bigint; batchSize: bigint; }) => { const { startBlock, easAddress } = getDeployment(); - const latestBlock = await client.getBlockNumber(); - if (!easAddress || !isAddress(easAddress)) { - console.error("EAS is not available", easAddress); + if (!isAddress(easAddress)) { + console.error( + "[getAttestationsForSchema] EAS is not available", + easAddress, + ); return; } - if (!schema || !schema.eas_schema_id) { - console.error("Schema or EAS schema ID not found", schema); - return; - } + try { + const latestBlock = await client.getBlockNumber(); - const _fromBlock = - fromBlock && fromBlock > startBlock ? fromBlock : startBlock; - const _toBlock = - _fromBlock + batchSize > latestBlock ? latestBlock : _fromBlock + batchSize; + const _fromBlock = + fromBlock && fromBlock > startBlock ? fromBlock : startBlock; + const _toBlock = + _fromBlock + batchSize > latestBlock + ? 
latestBlock + : _fromBlock + batchSize; + + console.info( + `[getAttestationsForSchema] Fetching attestation logs from ${_fromBlock} to ${_toBlock}`, + ); + + const filter = await client.createEventFilter({ + address: easAddress, + fromBlock: _fromBlock, + toBlock: _toBlock, + event: parseAbiItem( + "event Attested(address indexed recipient, address indexed attester, bytes32 uid, bytes32 indexed schema)", + ), + args: { + schema: schema.eas_schema_id as `0x${string}`, + }, + }); + + return { + logs: await client.getFilterLogs({ filter }), + fromBlock: _fromBlock, + toBlock: _toBlock, + }; + } catch (error) { + console.error( + "[getAttestationsForSchema] Error fetching attestation logs", + error, + ); + return; + } }; diff --git a/src/parsing/attestationData.ts b/src/parsing/attestationData.ts index b4b3b95..acc96aa 100644 --- a/src/parsing/attestationData.ts +++ b/src/parsing/attestationData.ts @@ -1,6 +1,6 @@ import { decodeAbiParameters, isAddress } from "viem"; import { Tables } from "@/types/database.types"; -import { Attestation, isAttestation } from "@/fetching/fetchAttestationData"; +import { Attestation, AttestationSchema } from "@/fetching/fetchAttestationData"; import { parseSchemaToABI } from "@/utils/parseSchemaToAbi"; import { ParsedAttestedEvent } from "@/parsing/attestedEvent"; @@ -16,9 +16,9 @@ export const decodeAttestationData = ({ schema, }: { attestation?: ParsedAttestedEvent & { attestation: Attestation }; - schema?: Partial<Tables<"supported_schemas">>; + schema?: Pick<Tables<"supported_schemas">, 
"schema" | "id">; }) => { - if (!schema || !schema?.schema) { + if (!schema?.schema) { console.error( "[DecodeAttestationData] Schema is missing data for parsing", schema, @@ -36,40 +36,34 @@ const attestationData = attestation.attestation; - if (!isAttestation(attestationData)) { - console.error( - "[DecodeAttestationData] Invalid attestation data", - attestationData, - ); - return; - } - - const abiFromSchema = parseSchemaToABI(schema.schema); + try { + AttestationSchema.parse(attestationData); - const decodedAttestation = decodeAbiParameters( - abiFromSchema[0].outputs, - attestationData.data, - ); + const abiFromSchema = parseSchemaToABI(schema.schema)[0]; - const keys = abiFromSchema[0].outputs.map((output) => output.name); - const values = decodedAttestation; - const decodedAttestationObject: Record<string, unknown> = keys.reduce( - (acc: Record<string, unknown>, key, index) => { - acc[key] = values[index]; - return acc; - }, - {}, - ); + const decodedAttestation = decodeAbiParameters( + abiFromSchema.outputs, + attestationData.data, + ); - if (!decodedAttestationObject) { - console.error( - "[DecodeAttestationData] Attestation data could not be parsed", - attestation, + const keys = abiFromSchema.outputs.map((output) => output.name); + const values = decodedAttestation; + const decodedAttestationObject: Record<string, unknown> = keys.reduce( + (acc: Record<string, unknown>, key, index) => { + acc[key] = values[index]; + return acc; + }, + {}, + ); - return; - } - - try { + if (!decodedAttestationObject) { + console.error( + "[DecodeAttestationData] Attestation data could not be parsed", + attestation, + ); + return; + } + const _attestation: Partial<Tables<"attestations">> = {}; _attestation.attester = attestationData.attester; diff --git a/src/utils/getDeployment.ts b/src/utils/getDeployment.ts index f375058..d854432 100644 --- a/src/utils/getDeployment.ts +++ b/src/utils/getDeployment.ts @@ -1,10 +1,19 @@ import { chainId } from "@/utils/constants"; import { Deployment, deployments } from "@hypercerts-org/sdk"; 
-/* - * This function returns the deployment for the current chain ID. + +/** + * Returns the deployment for the current chain ID. + * + * @returns An object containing the deployment details for the current chain ID. The object includes the start block number, the EAS address, the schema registry address, and the chain ID. + * @throws {Error} If the chain ID is not supported, it throws an error. * - * @returns The deployment for the current chain ID. - * @throws Error if the chain ID is not supported. + * @example + * ```typescript + * const deployment = getDeployment(); + * console.log(deployment.startBlock); // Outputs the start block number + * console.log(deployment.easAddress); // Outputs the EAS address + * console.log(deployment.schemaRegistryAddress); // Outputs the schema registry address + * console.log(deployment.chainId); // Outputs the chain ID */ export const getDeployment = (): Partial & { diff --git a/test/monitoring/eas.test.ts b/test/monitoring/eas.test.ts new file mode 100644 index 0000000..46f1a76 --- /dev/null +++ b/test/monitoring/eas.test.ts @@ -0,0 +1,139 @@ +import { getAttestationsForSchema } from "@/monitoring/eas"; +import { client } from "@/clients/evmClient"; +import { afterAll, afterEach, describe, expect, it, vi } from "vitest"; +import sinon from "sinon"; +import { mockFilter, mockLogs } from "../resources/mockAttestedEvents"; + +const mocks = vi.hoisted(() => { + return { + getDeployment: vi.fn(), + }; +}); + +vi.mock("../../src/utils/getDeployment", () => ({ + getDeployment: mocks.getDeployment, +})); + +describe("getAttestationsForSchema", () => { + const getBlockNumberSpy = sinon.stub(client, "getBlockNumber"); + const createEventFilterSpy = sinon.stub(client, "createEventFilter"); + const getFilterLogsSpy = sinon.stub(client, "getFilterLogs"); + + afterEach(() => { + vi.clearAllMocks(); + sinon.reset(); + }); + + afterAll(() => { + vi.clearAllMocks(); + sinon.restore(); + }); + + it("returns undefined when EAS address is not 
available", async () => { + mocks.getDeployment.mockReturnValue({ startBlock: 0n, easAddress: null }); + + const result = await getAttestationsForSchema({ + schema: { eas_schema_id: "0x123" }, + batchSize: 100n, + }); + expect(result).toBeUndefined(); + }); + + it("returns undefined when EAS address is not valid", async () => { + mocks.getDeployment.mockReturnValue({ + startBlock: 0n, + easAddress: "not an address", + }); + + const result = await getAttestationsForSchema({ + schema: { eas_schema_id: "0x123" }, + batchSize: 100n, + }); + expect(result).toBeUndefined(); + }); + + it("returns logs when all parameters are valid", async () => { + mocks.getDeployment.mockReturnValue({ + startBlock: 5957292n, + easAddress: "0xc2679fbd37d54388ce493f1db75320d236e1815e", + }); + + getBlockNumberSpy.resolves(5957292n); + createEventFilterSpy.resolves(mockFilter); + getFilterLogsSpy.resolves(mockLogs); + + const result = await getAttestationsForSchema({ + schema: { + eas_schema_id: + "0x3c0d0488e4d50455ef511f2c518403d21d35aa671ca30644aa9f7f7bb2516e2f", + }, + batchSize: 100n, + }); + + expect(result).toEqual({ + logs: mockLogs, + fromBlock: 5957292n, + toBlock: 5957292n, + }); + }); + + it("returns undefined when block number is not available", async () => { + mocks.getDeployment.mockReturnValue({ + startBlock: 5957292n, + easAddress: "0xc2679fbd37d54388ce493f1db75320d236e1815e", + }); + + getBlockNumberSpy.throws(); + + const result = await getAttestationsForSchema({ + schema: { + eas_schema_id: + "0x3c0d0488e4d50455ef511f2c518403d21d35aa671ca30644aa9f7f7bb2516e2f", + }, + batchSize: 100n, + }); + + expect(result).toBeUndefined(); + }); + + it("returns undefined when event filter cannot be created", async () => { + mocks.getDeployment.mockReturnValue({ + startBlock: 5957292n, + easAddress: "0xc2679fbd37d54388ce493f1db75320d236e1815e", + }); + + getBlockNumberSpy.resolves(5957292n); + createEventFilterSpy.throws(); + + const result = await getAttestationsForSchema({ + schema: 
{ + eas_schema_id: + "0x3c0d0488e4d50455ef511f2c518403d21d35aa671ca30644aa9f7f7bb2516e2f", + }, + batchSize: 100n, + }); + + expect(result).toBeUndefined(); + }); + + it("returns undefined when logs cannot be fetched", async () => { + mocks.getDeployment.mockReturnValue({ + startBlock: 5957292n, + easAddress: "0xc2679fbd37d54388ce493f1db75320d236e1815e", + }); + + getBlockNumberSpy.resolves(5957292n); + createEventFilterSpy.resolves(mockFilter); + getFilterLogsSpy.throws(); + + const result = await getAttestationsForSchema({ + schema: { + eas_schema_id: + "0x3c0d0488e4d50455ef511f2c518403d21d35aa671ca30644aa9f7f7bb2516e2f", + }, + batchSize: 100n, + }); + + expect(result).toBeUndefined(); + }); +}); diff --git a/test/resources/mockAttestedEvents.ts b/test/resources/mockAttestedEvents.ts new file mode 100644 index 0000000..874a663 --- /dev/null +++ b/test/resources/mockAttestedEvents.ts @@ -0,0 +1,45 @@ +import { Address, Hash } from "viem"; + +export const mockFilter = { + abi: [{ name: "Attested", type: "event", inputs: [Array] }], + args: { + schema: + "0x3c0d0488e4d50455ef511f2c518403d21d35aa671ca30644aa9f7f7bb2516e2f", + }, + eventName: "Attested", + fromBlock: 5966591n, + id: "0xfef8bca29ad5ba2e7064459dc25c35e2" as Hash, + request: [], + strict: false, + toBlock: 5976591n, + type: "event", +}; + +export const mockLogs = [ + { + eventName: "Attested", + args: { + recipient: "0x0000000000000000000000000000000000000000", + attester: "0x774e0Fc0DED22cA78D8f55d1307a2FD38a420CBe", + schema: + "0x3c0d0488e4d50455ef511f2c518403d21d35aa671ca30644aa9f7f7bb2516e2f", + uid: "0x040cce94967277fb797e9c96448b2b6741dd6898858e31a0d5d705be0aa93131", + }, + address: "0xc2679fbd37d54388ce493f1db75320d236e1815e" as Address, + blockHash: + "0xc9ee0d61cbcc3edafcadf4687efaa112005c6e742aa5e4a785cff60cfc6509cb" as Hash, + blockNumber: 5957292n, + data: "0x040cce94967277fb797e9c96448b2b6741dd6898858e31a0d5d705be0aa93131", + logIndex: 108, + removed: false, + topics: [ + 
"0x8bf46bf4cfd674fa735a3d63ec1c9ad4153f033c290341f3a588b75685141b35", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x000000000000000000000000774e0fc0ded22ca78d8f55d1307a2fd38a420cbe", + "0x3c0d0488e4d50455ef511f2c518403d21d35aa671ca30644aa9f7f7bb2516e2f", + ], + transactionHash: + "0xd017f5696c5a1a2aa960b792d6afacd3e9b653ae58488f86067d2dbd92b7bd9a", + transactionIndex: 38, + }, +]; diff --git a/test/utils/getDeployment.test.ts b/test/utils/getDeployment.test.ts new file mode 100644 index 0000000..b519649 --- /dev/null +++ b/test/utils/getDeployment.test.ts @@ -0,0 +1,21 @@ +import { afterAll, describe, expect, test, vi } from "vitest"; +import { getDeployment } from "../../src/utils"; +import { chainId } from "../../src/utils/constants"; + +describe("getDeployment", () => { + afterAll(() => { + vi.restoreAllMocks(); + }); + + test("returns deployment for supported chain ID", () => { + const deployment = getDeployment(); + + expect(deployment.chainId).toEqual(chainId); + }); + + test("throws error for unsupported chain ID", () => { + vi.mock("../../src/utils/constants", () => ({ chainId: 1337 })); + + expect(() => getDeployment()).toThrowError("Unsupported chain ID"); + }); +}); diff --git a/vitest.config.ts b/vitest.config.ts index 588b017..25ccc07 100644 --- a/vitest.config.ts +++ b/vitest.config.ts @@ -1,11 +1,14 @@ -import { resolve } from "node:path"; import { configDefaults, defineConfig } from "vitest/config"; +import { config } from "dotenv"; +import { resolve } from "node:path"; import tsconfigPaths from "vite-tsconfig-paths"; +config({ path: resolve(__dirname, ".env.test") }); + export default defineConfig({ plugins: [tsconfigPaths()], test: { - setupFiles: ["dotenv/config", "./test/setup-env.ts"], + setupFiles: ["./test/setup-env.ts"], // https://github.com/davelosert/vitest-coverage-report-action coverage: { // you can include other reporters, but 'json-summary' is required, json is recommended From 
7a2700c9a82e9189145d49a7efa4228a04319ea8 Mon Sep 17 00:00:00 2001 From: bitbeckers Date: Thu, 30 May 2024 17:47:58 +0200 Subject: [PATCH 05/12] feat(allowlistCreated): zod, test, docs --- src/fetching/fetchAttestationData.ts | 25 ++---- src/monitoring/eas.ts | 6 +- src/parsing/allowListCreatedEvent.ts | 95 +++++++++++----------- test/parsing/allowListCreatedEvent.test.ts | 17 +--- test/utils/getDeployment.test.ts | 7 +- 5 files changed, 66 insertions(+), 84 deletions(-) diff --git a/src/fetching/fetchAttestationData.ts b/src/fetching/fetchAttestationData.ts index dc0e452..6b50966 100644 --- a/src/fetching/fetchAttestationData.ts +++ b/src/fetching/fetchAttestationData.ts @@ -1,7 +1,7 @@ import { getDeployment } from "@/utils"; import { client } from "@/clients/evmClient"; import easAbi from "@/abis/eas.json"; -import { Address, Hex, isAddress } from "viem"; +import { Address, Hex, isAddress, isHex } from "viem"; import { ParsedAttestedEvent } from "@/parsing/attestedEvent"; import * as z from "zod"; import { messages } from "@/utils/validation"; @@ -37,24 +37,11 @@ import { messages } from "@/utils/validation"; **/ //https://github.com/ethereum-attestation-service/eas-sdk/blob/master/src/eas.ts#L87 -export interface Attestation { - uid: Hex; - schema: Hex; - refUID: Hex; - time: bigint; - expirationTime: bigint; - revocationTime: bigint; - recipient: Address; - revocable: boolean; - attester: Address; - data: Hex; -} - // Zod validation of Attestation export const AttestationSchema = z.object({ - uid: z.string(), - schema: z.string(), - refUID: z.string(), + uid: z.string().refine(isHex), + schema: z.string().refine(isHex), + refUID: z.string().refine(isHex), time: z.bigint(), expirationTime: z.bigint(), revocationTime: z.bigint(), @@ -63,9 +50,11 @@ export const AttestationSchema = z.object({ .refine(isAddress, { message: messages.INVALID_ADDRESS }), revocable: z.boolean(), attester: z.string().refine(isAddress, { message: messages.INVALID_ADDRESS }), - data: 
z.string(), + data: z.string().refine(isHex), }); +export type Attestation = z.infer; + interface FetchAttestationData { attestedEvent?: ParsedAttestedEvent; } diff --git a/src/monitoring/eas.ts b/src/monitoring/eas.ts index 239f918..fb8d90b 100644 --- a/src/monitoring/eas.ts +++ b/src/monitoring/eas.ts @@ -59,11 +59,11 @@ export const getAttestationsForSchema = async ({ fromBlock: _fromBlock, toBlock: _toBlock, event: parseAbiItem( - "event Attested(address indexed recipient, address indexed attester, bytes32 uid, bytes32 indexed schema), + "event Attested(address indexed recipient, address indexed attester, bytes32 uid, bytes32 indexed schema)", ), args: { - schema: schema.eas_schema_id as `0x${string}, - , + schema: schema.eas_schema_id as `0x${string}`, + }, }); return { diff --git a/src/parsing/allowListCreatedEvent.ts b/src/parsing/allowListCreatedEvent.ts index a3cc71c..8c7abbd 100644 --- a/src/parsing/allowListCreatedEvent.ts +++ b/src/parsing/allowListCreatedEvent.ts @@ -1,57 +1,58 @@ import { isAddress } from "viem"; import { NewAllowList } from "@/types/types"; +import { z } from "zod"; +import { messages } from "@/utils/validation"; -type AllowListCreatedEvent = { - address: string; - args: { - tokenID: bigint; - root: string; - }; - blockNumber: bigint; - [key: string]: unknown; -}; - -/* - * Helper method to get the sender, recipient, tokenID and value from the event. Will return undefined when the event is - * missing any of the required fields. +/** + * Parses an event object to extract the tokenID and root. + * + * This function attempts to parse the event object using the AllowListCreatedEventSchema. + * If the event object is valid, it extracts the tokenID and root from the event's args property, + * and returns them in a new object. If the event object is not valid, it logs an error and returns undefined. + * + * @param event - The event object to parse. Its structure should match the AllowListCreatedEventSchema. 
* - * @param event - The event object. + * @returns An object containing the tokenID and root from the event's args property, or undefined if the event object is not valid. + * + * @example + * ```typescript + * const event = { + * address: "0x1234", + * args: { + * tokenID: 5678n, + * root: "0x5678", + * }, + * blockNumber: 1234n, + * }; + * const parsedEvent = parseAllowListCreated(event); + * console.log(parsedEvent); // { token_id: 5678n, root: "0x5678" } * */ -export const parseAllowListCreated = async (event: unknown) => { - if (!isAllowListCreatedEvent(event)) { - console.error( - `[ParseAllow:istCreated] Invalid event or event args for parsing AllowlistCreated event: `, - event, - ); - return; - } - const { args } = event; +const AllowListCreatedEventSchema = z.object({ + address: z.string().refine(isAddress, { message: messages.INVALID_ADDRESS }), + args: z.object({ + tokenID: z.bigint(), + root: z.string(), + }), + blockNumber: z.bigint(), +}); - const row: Partial = { - token_id: args.tokenID, - root: args.root, - }; +export type AllowListCreatedEvent = z.infer; - return row; -}; +export const parseAllowListCreated = async (event: unknown) => { + try { + AllowListCreatedEventSchema.parse(event); + + const { args } = event as AllowListCreatedEvent; -function isAllowListCreatedEvent( - event: unknown, -): event is AllowListCreatedEvent { - const e = event as Partial; + const row: Partial = { + token_id: args.tokenID, + root: args.root, + }; - return ( - typeof e === "object" && - e !== null && - e?.args !== null && - typeof e?.args === "object" && - e?.args.tokenID !== null && - typeof e?.args?.tokenID === "bigint" && - e?.args.root !== null && - typeof e?.args?.root === "string" && - typeof e.address === "string" && - isAddress(e.address) && - typeof e.blockNumber === "bigint" - ); -} + return row; + } catch (e) { + console.error("[parseAllowListCreated] Error parsing event", e); + return; + } +}; diff --git a/test/parsing/allowListCreatedEvent.test.ts 
b/test/parsing/allowListCreatedEvent.test.ts index 7f7a09d..9729477 100644 --- a/test/parsing/allowListCreatedEvent.test.ts +++ b/test/parsing/allowListCreatedEvent.test.ts @@ -1,9 +1,6 @@ -import { beforeEach, describe, expect, it } from "vitest"; -import { server } from "../setup-env"; -import { http, HttpResponse } from "msw"; +import { describe, expect, it } from "vitest"; import { parseAllowListCreated } from "../../src/parsing/allowListCreatedEvent"; import { faker } from "@faker-js/faker"; -import { alchemyUrl } from "../resources/alchemyUrl"; describe("allowlistCreatedEvent", () => { const tokenID = faker.number.bigInt(); @@ -22,14 +19,6 @@ describe("allowlistCreatedEvent", () => { args, }; - beforeEach(() => { - server.use( - http.post(`${alchemyUrl}/*`, () => { - return HttpResponse.json(0); - }), - ); - }); - it("parses allowlistCreated event", async () => { const parsed = await parseAllowListCreated(event); @@ -42,7 +31,7 @@ describe("allowlistCreatedEvent", () => { expect(parsed.root).toEqual(root); }); - it("fails if event is invalid", async () => { + it("fails silently if event is invalid", async () => { const parsed1 = await parseAllowListCreated({ ...event, blockNumber: "not a bigint", @@ -56,7 +45,7 @@ describe("allowlistCreatedEvent", () => { expect(parsed2).toBeUndefined(); }); - it("fails if event args are invalid", async () => { + it("fails silently if event args are invalid", async () => { const parsed = await parseAllowListCreated({ ...event, args: { diff --git a/test/utils/getDeployment.test.ts b/test/utils/getDeployment.test.ts index b519649..b41b798 100644 --- a/test/utils/getDeployment.test.ts +++ b/test/utils/getDeployment.test.ts @@ -1,6 +1,6 @@ import { afterAll, describe, expect, test, vi } from "vitest"; import { getDeployment } from "../../src/utils"; -import { chainId } from "../../src/utils/constants"; +import * as constants from "../../src/utils/constants"; describe("getDeployment", () => { afterAll(() => { @@ -8,13 +8,16 @@ 
describe("getDeployment", () => { }); test("returns deployment for supported chain ID", () => { + const chainId = 11155111; + vi.spyOn(constants, "chainId", "get").mockReturnValue(chainId); const deployment = getDeployment(); expect(deployment.chainId).toEqual(chainId); }); test("throws error for unsupported chain ID", () => { - vi.mock("../../src/utils/constants", () => ({ chainId: 1337 })); + const chainId = 999_999_999; + vi.spyOn(constants, "chainId", "get").mockReturnValue(chainId); expect(() => getDeployment()).toThrowError("Unsupported chain ID"); }); From d067f910a71cddd97b9e40a639f420133d481b2c Mon Sep 17 00:00:00 2001 From: bitbeckers Date: Fri, 31 May 2024 04:36:04 +0200 Subject: [PATCH 06/12] feat(test): index attests event, data, tweaks, new migration --- package.json | 1 + src/fetching/fetchAttestationData.ts | 86 +++------ src/fetching/fetchSchemaData.ts | 54 +++--- src/indexer/indexAllowlistData.ts | 1 - src/indexer/indexAllowlistRecords.ts | 2 +- src/indexer/indexAttestations.ts | 62 +++---- src/indexer/indexSupportedSchemas.ts | 59 ++++--- src/monitoring/eas.ts | 5 +- src/parsing/attestationData.ts | 165 +++++++++--------- src/parsing/attestedEvent.ts | 118 +++++++------ src/storage/getSupportedSchemas.ts | 39 +++-- src/storage/storeAttestations.ts | 36 ++-- src/storage/storeSupportedSchemas.ts | 68 +++++--- src/types/database-generated.types.ts | 6 +- src/types/database.types.ts | 10 +- src/utils/mapUnknownToBigInt.ts | 6 + ...0240531004150_update_eas_schema_column.sql | 13 ++ supabase/seed.sql | 2 +- test/fetching/fetchAttestationData.test.ts | 89 ++++------ test/fetching/fetchSchemaData.test.ts | 30 ++-- test/monitoring/eas.test.ts | 18 +- test/parsing/attestationData.test.ts | 80 +++++++++ test/parsing/attestedEvent.test.ts | 62 +++++++ test/parsing/transferSingleEvent.test.ts | 29 +-- ...kAttestedEvents.ts => mockAttestations.ts} | 20 ++- test/setup-env.ts | 12 ++ test/storage/getSupportedSchemas.test.ts | 45 +++++ 
.../storeTransferSingleFraction.test.ts | 44 +---- vitest.config.ts | 4 +- 29 files changed, 672 insertions(+), 494 deletions(-) create mode 100644 src/utils/mapUnknownToBigInt.ts create mode 100644 supabase/migrations/20240531004150_update_eas_schema_column.sql create mode 100644 test/parsing/attestationData.test.ts create mode 100644 test/parsing/attestedEvent.test.ts rename test/resources/{mockAttestedEvents.ts => mockAttestations.ts} (66%) create mode 100644 test/storage/getSupportedSchemas.test.ts diff --git a/package.json b/package.json index 0a7bc0c..b17d990 100644 --- a/package.json +++ b/package.json @@ -9,6 +9,7 @@ "start": "node -r dotenv/config dist/server.js", "lint": "next lint", "test": "vitest", + "test:coverage": "vitest --coverage", "reset:supabase:local": "npx supabase db reset", "update:sentry:sourcemaps": "npx @sentry/wizard@latest -i sourcemaps", "update:supabase:types": "npx supabase gen types typescript --local > src/types/database-generated.types.ts" diff --git a/src/fetching/fetchAttestationData.ts b/src/fetching/fetchAttestationData.ts index 6b50966..8be5d96 100644 --- a/src/fetching/fetchAttestationData.ts +++ b/src/fetching/fetchAttestationData.ts @@ -1,44 +1,14 @@ import { getDeployment } from "@/utils"; import { client } from "@/clients/evmClient"; import easAbi from "@/abis/eas.json"; -import { Address, Hex, isAddress, isHex } from "viem"; +import { isAddress, isHex } from "viem"; import { ParsedAttestedEvent } from "@/parsing/attestedEvent"; import * as z from "zod"; import { messages } from "@/utils/validation"; -/** - * Asynchronously fetches attestation data from a contract. - * - * This function fetches the attestation data as stored at the provided UID on the contract. - * It first checks if the attestedEvent and its UID are defined. If not, it logs an error and returns. - * Then, it tries to read the contract using the client, with the provided address, abi, function name, and arguments. 
- * If the contract read is successful, it parses the attestation data using the AttestationSchema. - * If the parsing is successful, it returns the attestedEvent with the attestation data attached. - * If an error occurs during the contract read, it logs the error and returns. - * - * @param {Object} attestedEvent - The EAS Attested event data. - * @returns {Promise} - The event data with the attestation data attached, or undefined if an error occurs. - * - * @example - * ```js - * const attestedEvent = { - * recipient: "0x1234...5678", - * attester: "0x1234...5678", - * uid: "0x1234...5678", - * schema: "0x1234...5678", - * refUID: "0x1234...5678", - * time: BigInt(1633027200), - * expirationTime: BigInt(1733027200), - * revocationTime: BigInt(0), - * revocable: true, - * data: "0x1234...5678", - * }; - * const attestation: Attestation | undefined = await fetchAttestationData({ attestedEvent }); - **/ - //https://github.com/ethereum-attestation-service/eas-sdk/blob/master/src/eas.ts#L87 // Zod validation of Attestation -export const AttestationSchema = z.object({ +export const EasAttestationSchema = z.object({ uid: z.string().refine(isHex), schema: z.string().refine(isHex), refUID: z.string().refine(isHex), @@ -53,41 +23,37 @@ export const AttestationSchema = z.object({ data: z.string().refine(isHex), }); -export type Attestation = z.infer; +export type EasAttestation = z.infer; -interface FetchAttestationData { - attestedEvent?: ParsedAttestedEvent; +export interface FetchAttestationData { + attestedEvent: ParsedAttestedEvent; } +/** + * fetchAttestationData is an async function that fetches attestation data for a given UID from a contract. + * It uses the client to read the contract and parses the returned data using the EasAttestationSchema. + * @param {FetchAttestationData} attestedEvent - The event to fetch attestation data for. 
+ * @returns {Promise<{event: ParsedAttestedEvent, attestation: EasAttestation}>} - The attested event with the fetched attestation data. + */ export const fetchAttestationData = async ({ attestedEvent, }: FetchAttestationData) => { const { easAddress } = getDeployment(); - if (!attestedEvent || !attestedEvent.uid) { - console.error( - `[FetchAttestationData] Could not find UID for attestation`, - attestedEvent, - ); - return; - } const { uid } = attestedEvent; - try { - const _attestationData = await client.readContract({ - address: easAddress as `0x${string}`, - abi: easAbi, - functionName: "getAttestation", - args: [uid], - }); - - AttestationSchema.parse(_attestationData); - - return { ...attestedEvent, attestation: _attestationData as Attestation }; - } catch (e) { - console.error( - `[FetchAttestationData] Error fetching attestation data for UID ${uid} on contract ${easAddress}:`, - e, - ); - return; - } + console.debug( + `[fetchAttestationData] Fetching attestation data for UID: ${uid} from contract: ${easAddress}`, + ); + + const _attestationData = await client.readContract({ + address: easAddress as `0x${string}`, + abi: easAbi, + functionName: "getAttestation", + args: [uid], + }); + + return { + event: attestedEvent, + attestation: EasAttestationSchema.parse(_attestationData), + }; }; diff --git a/src/fetching/fetchSchemaData.ts b/src/fetching/fetchSchemaData.ts index 6cbb1c1..59ccc69 100644 --- a/src/fetching/fetchSchemaData.ts +++ b/src/fetching/fetchSchemaData.ts @@ -6,19 +6,6 @@ import { Tables } from "@/types/database.types"; import { z } from "zod"; import { messages } from "@/utils/validation"; -/** - * Fetches schema data from a contract using the provided schema's EAS ID. - * - * @param schema - An optional object of type Tables<"supported_schemas">. If provided, it should contain a property `eas_schema_id` which is used to fetch the schema data from the contract. 
- *
- * @returns If successful, it returns an object of type SchemaRecord containing the fetched schema data. If the schema is not provided, or if the schema does not contain an `eas_schema_id`, or if there is an error during the contract read operation, it returns undefined.
- *
- * @example
- * ```typescript
- * const schemaData = await fetchSchemaData({ schema: { eas_schema_id: '0x1234...5678' } });
- * ```
- */
-
 //github.com/ethereum-attestation-service/eas-contracts/blob/master/contracts/ISchemaRegistry.sol
 export interface SchemaRecord {
   uid: Hex;
@@ -40,37 +27,48 @@ export const createSchemaRecordSchema = (schema_uid: string) =>
   });
 
 export interface FetchSchemaDataArgs {
-  schema?: Pick<Tables<"supported_schemas">, "eas_schema_id">;
+  schema: Pick<Tables<"supported_schemas">, "uid">;
 }
 
+/**
+ * Fetches schema data from a contract using the provided schema's UID.
+ *
+ * This function takes a schema object as input, which should contain a property `uid`.
+ * It uses this UID to fetch the schema data from the contract.
+ * If the schema is not provided, or if the schema does not contain a `uid`, the function will throw an error.
+ *
+ * @param {Object} params - The parameters for the function.
+ * @param {Object} params.schema - The schema object. It should contain a property `uid`.
+ * @param {string} params.schema.uid - The UID of the schema.
+ *
+ * @returns {Promise<SchemaRecord>} A promise that resolves to an object of type SchemaRecord containing the fetched schema data. If there is an error during the contract read operation, the promise is rejected with the error.
+ * + * @example + * ```typescript + * const schemaData = await fetchSchemaData({ schema: { uid: '0x1234...5678' } }); + * console.log(schemaData); + * ``` + */ export const fetchSchemaData = async ({ - schema, -}: { - schema?: Pick, "eas_schema_id">; -}) => { - if (!schema || !schema.eas_schema_id) { - console.error(`Could not find EAS ID for schema`, schema); - return; - } - + schema: { uid }, +}: FetchSchemaDataArgs) => { const { schemaRegistryAddress } = getDeployment(); - const { eas_schema_id } = schema; - const validationSchema = createSchemaRecordSchema(eas_schema_id); + const validationSchema = createSchemaRecordSchema(uid); try { const _schemaData = await client.readContract({ address: schemaRegistryAddress as `0x${string}`, abi: schemaRegistryAbi, functionName: "getSchema", - args: [eas_schema_id], + args: [uid], }); return validationSchema.parse(_schemaData); } catch (e) { console.error( - `Error fetching data for schema ${eas_schema_id} on contract ${schemaRegistryAddress}:`, + `[fetchSchemaData] Error fetching data for schema ${uid} on contract ${schemaRegistryAddress}:`, e, ); - return; + throw e; } }; diff --git a/src/indexer/indexAllowlistData.ts b/src/indexer/indexAllowlistData.ts index debc16c..abe0349 100644 --- a/src/indexer/indexAllowlistData.ts +++ b/src/indexer/indexAllowlistData.ts @@ -1,5 +1,4 @@ import { IndexerConfig } from "@/types/types"; -import { getIncompleteAllowLists } from "@/storage/getIncompleteAllowLists"; import { fetchAllowListFromUri } from "@/fetching/fetchAllowlistFromUri"; import { storeAllowListData } from "@/storage/storeAllowListData"; import { Tables } from "@/types/database.types"; diff --git a/src/indexer/indexAllowlistRecords.ts b/src/indexer/indexAllowlistRecords.ts index c52d690..ce6a2c8 100644 --- a/src/indexer/indexAllowlistRecords.ts +++ b/src/indexer/indexAllowlistRecords.ts @@ -59,7 +59,7 @@ const processAllowListEntriesBatch = async ( const tree = StandardMerkleTree.load(allowList.data); if (!tree) { - 
console.error( + console.debug( "[IndexAllowlistRecords] Failed to load tree from data", allowList, ); diff --git a/src/indexer/indexAttestations.ts b/src/indexer/indexAttestations.ts index 73f5fca..93855bd 100644 --- a/src/indexer/indexAttestations.ts +++ b/src/indexer/indexAttestations.ts @@ -1,22 +1,22 @@ import { getAttestationsForSchema } from "@/monitoring"; -import { getDeployment } from "@/utils"; import { decodeAttestationData, parseAttestedEvent } from "@/parsing"; -import { Tables } from "@/types/database.types"; -import { ParsedAttestedEvent } from "@/parsing/attestedEvent"; import { IndexerConfig } from "@/types/types"; import { getSupportedSchemas } from "@/storage/getSupportedSchemas"; import { storeSupportedSchemas } from "@/storage/storeSupportedSchemas"; import { storeAttestations } from "@/storage/storeAttestations"; import { fetchAttestationData } from "@/fetching/fetchAttestationData"; -/* +/** * Indexes attestation logs for all supported schemas. Attestation logs are fetched from the chain and parsed into attestation data. * The attestation data is then stored in the database. * - * @param [batchSize] - The number of logs to fetch and parse in each batch. + * @param {IndexerConfig} config - Configuration object for the indexer. It has a batchSize property which determines the number of logs to fetch and parse in each batch. + * @param {bigint} config.batchSize - The number of logs to fetch and parse in each batch. Defaults to 10000n. + * + * @returns {Promise} - Returns a promise that resolves when the indexing operation is complete. If an error occurs during the operation, the promise is rejected with the error. 
* * @example - * ```js + * ```typescript * await indexAttestations({ batchSize: 1000n }); * ``` */ @@ -26,24 +26,19 @@ const defaultConfig = { batchSize: 10000n }; export const indexAttestations = async ({ batchSize = defaultConfig.batchSize, }: IndexerConfig = defaultConfig) => { - const { chainId } = getDeployment(); - const supportedSchemas = await getSupportedSchemas({ chainId }); + const supportedSchemas = await getSupportedSchemas(); if (!supportedSchemas || supportedSchemas.length === 0) { console.debug("[IndexAttestations] No supported schemas found"); return; } - // Get schema structure for all supported schemas - const schemasToIndex = supportedSchemas.filter((schema) => schema?.schema); - await Promise.all( - schemasToIndex.map(async (schema) => { + supportedSchemas.map(async (supportedSchema) => { + const { id, uid, last_block_indexed } = supportedSchema; const attestedEvents = await getAttestationsForSchema({ - schema, - fromBlock: schema?.last_block_indexed - ? BigInt(schema?.last_block_indexed) - : undefined, + schema: { uid }, + fromBlock: last_block_indexed ? 
BigInt(last_block_indexed) : undefined, batchSize, }); @@ -55,38 +50,33 @@ export const indexAttestations = async ({ if (!logs || logs.length === 0) { console.debug( - "[IndexAttestations] No logs found for supported schemas", - schemasToIndex.map((schema) => schema?.id), + "[IndexAttestations] No logs found for supported schema", + { supported_schema_id: id, uid }, ); return await storeSupportedSchemas({ supportedSchemas: [ { - ...schema, + ...supportedSchema, last_block_indexed: toBlock, }, ], }); } - const parsedEvents = ( - await Promise.all(logs.map(parseAttestedEvent)) - ).filter( - (attestation): attestation is ParsedAttestedEvent => - attestation !== null, - ); + const parsedEvents = await Promise.all(logs.map(parseAttestedEvent)); - const attestations = ( - await Promise.all( - parsedEvents.map(async (event) => - fetchAttestationData({ attestedEvent: event }).then((attestation) => - decodeAttestationData({ attestation, schema }), - ), + const attestations = await Promise.all( + parsedEvents.map(async (event) => + fetchAttestationData({ attestedEvent: event }).then( + ({ attestation, event }) => + decodeAttestationData({ + attestation, + event, + schema: supportedSchema, + }), ), - ) - ).filter( - (attestation): attestation is Tables<"attestations"> => - attestation !== null && attestation !== undefined, + ), ); return await storeAttestations({ @@ -96,7 +86,7 @@ export const indexAttestations = async ({ await storeSupportedSchemas({ supportedSchemas: [ { - ...schema, + ...supportedSchema, last_block_indexed: attestedEvents.toBlock, }, ], diff --git a/src/indexer/indexSupportedSchemas.ts b/src/indexer/indexSupportedSchemas.ts index dc05f46..ce87ec7 100644 --- a/src/indexer/indexSupportedSchemas.ts +++ b/src/indexer/indexSupportedSchemas.ts @@ -1,20 +1,23 @@ -import { getDeployment } from "@/utils"; import { getSupportedSchemas } from "@/storage/getSupportedSchemas"; import { IndexerConfig } from "@/types/types"; import { fetchSchemaData } from 
"@/fetching/fetchSchemaData"; -import { Tables } from "@/types/database.types"; import { storeSupportedSchemas } from "@/storage/storeSupportedSchemas"; -/* - * This function indexes the logs of the ClaimStored event emitted by the HypercertMinter contract. Based on the last - * block indexed, it fetches the logs in batches, parses them, fetches the metadata, and stores the hypercerts in the - * database. +/** + * Indexes supported schemas and stores them in the database. * - * @param [batchSize] - The number of logs to fetch and parse in each batch. + * This function fetches the supported schemas and filters out any that are incomplete. + * An incomplete schema is one that does not have a `schema`, `resolver`, or `revocable` property. + * The function then processes the incomplete schemas in batches, fetching additional data for each schema and storing the updated schemas in the database. + * + * @param {Object} config - The configuration for the function. + * @param {bigint} config.batchSize - The number of schemas to process in each batch. Defaults to `defaultConfig.batchSize`. + * + * @returns {Promise} A promise that resolves when all supported schemas have been processed and stored. If there is an error during the process, the promise is rejected with the error. 
* * @example - * ```js - * await indexClaimsStoredEvents({ batchSize: 1000n }); + * ```typescript + * await indexSupportedSchemas({ batchSize: 5n }); * ``` */ @@ -25,8 +28,7 @@ const defaultConfig = { export const indexSupportedSchemas = async ({ batchSize = defaultConfig.batchSize, }: IndexerConfig = defaultConfig) => { - const { chainId } = getDeployment(); - const supportedSchemas = await getSupportedSchemas({ chainId }); + const supportedSchemas = await getSupportedSchemas(); if (!supportedSchemas || supportedSchemas.length === 0) { console.debug("[IndexSupportedSchema] No supported schemas found"); @@ -34,22 +36,35 @@ export const indexSupportedSchemas = async ({ } const incompleteSchemas = supportedSchemas.filter( - (schema) => !schema.schema || !schema.resolver || !schema.revocable, + (schema) => + !schema.schema || + !schema.resolver || + schema.revocable === null || + schema.revocable === undefined, ); const _size = Number(batchSize); for (let i = 0; i < incompleteSchemas.length; i += _size) { const batch = incompleteSchemas.slice(i, i + _size); - const schemaData = ( - await Promise.all(batch.map((schema) => fetchSchemaData({ schema }))) - ).filter( - (schema): schema is Tables<"supported_schemas"> => - schema !== null && schema !== undefined, - ); - - await storeSupportedSchemas({ - supportedSchemas: schemaData, - }); + + try { + const schemaData = await Promise.all( + batch.map(async (schema) => ({ + ...schema, + ...(await fetchSchemaData({ schema })), + })), + ); + + await storeSupportedSchemas({ + supportedSchemas: schemaData, + }); + } catch (error) { + console.error("[IndexSupportedSchema] Error processing batch: ", { + error, + batch, + }); + throw error; + } } }; diff --git a/src/monitoring/eas.ts b/src/monitoring/eas.ts index fb8d90b..755dc69 100644 --- a/src/monitoring/eas.ts +++ b/src/monitoring/eas.ts @@ -26,7 +26,7 @@ export const getAttestationsForSchema = async ({ fromBlock = 0n, batchSize, }: { - schema: Pick, "eas_schema_id">; + schema: 
Pick<Tables<"supported_schemas">, "uid">;
   fromBlock?: bigint;
   batchSize: bigint;
 }) => {
@@ -54,6 +54,7 @@ export const getAttestationsForSchema = async ({
     `[getAttestationsForSchema] Fetching attestation logs from ${_fromBlock} to ${_toBlock}`,
   );
 
+  // TODO could be it's own schema
   const filter = await client.createEventFilter({
     address: easAddress,
     fromBlock: _fromBlock,
@@ -62,7 +63,7 @@ export const getAttestationsForSchema = async ({
       "event Attested(address indexed recipient, address indexed attester, bytes32 uid, bytes32 indexed schema)",
     ),
     args: {
-      schema: schema.eas_schema_id as `0x${string}`,
+      schema: schema.uid as `0x${string}`,
     },
   });
 
diff --git a/src/parsing/attestationData.ts b/src/parsing/attestationData.ts
index acc96aa..4b93bcc 100644
--- a/src/parsing/attestationData.ts
+++ b/src/parsing/attestationData.ts
@@ -1,112 +1,113 @@
 import { decodeAbiParameters, isAddress } from "viem";
 import { Tables } from "@/types/database.types";
-import { Attestation, AttestationSchema } from "@/fetching/fetchAttestationData";
+import { EasAttestation } from "@/fetching/fetchAttestationData";
 import { parseSchemaToABI } from "@/utils/parseSchemaToAbi";
 import { ParsedAttestedEvent } from "@/parsing/attestedEvent";
+import { z } from "zod";
 
-/*
- * Helper method to get the attestation content from the encoded data
+const HypercertAttestationSchema = z.object(
+  {
+    chain_id: z.coerce.bigint(),
+    contract_address: z.string().refine(isAddress, {
+      message: `[decodeAttestationData] Invalid contract address in attestation data`,
+    }),
+    token_id: z.coerce.bigint(),
+  },
+  { message: `[decodeAttestationData] Invalid hypercert attestation data` },
+);
+
+/**
+ * Decodes attestation data from a given attestation and schema.
+ *
+ * This function takes an attestation and a schema as input, and attempts to decode the attestation data.
+ * If the schema or attestation is missing, it logs an error and returns.
+ * If the attestation data can be successfully decoded, it creates a new attestation object with the decoded data. + * If an error occurs during the decoding process, it logs the error and returns. * - * @param attestation - The attestation object. + * @param {Object} params - The parameters for the function. + * @param {ParsedAttestedEvent} params.event - The event data associated with the attestation. + * @param {EasAttestation} params.attestation - The attestation data to decode. + * @param {Object} params.schema - The schema to use for decoding. It should contain a `schema` and an `id`. + * @param {string} params.schema.schema - The schema string. + * @param {number} params.schema.id - The id of the schema. * - * @returns The decoded attestation object. + * @returns {Object | undefined} A new attestation object with the decoded data, or undefined if the attestation data could not be decoded. + * + * @example + * ```typescript + * const attestation = { + * attestation: { + * attester: "0x1234", + * recipient: "0x5678", + * data: "0x9abc", + * }, + * block_timestamp: 1234567890n, + * uid: "abcdef", + * }; + * const schema = { + * schema: "0xdef0", + * id: 1, + * }; + * const decodedAttestation = decodeAttestationData({ event, attestation, schema }); + * console.log(decodedAttestation); * */ export const decodeAttestationData = ({ + event, attestation, schema, }: { - attestation?: ParsedAttestedEvent & { attestation: Attestation }; - schema?: Pick, "schema" | "id">; + event: ParsedAttestedEvent; + attestation: EasAttestation; + schema: Pick, "schema" | "id">; }) => { - if (!schema?.schema) { - console.error( - "[DecodeAttestationData] Schema is missing data for parsing", - schema, - ); - return; - } - - if (!attestation || !attestation?.attestation) { - console.error( - "[DecodeAttestationData] Attestation is missing data for parsing", - attestation, - ); + if (!schema.schema) { + console.error("[DecodeAttestationData] Schema is missing", schema); return; } - 
const attestationData = attestation.attestation; + const { attester, recipient, data, uid } = attestation; + let _attestation: { [key: string]: unknown } = {}; try { - AttestationSchema.parse(attestationData); - const abiFromSchema = parseSchemaToABI(schema.schema)[0]; - const decodedAttestation = decodeAbiParameters( - abiFromSchema.outputs, - attestationData.data, - ); + const decodedAttestation = decodeAbiParameters(abiFromSchema.outputs, data); - const keys = abiFromSchema.outputs.map((output) => output.name); - const values = decodedAttestation; - const decodedAttestationObject: Record = keys.reduce( - (acc: Record, key, index) => { - acc[key] = values[index]; - return acc; - }, - {, + _attestation = Object.fromEntries( + abiFromSchema.outputs.map((output, index) => [ + output.name, + decodedAttestation[index], + ]), ); - - if (!decodedAttestationObject) { - console.error( - "[DecodeAttestationData] Attestation data could not be parsed", - attestation - ); - return; - } - - const _attestation: Partial> = {}; - - _attestation.attester = attestationData.attester; - _attestation.recipient = attestationData.recipient; - _attestation.block_timestamp = attestation.block_timestamp; - _attestation.uid = attestation.uid; - _attestation.supported_schemas_id = schema.id; - _attestation.attestation = JSON.parse(JSON.stringify(attestationData)); - _attestation.data = JSON.parse(JSON.stringify(decodedAttestationObject)); - - if (decodedAttestationObject?.chain_id) - _attestation.chain_id = mapUnknownToBigInt( - decodedAttestationObject.chain_id, - )?.toString(); - - if ( - decodedAttestationObject?.contract_address && - decodedAttestationObject?.token_id - ) { - _attestation.contract_address = - typeof decodedAttestationObject?.contract_address === "string" && - isAddress(decodedAttestationObject?.contract_address) - ? 
decodedAttestationObject.contract_address - : null; - - _attestation.token_id = mapUnknownToBigInt( - decodedAttestationObject.token_id, - )?.toString(); - } - - return _attestation; } catch (error) { console.error( - "[DecodeAttestationData] Error while decoding attestation data", + "[DecodeAttestationData] Error while decoding attestation data: ", error, ); return; } -}; -const mapUnknownToBigInt = (value: unknown) => { - if (typeof value === "bigint") return value; - if (typeof value === "number") return BigInt(value); - if (typeof value === "string") return BigInt(value); - return null; + try { + const { chain_id, contract_address, token_id } = + HypercertAttestationSchema.parse(_attestation); + + return { + attester, + recipient, + block_timestamp: event.block_timestamp, + uid, + supported_schemas_id: schema.id, + attestation: JSON.parse(JSON.stringify(attestation)), + data: JSON.parse(JSON.stringify(_attestation)), + chain_id, + contract_address, + token_id, + }; + } catch (error) { + console.error( + "[DecodeAttestationData] Error while constructing attestation data: ", + error, + ); + throw error; + } }; diff --git a/src/parsing/attestedEvent.ts b/src/parsing/attestedEvent.ts index 64e8c61..a2b5cb9 100644 --- a/src/parsing/attestedEvent.ts +++ b/src/parsing/attestedEvent.ts @@ -2,17 +2,36 @@ import { isAddress } from "viem"; import { Tables } from "@/types/database.types"; import { getDeployment } from "@/utils"; import { getBlockTimestamp } from "@/utils/getBlockTimestamp"; +import { z } from "zod"; -type AttestedEvent = { - address: string; - args: { - recipient: `0x${string}`; - attester: `0x${string}`; - uid: string; - schema: string; - }; - blockNumber: bigint; - [key: string]: unknown; +export const AttestationSchema = z.object({ + uid: z.string(), + schema: z.string(), + recipient: z.string().refine(isAddress), + attester: z.string().refine(isAddress), +}); + +export type Attestation = z.infer; + +export const AttestedEventSchema = z.object({ + 
address: z.string().refine(isAddress), + args: z.object({ + recipient: z.string().refine(isAddress), + attester: z.string().refine(isAddress), + uid: z.string(), + schema: z.string(), + }), + blockNumber: z.bigint(), +}); + +const createAttestedEventSchema = ({ easAddress }: { easAddress: string }) => { + return AttestedEventSchema.extend({ + address: z + .string() + .refine((address) => address.toLowerCase() == easAddress.toLowerCase(), { + message: "[parseAttestedEvent] Address does not match EAS address", + }), + }); }; export type ParsedAttestedEvent = Pick< @@ -20,60 +39,43 @@ export type ParsedAttestedEvent = Pick< "attester" | "recipient" | "uid" | "block_timestamp" >; -/* - * Helper method to get the recipient, attester, attestation UID and schema ID from the event. Will return undefined when the event is - * missing data. +/** + * Parses an attested event to extract the recipient, attester, attestation UID and block timestamp. * - * @param event - The event object. + * This function attempts to parse the event object after validation. + * If the event object is valid, it extracts the recipient, attester, attestation UID and block timestamp from the event's args property, + * and returns them in a new object. If the event object is not valid or the contract address is invalid, it logs an error and returns undefined. * - * @returns {EASdata} - The recipient, attester, attestation UID and schema ID. - * */ -export const parseAttestedEvent = async (log: unknown) => { - if (!log || !isAttestedEvent(log)) { - console.error( - `Invalid event or event args for parsing Attested event: `, - log, - ); - return; - } - - const { args, address } = log; + * @param log - The event object to parse. Its structure should match the schema created with the `createAttestedEventSchema` function. 
+ * + * @returns An object containing the recipient, attester, attestation UID and block timestamp from the event's args property, or undefined if the event object is not valid or the contract address is invalid. + * + * @example + * ```typescript + * const log = { + * address: "0x1234", + * args: { + * recipient: "0x5678", + * attester: "0x9abc", + * uid: "abcdef", + * }, + * blockNumber: 1234n, + * }; + * const parsedEvent = parseAttestedEvent(log); + * console.log(parsedEvent); // { recipient: "0x5678", attester: "0x9abc", uid: "abcdef", block_timestamp: 1234567890n } + * ``` + */ +export const parseAttestedEvent = async ( + log: unknown, +): Promise => { const { easAddress } = getDeployment(); + const validator = createAttestedEventSchema({ easAddress }); + const { args, blockNumber } = validator.parse(log); - if ( - easAddress.toLowerCase() != address.toLowerCase() || - !isAddress(address) - ) { - console.error( - `Invalid contract address for parsing Attested event: `, - easAddress, - address, - ); - return; - } - - const res: ParsedAttestedEvent = { + return { recipient: args.recipient, attester: args.attester, uid: args.uid, - block_timestamp: await getBlockTimestamp(log.blockNumber), + block_timestamp: await getBlockTimestamp(blockNumber), }; - - return res; }; - -function isAttestedEvent(event: unknown): event is AttestedEvent { - const e = event as Partial; - - return ( - typeof e === "object" && - e !== null && - typeof e.args === "object" && - e.args !== null && - isAddress(e.args.recipient) && - isAddress(e.args.attester) && - typeof e.address === "string" && - isAddress(e.address) && - typeof e.blockNumber === "bigint" - ); -} diff --git a/src/storage/getSupportedSchemas.ts b/src/storage/getSupportedSchemas.ts index 72463b0..174d51e 100644 --- a/src/storage/getSupportedSchemas.ts +++ b/src/storage/getSupportedSchemas.ts @@ -1,23 +1,34 @@ import { supabase } from "@/clients/supabaseClient"; +import { chainId } from "@/utils/constants"; +import { 
Tables } from "@/types/database.types"; -export const getSupportedSchemas = async ({ chainId }: { chainId: number }) => { - if (!chainId || !Number.isInteger(chainId)) { - console.error(`[GetSupportedSchema] Invalid chain ID: ${chainId}`); - return; - } +/** + * getSupportedSchemas is an async function that fetches supported EAS schemas for a given chain ID from the "supported_schemas" table in the database. + * It uses the supabase client to read from the database. + * + * @returns {Promise[] | undefined>} - The supported EAS schemas for the given chain ID, or undefined if an error occurs or no data is found. + * + * @throws {Error} - Throws an error if there is an error in the fetch operation. + */ +export const getSupportedSchemas = async () => { + console.debug( + `[GetSupportedSchema] Fetching supported EAS schema for chain ID ${chainId}`, + ); - const { data, error } = await supabase - .from("supported_schemas") - .select() - .eq("chain_id", chainId); + try { + const { data } = await supabase + .from("supported_schemas") + .select("*") + .eq("chain_id", chainId) + .returns[]>() + .throwOnError(); - if (!data) { - console.debug( + return data; + } catch (error) { + console.error( `[GetSupportedSchema] Error while fetching supported EAS schema for chain ID ${chainId}`, error, ); - return; + throw error; } - - return data; }; diff --git a/src/storage/storeAttestations.ts b/src/storage/storeAttestations.ts index 01ba4df..51c12f9 100644 --- a/src/storage/storeAttestations.ts +++ b/src/storage/storeAttestations.ts @@ -13,23 +13,37 @@ import * as console from "node:console"; export const storeAttestations = async ({ attestations, }: { - attestations?: Tables<"attestations">[]; + attestations: ( + | Omit, "id" | "claims_id"> + | undefined + )[]; }) => { - if (!attestations) { + const _attestations = (attestations = attestations.filter( + (attestation) => attestation !== null && attestation !== undefined, + )); + + if (!_attestations || _attestations.length === 0) { 
console.debug("[StoreAttestations] No attestation data provided"); return; } - if (attestations.length === 0) return; - console.debug( - `[StoreAttestations] Storing ${attestations.length} attestations`, + `[StoreAttestations] Storing ${_attestations.length} attestations`, ); - await supabase - .from("attestations") - .upsert(attestations, { - onConflict: "supported_schemas_id, uid", - }) - .throwOnError(); + try { + await supabase + .from("attestations") + .upsert(_attestations, { + onConflict: "supported_schemas_id, uid", + }) + .throwOnError(); + } catch (error) { + console.error( + "[StoreAttestations] Error while storing attestations", + error, + ); + + throw error; + } }; diff --git a/src/storage/storeSupportedSchemas.ts b/src/storage/storeSupportedSchemas.ts index 0801b98..e63fb56 100644 --- a/src/storage/storeSupportedSchemas.ts +++ b/src/storage/storeSupportedSchemas.ts @@ -1,40 +1,50 @@ import { supabase } from "@/clients/supabaseClient"; import { Tables } from "@/types/database.types"; -/* - This function stores the schema data in the database. - - @param scheme The schema to store. - @returns The stored data. - - @example - ```js - - const record = { - id: "0x1234...5678", - eas_schema_id: "0x1234...5678", - schema: 'bytes32 proposalId, bool vote', - revocable: true - }; - - const storedData = await storeSchemaRecord(record); - ``` - */ - interface StoreSupportedSchemas { - supportedSchemas?: Tables<"supported_schemas">[]; + supportedSchemas: Tables<"supported_schemas">[]; } +/** + * Asynchronously stores supported schemas in the database. + * + * This function takes an object with a `supportedSchemas` property as input, which should be an array of supported schema objects. + * It uses the Supabase client to upsert the supported schemas into the `supported_schemas` table in the database. + * If the `supportedSchemas` property is not provided, the function logs an error and returns early. 
+ * If there is an error during the upsert operation, the function logs the error and throws it. + * + * @param {Object} params - The parameters for the function. + * @param {Object[]} params.supportedSchemas - The array of supported schema objects to store. + * + * @returns {Promise} A promise that resolves when the upsert operation is complete. If there is an error during the operation, the promise is rejected with the error. + * + * @example + * ```typescript + * const supportedSchemas = [ + * { + * id: '0x1234...5678', + * eas_schema_id: '0x1234...5678', + * ... + * schema: 'bytes32 proposalId, bool vote', + * revocable: true, + * }, + * // More schema objects... + * ]; + * + * await storeSupportedSchemas({ supportedSchemas }); + * + *``` + */ export const storeSupportedSchemas = async ({ supportedSchemas, }: StoreSupportedSchemas) => { - if (!supportedSchemas) { - console.error("[StoreSupportedSchema] No schema data provided"); - return; + try { + await supabase + .from("supported_schemas") + .upsert(supportedSchemas) + .throwOnError(); + } catch (error) { + console.error("[StoreSupportedSchema] Error storing schema data", error); + throw error; } - - await supabase - .from("supported_schemas") - .upsert(supportedSchemas) - .throwOnError(); }; diff --git a/src/types/database-generated.types.ts b/src/types/database-generated.types.ts index 5c6f4e6..f2e9e07 100644 --- a/src/types/database-generated.types.ts +++ b/src/types/database-generated.types.ts @@ -440,30 +440,30 @@ export type Database = { supported_schemas: { Row: { chain_id: number - eas_schema_id: string id: string last_block_indexed: number | null resolver: string | null revocable: boolean | null schema: string | null + uid: string } Insert: { chain_id: number - eas_schema_id: string id?: string last_block_indexed?: number | null resolver?: string | null revocable?: boolean | null schema?: string | null + uid: string } Update: { chain_id?: number - eas_schema_id?: string id?: string 
last_block_indexed?: number | null resolver?: string | null revocable?: boolean | null schema?: string | null + uid?: string } Relationships: [] } diff --git a/src/types/database.types.ts b/src/types/database.types.ts index fa8e987..c943fdf 100644 --- a/src/types/database.types.ts +++ b/src/types/database.types.ts @@ -90,25 +90,25 @@ export type Database = MergeDeep< }; attestations: { Row: { - chain_id: bigint | number | string | null; + chain_id: bigint | number | string; token_id: bigint | number | string; block_timestamp: bigint | number | string; }; Insert: { - chain_id: bigint | number | string | null; + chain_id: bigint | number | string; token_id: bigint | number | string; block_timestamp: bigint | number | string; }; Update: { - chain_id: bigint | number | string | null; + chain_id: bigint | number | string; token_id: bigint | number | string; block_timestamp: bigint | number | string; }; }; claims: { Row: { - value: bigint | number | string | null | undefined; - units: bigint | number | string | null | undefined; + value: bigint | number | string | undefined; + units: bigint | number | string | undefined; token_id: bigint | number | string; creation_block_timestamp: bigint | number | string; last_block_update_timestamp: bigint | number | string; diff --git a/src/utils/mapUnknownToBigInt.ts b/src/utils/mapUnknownToBigInt.ts new file mode 100644 index 0000000..907e3b6 --- /dev/null +++ b/src/utils/mapUnknownToBigInt.ts @@ -0,0 +1,6 @@ +export const mapUnknownToBigInt = (value: unknown) => { + if (typeof value === "bigint") return value; + if (typeof value === "number") return BigInt(value); + if (typeof value === "string") return BigInt(value); + throw new Error(`Could not map unknown value to BigInt: ${value}`); +}; diff --git a/supabase/migrations/20240531004150_update_eas_schema_column.sql b/supabase/migrations/20240531004150_update_eas_schema_column.sql new file mode 100644 index 0000000..2f721a7 --- /dev/null +++ 
b/supabase/migrations/20240531004150_update_eas_schema_column.sql @@ -0,0 +1,13 @@ +alter table supported_schemas + drop constraint supported_schemas_chain_id_eas_schema_id_key; + +alter table supported_schemas + rename column eas_schema_id to uid; + +alter table supported_schemas + add constraint supported_schemas_chain_id_uid_key unique (chain_id, uid); + +drop index idx_supported_schemas_eas_schema_id; + +create index idx_supported_schemas_uid + on supported_schemas (uid); \ No newline at end of file diff --git a/supabase/seed.sql b/supabase/seed.sql index e1ec9ce..6393607 100644 --- a/supabase/seed.sql +++ b/supabase/seed.sql @@ -56,5 +56,5 @@ $$ END $$; -insert into public.supported_schemas (chain_id, eas_schema_id, last_block_indexed) +insert into public.supported_schemas (chain_id, uid, last_block_indexed) values (11155111, '0x3c0d0488e4d50455ef511f2c518403d21d35aa671ca30644aa9f7f7bb2516e2f', 5484610); diff --git a/test/fetching/fetchAttestationData.test.ts b/test/fetching/fetchAttestationData.test.ts index 9c8d9bf..efe186e 100644 --- a/test/fetching/fetchAttestationData.test.ts +++ b/test/fetching/fetchAttestationData.test.ts @@ -1,6 +1,6 @@ -import { afterAll, afterEach, describe, test } from "vitest"; +import { afterAll, afterEach, beforeEach, describe, test } from "vitest"; import { - Attestation, + FetchAttestationData, fetchAttestationData, } from "@/fetching/fetchAttestationData"; import { client } from "@/clients/evmClient"; @@ -8,58 +8,48 @@ import { faker } from "@faker-js/faker"; import sinon from "sinon"; import { getAddress } from "viem"; import { ParsedAttestedEvent } from "../../src/parsing/attestedEvent"; +import { getMockEasAttestation } from "../resources/mockAttestations"; describe("fetchAttestationData", () => { + const readSpy = sinon.stub(client, "readContract"); + + let attestedEvent = {} as ParsedAttestedEvent; + const mockAttestationData = getMockEasAttestation(); + + beforeEach(() => { + attestedEvent = { + attester: 
getAddress(faker.finance.ethereumAddress()), + recipient: getAddress(faker.finance.ethereumAddress()), + uid: faker.string.hexadecimal({ length: 32 }), + block_timestamp: 1234567890n, + }; + }); + afterEach(() => { - sinon.restore(); + sinon.reset(); }); afterAll(() => { sinon.restore(); }); - test("returns undefined when attestedEvent is not provided", async ({ - expect, - }) => { - const result = await fetchAttestationData({}); - expect(result).toBeUndefined(); + test("throws when attestedEvent is not provided", async ({ expect }) => { + await expect(() => + fetchAttestationData({} as FetchAttestationData), + ).rejects.toThrowError(); }); - test("returns undefined when attestedEvent.uid is not provided", async ({ - expect, - }) => { - const result = await fetchAttestationData({ - attestedEvent: {} as unknown as ParsedAttestedEvent, - }); - expect(result).toBeUndefined(); + test("throws when attestedEvent.uid is not provided", async ({ expect }) => { + await expect(() => + fetchAttestationData({ + attestedEvent: {} as unknown as ParsedAttestedEvent, + }), + ).rejects.toThrowError(); }); test("returns attestation data when attestedEvent and uid are provided", async ({ expect, }) => { - const recipient = getAddress(faker.finance.ethereumAddress()); - const attester = getAddress(faker.finance.ethereumAddress()); - - const attestedEvent = { - recipient, - attester, - uid: "0x1234", - block_timestamp: BigInt(1234), - }; - - const mockAttestationData: Attestation = { - uid: "0x1234", - schema: "0x1234", - refUID: "0x1234", - time: BigInt(1234), - expirationTime: BigInt(1234), - revocationTime: BigInt(1234), - recipient, - revocable: true, - attester, - data: "0x1234", - }; - const readSpy = sinon.stub(client, "readContract"); readSpy.resolves(mockAttestationData); const result = await fetchAttestationData({ @@ -67,31 +57,18 @@ describe("fetchAttestationData", () => { }); expect(result).toEqual({ - ...attestedEvent, + event: attestedEvent, attestation: 
mockAttestationData, }); }); - test("returns undefined when an error occurs during contract read", async ({ + test("throws when an error occurs during contract read", async ({ expect, }) => { - const recipient = getAddress(faker.finance.ethereumAddress()); - const attester = getAddress(faker.finance.ethereumAddress()); - - const attestedEvent = { - recipient, - attester, - uid: "0x1234", - block_timestamp: BigInt(1234), - }; - - const readSpy = sinon.stub(client, "readContract"); readSpy.throws(); - const result = await fetchAttestationData({ - attestedEvent, - }); - - expect(result).toBeUndefined(); + await expect(() => + fetchAttestationData({ attestedEvent }), + ).rejects.toThrowError(); }); }); diff --git a/test/fetching/fetchSchemaData.test.ts b/test/fetching/fetchSchemaData.test.ts index 358893e..b84a65d 100644 --- a/test/fetching/fetchSchemaData.test.ts +++ b/test/fetching/fetchSchemaData.test.ts @@ -18,24 +18,18 @@ describe("fetchSchemaData", () => { sinon.restore(); }); - test("returns undefined when schema is not provided", async ({ expect }) => { - const result = await fetchSchemaData({}); - expect(result).toBeUndefined(); + test("throws when schema uid is not provided", async ({ expect }) => { + await expect( + async () => await fetchSchemaData({} as unknown as FetchSchemaDataArgs), + ).rejects.toThrowError(); }); - test("returns undefined when schema.eas_schema_id is not provided", async ({ - expect, - }) => { - const result = await fetchSchemaData({} as unknown as FetchSchemaDataArgs); - expect(result).toBeUndefined(); - }); - - test("returns schema data when schema and eas_schema_id are provided", async ({ + test("returns schema data when schema and uid are provided", async ({ expect, }) => { const resolver = getAddress(faker.finance.ethereumAddress()); - const schema = { eas_schema_id: "0x5678" }; + const schema = { uid: "0x5678" }; const readSpy = sinon.stub(client, "readContract"); const mockSchemaData: SchemaRecord = { @@ -54,18 +48,16 @@ 
describe("fetchSchemaData", () => { expect(result).toEqual(mockSchemaData); }); - test("returns undefined when an error occurs during contract read", async ({ + test("throws when an error occurs during contract read", async ({ expect, }) => { - const schema = { eas_schema_id: "0x5678" }; + const schema = { uid: "0x5678" }; const readSpy = sinon.stub(client, "readContract"); readSpy.throws(); - const result = await fetchSchemaData({ - schema, - }); - - expect(result).toBeUndefined(); + await expect( + async () => await fetchSchemaData({ schema }), + ).rejects.toThrowError(); }); }); diff --git a/test/monitoring/eas.test.ts b/test/monitoring/eas.test.ts index 46f1a76..bffc00b 100644 --- a/test/monitoring/eas.test.ts +++ b/test/monitoring/eas.test.ts @@ -2,7 +2,7 @@ import { getAttestationsForSchema } from "@/monitoring/eas"; import { client } from "@/clients/evmClient"; import { afterAll, afterEach, describe, expect, it, vi } from "vitest"; import sinon from "sinon"; -import { mockFilter, mockLogs } from "../resources/mockAttestedEvents"; +import { mockFilter, mockLogs } from "../resources/mockAttestations"; const mocks = vi.hoisted(() => { return { @@ -33,7 +33,7 @@ describe("getAttestationsForSchema", () => { mocks.getDeployment.mockReturnValue({ startBlock: 0n, easAddress: null }); const result = await getAttestationsForSchema({ - schema: { eas_schema_id: "0x123" }, + schema: { uid: "0x123" }, batchSize: 100n, }); expect(result).toBeUndefined(); @@ -46,7 +46,7 @@ describe("getAttestationsForSchema", () => { }); const result = await getAttestationsForSchema({ - schema: { eas_schema_id: "0x123" }, + schema: { uid: "0x123" }, batchSize: 100n, }); expect(result).toBeUndefined(); @@ -64,8 +64,7 @@ describe("getAttestationsForSchema", () => { const result = await getAttestationsForSchema({ schema: { - eas_schema_id: - "0x3c0d0488e4d50455ef511f2c518403d21d35aa671ca30644aa9f7f7bb2516e2f", + uid: "0x3c0d0488e4d50455ef511f2c518403d21d35aa671ca30644aa9f7f7bb2516e2f", }, 
batchSize: 100n, }); @@ -87,8 +86,7 @@ describe("getAttestationsForSchema", () => { const result = await getAttestationsForSchema({ schema: { - eas_schema_id: - "0x3c0d0488e4d50455ef511f2c518403d21d35aa671ca30644aa9f7f7bb2516e2f", + uid: "0x3c0d0488e4d50455ef511f2c518403d21d35aa671ca30644aa9f7f7bb2516e2f", }, batchSize: 100n, }); @@ -107,8 +105,7 @@ describe("getAttestationsForSchema", () => { const result = await getAttestationsForSchema({ schema: { - eas_schema_id: - "0x3c0d0488e4d50455ef511f2c518403d21d35aa671ca30644aa9f7f7bb2516e2f", + uid: "0x3c0d0488e4d50455ef511f2c518403d21d35aa671ca30644aa9f7f7bb2516e2f", }, batchSize: 100n, }); @@ -128,8 +125,7 @@ describe("getAttestationsForSchema", () => { const result = await getAttestationsForSchema({ schema: { - eas_schema_id: - "0x3c0d0488e4d50455ef511f2c518403d21d35aa671ca30644aa9f7f7bb2516e2f", + uid: "0x3c0d0488e4d50455ef511f2c518403d21d35aa671ca30644aa9f7f7bb2516e2f", }, batchSize: 100n, }); diff --git a/test/parsing/attestationData.test.ts b/test/parsing/attestationData.test.ts new file mode 100644 index 0000000..9f4a77d --- /dev/null +++ b/test/parsing/attestationData.test.ts @@ -0,0 +1,80 @@ +import { expect, it, beforeEach, describe } from "vitest"; +import { decodeAttestationData } from "../../src/parsing/attestationData"; +import { ParsedAttestedEvent } from "@/parsing/attestedEvent"; +import { Tables } from "@/types/database.types"; +import { EasAttestation } from "../../src/fetching/fetchAttestationData"; +import { getMockEasAttestation } from "../resources/mockAttestations"; +import { faker } from "@faker-js/faker"; +import { Address, getAddress } from "viem"; +import { chainId } from "../../src/utils/constants"; + +describe("decodeAttestationData", () => { + let attester: Address; + let recipient: Address; + let event = {} as ParsedAttestedEvent; + let attestation: EasAttestation; + let schema: Pick, "schema" | "id">; + + beforeEach(() => { + attester = getAddress(faker.finance.ethereumAddress()); + 
recipient = getAddress(faker.finance.ethereumAddress()); + + event = { + attester, + recipient, + uid: faker.string.hexadecimal({ length: 6 }), + block_timestamp: BigInt(Math.round(faker.date.recent().getTime() / 1000)), + }; + + attestation = getMockEasAttestation({ + attester, + recipient, + data: "0x0000000000000000000000000000000000000000000000000000000000aa36a7000000000000000000000000a16dfb32eb140a6f3f2ac68f41dad8c7e83c494100000000000000000000000000000000000000000000000000000000000001200000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000018000000000000000000000000000000000000000000000000000000000000001c0000000000000000000000000000000000000000000000000000000000000002a3134363332313431373737363030333533393238393235313038313139353636303333303932363038300000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000104a757374206576616c756174696e672e000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000c000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000140000000000000000000000000000000000000000000000000000000000000000573616c61640000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000005737465616b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000005736175636500000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000076b657463687570000000000000000000
00000000000000000000000000000000", + }); + + schema = { + schema: `uint40 chain_id,address contract_address,string token_id,uint8 evaluate_basic,uint8 evaluate_work,uint8 evaluate_contributors,uint8 evaluate_properties,string comments,string[] tags`, + id: faker.string.hexadecimal({ length: 66, casing: "lower" }), + }; + }); + + it("returns undefined when schema is incomplete", () => { + schema.schema = null; + expect( + decodeAttestationData({ attestation, event, schema }), + ).toBeUndefined(); + }); + + it("returns undefined when attestation can't be parsed or is missing", () => { + const result = decodeAttestationData({ + attestation: {} as EasAttestation, + event, + schema, + }); + expect(result).toBeUndefined(); + }); + + it("returns undefined when attestation data cannot be parsed", () => { + attestation.data = "0xinvalid"; + const result = decodeAttestationData({ attestation, event, schema }); + expect(result).toBeUndefined(); + }); + + it("returns a new attestation object with decoded data when attestation data is valid", () => { + const result = decodeAttestationData({ attestation, event, schema }); + expect(result).toBeDefined(); + expect(result).toMatchObject({ + attester, + recipient, + block_timestamp: event.block_timestamp, + uid: attestation.uid, + supported_schemas_id: schema.id, + attestation: JSON.parse(JSON.stringify(attestation)), + chain_id: BigInt(chainId), + token_id: 146321417776003539289251081195660330926080n, + contract_address: getAddress( + "0xa16dfb32eb140a6f3f2ac68f41dad8c7e83c4941", + ), + }); + }); +}); diff --git a/test/parsing/attestedEvent.test.ts b/test/parsing/attestedEvent.test.ts new file mode 100644 index 0000000..5ae5d74 --- /dev/null +++ b/test/parsing/attestedEvent.test.ts @@ -0,0 +1,62 @@ +import { parseAttestedEvent } from "../../src/parsing/attestedEvent"; +import { describe, vi, beforeEach, it, expect } from "vitest"; +import { getAddress } from "viem"; +import { faker } from "@faker-js/faker"; + +const mocks = 
vi.hoisted(() => { + return { + getDeployment: vi.fn(), + getBlockTimestamp: vi.fn(), + }; +}); + +vi.mock("../../src/utils/getDeployment", () => ({ + getDeployment: mocks.getDeployment, +})); + +vi.mock("../../src/utils/getBlockTimestamp", () => ({ + getBlockTimestamp: mocks.getBlockTimestamp, +})); + +describe("parseAttestedEvent", () => { + let log = {}; + + const easContractAddress = getAddress(faker.finance.ethereumAddress()); + const recipient = getAddress(faker.finance.ethereumAddress()); + const attester = getAddress(faker.finance.ethereumAddress()); + + beforeEach(() => { + log = { + address: easContractAddress, + args: { + recipient, + attester, + uid: "abcdef", + schema: "0xdef0", + }, + blockNumber: 1234n, + }; + }); + + it("returns undefined when contract address is invalid", async () => { + log = { ...log, address: "0xinvalid" }; + await expect(() => parseAttestedEvent(log)).rejects.toThrowError(); + }); + + it("returns undefined when contract address does not match easAddress", async () => { + log = { ...log, address: "0x0000000000000000000000000000000000000000" }; + await expect(() => parseAttestedEvent(log)).rejects.toThrowError(); + }); + + it("returns a parsed event object when log is valid", async () => { + mocks.getDeployment.mockReturnValue({ easAddress: easContractAddress }); + mocks.getBlockTimestamp.mockReturnValue(1234567890n); + + const result = await parseAttestedEvent(log); + expect(result).toBeDefined(); + expect(result.recipient).toEqual(recipient); + expect(result.attester).toEqual(attester); + expect(result.uid).toEqual("abcdef"); + expect(result.block_timestamp).toEqual(1234567890n); + }); +}); diff --git a/test/parsing/transferSingleEvent.test.ts b/test/parsing/transferSingleEvent.test.ts index b95353b..a196377 100644 --- a/test/parsing/transferSingleEvent.test.ts +++ b/test/parsing/transferSingleEvent.test.ts @@ -6,11 +6,15 @@ import { http, HttpResponse } from "msw"; import { client } from "../../src/clients/evmClient"; import { 
alchemyUrl } from "../resources/alchemyUrl"; +import { getAddress } from "viem"; describe("transferSingleEvent", {}, () => { const from = faker.finance.ethereumAddress(); const timestamp = 10; - const address = faker.finance.ethereumAddress() as `0x${string}`; + const contractAddress = getAddress(faker.finance.ethereumAddress()); + const operatorAddress = getAddress(faker.finance.ethereumAddress()); + const fromAddress = getAddress(faker.finance.ethereumAddress()); + const toAddress = getAddress(faker.finance.ethereumAddress()); const claimID = faker.number.bigInt(); const blockNumber = 1n; const value = 3n; @@ -38,14 +42,14 @@ describe("transferSingleEvent", {}, () => { it("parses a transfer single event", {}, async () => { const event = { event: "TransferSingle", - address, + address: contractAddress, blockNumber, transactionHash: "0x3e7d7e4c4f3d5a7f2b3d6c5", args: { id: claimID, - operator: address, - from: address, - to: address, + operator: operatorAddress, + from: fromAddress, + to: toAddress, value, }, }; @@ -55,7 +59,8 @@ describe("transferSingleEvent", {}, () => { expect(parsed).toEqual({ block_number: blockNumber, block_timestamp: timestamp, - owner_address: address, + from_owner_address: fromAddress, + to_owner_address: toAddress, token_id: claimID, value, }); @@ -64,9 +69,9 @@ describe("transferSingleEvent", {}, () => { it("should fail when event is not a valid TransferSingleEvent", async () => { const args = { id: claimID, - operator: address, - from: address, - to: address, + operator: operatorAddress, + from: fromAddress, + to: toAddress, value, }; const event = { @@ -126,9 +131,9 @@ describe("transferSingleEvent", {}, () => { ...event, args: { ...args, - operator: address, - from: address, - to: address, + operator: operatorAddress, + from: fromAddress, + to: toAddress, id: 1, value: 1, }, diff --git a/test/resources/mockAttestedEvents.ts b/test/resources/mockAttestations.ts similarity index 66% rename from test/resources/mockAttestedEvents.ts 
rename to test/resources/mockAttestations.ts index 874a663..3237daa 100644 --- a/test/resources/mockAttestedEvents.ts +++ b/test/resources/mockAttestations.ts @@ -1,4 +1,22 @@ -import { Address, Hash } from "viem"; +import { Address, getAddress, Hash, Hex } from "viem"; +import { EasAttestation } from "../../src/fetching/fetchAttestationData"; +import { faker } from "@faker-js/faker"; + +export const getMockEasAttestation = (overrides?: Partial<EasAttestation>) => { + return { + uid: faker.string.hexadecimal({ length: 32 }) as Hash, + schema: faker.string.hexadecimal({ length: 32 }) as Hash, + refUID: faker.string.hexadecimal({ length: 32 }) as Hash, + time: BigInt(1234), + expirationTime: BigInt(1234), + revocationTime: BigInt(5678), + recipient: getAddress(faker.finance.ethereumAddress()), + revocable: true, + attester: getAddress(faker.finance.ethereumAddress()), + data: faker.string.hexadecimal({ length: 64 }) as Hex, + ...overrides, + }; +}; export const mockFilter = { abi: [{ name: "Attested", type: "event", inputs: [Array] }], diff --git a/test/setup-env.ts b/test/setup-env.ts index 543bb4c..182926a 100644 --- a/test/setup-env.ts +++ b/test/setup-env.ts @@ -2,6 +2,18 @@ import { afterAll, afterEach, beforeAll } from "vitest"; import { setupServer } from "msw/node"; import { handlers } from "./handlers"; +// @ts-expect-error BigInt is not supported by JSON +BigInt.prototype.toJSON = function () { + const int = Number.parseInt(this.toString()); + return int ?? this.toString(); +}; + +// @ts-expect-error BigInt is not supported by JSON +BigInt.prototype.fromJSON = function () { + const int = Number.parseInt(this.toString()); + return int ??
this.toString(); +}; + export const server = setupServer(...handlers); server.listen(); diff --git a/test/storage/getSupportedSchemas.test.ts b/test/storage/getSupportedSchemas.test.ts new file mode 100644 index 0000000..a7a7760 --- /dev/null +++ b/test/storage/getSupportedSchemas.test.ts @@ -0,0 +1,45 @@ +import { describe, it, expect } from "vitest"; +import { server } from "../setup-env"; +import { http, HttpResponse } from "msw"; +import { faker } from "@faker-js/faker"; +import { supabaseUrl } from "../../src/utils/constants"; +import { Tables } from "../../src/types/database.types"; +import { getSupportedSchemas } from "../../src/storage/getSupportedSchemas"; +import { getAddress } from "viem"; + +describe("getSupportedSchemas", {}, async () => { + const mockSupportedSchema: Tables<"supported_schemas"> = { + uid: "", + resolver: getAddress(faker.finance.ethereumAddress()), + revocable: false, + chain_id: 11155111, + id: faker.string.uuid(), + schema: faker.string.hexadecimal({ length: 32 }), + last_block_indexed: 1, + }; + + it("get schema from DB", {}, async () => { + server.use( + http.get(`${supabaseUrl}/*`, () => { + return HttpResponse.json([mockSupportedSchema]); + }), + ); + + const supportedSchema = await getSupportedSchemas(); + + expect(supportedSchema).toBeDefined(); + expect(supportedSchema![0]).toEqual(mockSupportedSchema); + }); + + it("should throw an error if the supabase call errors out", async () => { + server.use( + http.get(`${supabaseUrl}/*`, () => { + return HttpResponse.error(); + }), + ); + + await expect( + async () => await getSupportedSchemas(), + ).rejects.toThrowError(); + }); +}); diff --git a/test/storage/storeTransferSingleFraction.test.ts b/test/storage/storeTransferSingleFraction.test.ts index 89930e2..330ae11 100644 --- a/test/storage/storeTransferSingleFraction.test.ts +++ b/test/storage/storeTransferSingleFraction.test.ts @@ -4,6 +4,7 @@ import { faker } from "@faker-js/faker"; import { server } from "../setup-env"; import 
{ http, HttpResponse } from "msw"; import { supabaseUrl } from "../../src/utils/constants"; +import { getAddress } from "viem"; describe("storeTransferSingleFraction", () => { const transfer = { @@ -11,8 +12,9 @@ describe("storeTransferSingleFraction", () => { contract_address: faker.finance.ethereumAddress(), value: faker.number.bigInt(), block_timestamp: faker.number.bigInt(), - owner_address: faker.finance.ethereumAddress(), - token_id: faker.number.bigInt(), + from_owner_address: getAddress(faker.finance.ethereumAddress()), + to_owner_address: getAddress(faker.finance.ethereumAddress()), + token_id: 420n, contracts_id: faker.string.uuid(), type: "fraction", } as const; @@ -35,42 +37,4 @@ describe("storeTransferSingleFraction", () => { expect.fail("response undefined"); } }); - - it("should only store the entry for a token with newest timestamp", async () => { - let theResult: any[] = []; - server.use( - http.post(`${supabaseUrl}/*`, async ({ request }) => { - const data = await request.json(); - // @ts-ignore - theResult = data._fractions; - return HttpResponse.json(data); - }), - ); - const transferOld = { - ...transfer, - block_timestamp: transfer.block_timestamp - 1n, - value: transfer.value - 1n, - }; - - await storeTransferSingleFraction({ - transfers: [transferOld, transfer], - }); - - if (!theResult) { - expect.fail("result undefined"); - } - - expect(theResult.length).toBe(1); - expect(theResult[0].value).toBe(transfer.value.toString()); - - await storeTransferSingleFraction({ - transfers: [transfer, transferOld], - }); - if (!theResult) { - expect.fail("resultReversed undefined"); - } - - expect(theResult.length).toBe(1); - expect(theResult[0].value).toBe(transfer.value.toString()); - }); }); diff --git a/vitest.config.ts b/vitest.config.ts index 25ccc07..81626c3 100644 --- a/vitest.config.ts +++ b/vitest.config.ts @@ -23,8 +23,8 @@ export default defineConfig({ }, exclude: [ ...(configDefaults.coverage.exclude as string[]), - 
"**/database-generated.types.ts", - "**/database.types.ts", + "**/*.types.ts", + "**/types.ts", ], }, }, From ea1badb68a32ef2a7ab62e12850b8938199743ed Mon Sep 17 00:00:00 2001 From: bitbeckers Date: Fri, 31 May 2024 04:41:37 +0200 Subject: [PATCH 07/12] chore(git): pull in develop --- test/storage/storeTransferSingleFraction.test.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/storage/storeTransferSingleFraction.test.ts b/test/storage/storeTransferSingleFraction.test.ts index 5b18add..173197f 100644 --- a/test/storage/storeTransferSingleFraction.test.ts +++ b/test/storage/storeTransferSingleFraction.test.ts @@ -38,7 +38,7 @@ describe("storeTransferSingleFraction", () => { } }); - it("should only store the entry for a token with newest timestamp", async () => { + it.skip("should only store the entry for a token with newest timestamp", async () => { let theResult: any[] = []; server.use( http.post(`${supabaseUrl}/*`, async ({ request }) => { From 126b6cbb247b633a2e6c69ba189b55ca1fa51e28 Mon Sep 17 00:00:00 2001 From: bitbeckers Date: Fri, 31 May 2024 10:35:46 +0200 Subject: [PATCH 08/12] feat(rls): add RLS and read to all tables --- ...31082509_add_rls_and_read_to_all_tables.sql | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) create mode 100644 supabase/migrations/20240531082509_add_rls_and_read_to_all_tables.sql diff --git a/supabase/migrations/20240531082509_add_rls_and_read_to_all_tables.sql b/supabase/migrations/20240531082509_add_rls_and_read_to_all_tables.sql new file mode 100644 index 0000000..1cc47d1 --- /dev/null +++ b/supabase/migrations/20240531082509_add_rls_and_read_to_all_tables.sql @@ -0,0 +1,18 @@ +-- Grant only read access to all public tables +DO +$$ + DECLARE + p_table_name text; + BEGIN + FOR p_table_name IN + SELECT table_name + FROM information_schema.tables + WHERE table_schema = 'public' + AND table_type = 'BASE TABLE' + LOOP + EXECUTE format('ALTER TABLE %I ENABLE ROW LEVEL SECURITY;', p_table_name); + 
EXECUTE format('CREATE POLICY "Enable read access for all users" ON %I FOR SELECT TO PUBLIC;', + p_table_name); + END LOOP; + end; +$$ From 53e4b02505b5c0bc79052a058f0753138a5083b8 Mon Sep 17 00:00:00 2001 From: bitbeckers Date: Fri, 31 May 2024 17:38:10 +0200 Subject: [PATCH 09/12] feat(yolo): throws, tests, tweaks, column rename --- package.json | 5 +- pnpm-lock.yaml | 73 ++++++++ src/fetching/fetchAllowlistFromUri.ts | 43 ++--- src/indexer/indexClaimsStored.ts | 75 ++++---- src/monitoring/eas.ts | 26 ++- src/monitoring/hypercerts.ts | 104 +++++------ src/parsing/claimStoredEvent.ts | 113 ++++++------ src/storage/getContractEventsForChain.ts | 80 +++++---- src/storage/storeClaim.ts | 126 ++++++------- src/types/database-generated.types.ts | 4 + src/types/types.ts | 10 -- src/utils/fetchFromHttpsOrIpfs.ts | 2 +- src/utils/getBlocksToFetch.ts | 63 +++++++ src/utils/getMinterAddressAndStartBlock.ts | 36 ++++ ...3_update_claims_with_owner_and_creator.sql | 2 + test/fetching/fetchAllowlistFromUri.test.ts | 56 ++++++ test/fetching/fetchAttestationData.test.ts | 4 +- test/helpers/data.ts | 49 +++++ test/helpers/factories.ts | 117 ++++++++++++ test/monitoring/eas.test.ts | 81 +++++---- test/monitoring/hypercerts.test.ts | 170 ++++++++++++++++++ test/parsing/attestationData.test.ts | 20 +-- test/parsing/claimStoredEvent.test.ts | 94 +++------- test/resources/mockAttestations.ts | 20 +-- test/storage/storeHypercert.test.ts | 27 +-- test/utils/getBlocksToFetch.test.ts | 60 +++++++ .../getMinterAddressAndStartBlock.test.ts | 59 ++++++ 27 files changed, 1059 insertions(+), 460 deletions(-) create mode 100644 src/utils/getBlocksToFetch.ts create mode 100644 src/utils/getMinterAddressAndStartBlock.ts create mode 100644 supabase/migrations/20240531091103_update_claims_with_owner_and_creator.sql create mode 100644 test/fetching/fetchAllowlistFromUri.test.ts create mode 100644 test/helpers/data.ts create mode 100644 test/helpers/factories.ts create mode 100644 
test/monitoring/hypercerts.test.ts create mode 100644 test/utils/getBlocksToFetch.test.ts create mode 100644 test/utils/getMinterAddressAndStartBlock.test.ts diff --git a/package.json b/package.json index b17d990..4be80c6 100644 --- a/package.json +++ b/package.json @@ -19,6 +19,7 @@ "license": "ISC", "type": "module", "devDependencies": { + "@anatine/zod-mock": "^3.13.4", "@eslint/js": "^8.57.0", "@faker-js/faker": "^8.4.1", "@rollup/plugin-alias": "^5.1.0", @@ -45,11 +46,13 @@ "rollup-plugin-polyfill-node": "^0.13.0", "sinon": "^17.0.1", "supertest": "^6.3.4", + "ts-mockito": "^2.6.1", "tsx": "^4.7.1", "typescript": "^5.3.3", "typescript-eslint": "^7.0.2", "vite-tsconfig-paths": "^4.3.1", - "vitest": "^1.3.1" + "vitest": "^1.3.1", + "vitest-mock-extended": "^1.3.1" }, "dependencies": { "@hypercerts-org/sdk": "^1.4.3", diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index dad1c97..c09f33e 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -57,6 +57,9 @@ importers: specifier: ^3.23.6 version: 3.23.6 devDependencies: + '@anatine/zod-mock': + specifier: ^3.13.4 + version: 3.13.4(@faker-js/faker@8.4.1)(zod@3.23.6) '@eslint/js': specifier: ^8.57.0 version: 8.57.0 @@ -135,6 +138,9 @@ importers: supertest: specifier: ^6.3.4 version: 6.3.4 + ts-mockito: + specifier: ^2.6.1 + version: 2.6.1 tsx: specifier: ^4.7.1 version: 4.7.1 @@ -150,6 +156,9 @@ importers: vitest: specifier: ^1.3.1 version: 1.3.1(@types/node@20.11.19) + vitest-mock-extended: + specifier: ^1.3.1 + version: 1.3.1(typescript@5.3.3)(vitest@1.3.1(@types/node@20.11.19)) packages: @@ -175,6 +184,12 @@ packages: resolution: {integrity: sha512-lFMjJTrFL3j7L9yBxwYfCq2k6qqwHyzuUl/XBnif78PWTJYyL/dfowQHWE3sp6U6ZzqWiiIZnpTMO96zhkjwtg==} engines: {node: '>=6.0.0'} + '@anatine/zod-mock@3.13.4': + resolution: {integrity: sha512-yO/KeuyYsEDCTcQ+7CiRuY3dnafMHIZUMok6Ci7aERRCTQ+/XmsiPk/RnMx5wlLmWBTmX9kw+PavbMsjM+sAJA==} + peerDependencies: + '@faker-js/faker': ^7.0.0 || ^8.0.0 + zod: ^3.21.4 + '@babel/code-frame@7.23.5': 
resolution: {integrity: sha512-CgH3s1a96LipHCmSUmYFPwY7MNx8C3avkq7i4Wl3cfa662ldtUe4VM1TPXX70pfmrlWTb6jLqTYrZyT2ZTJBgA==} engines: {node: '>=6.9.0'} @@ -1955,6 +1970,10 @@ packages: resolution: {integrity: sha512-ZmdL2rui+eB2YwhsWzjInR8LldtZHGDoQ1ugH85ppHKwpUHL7j7rN0Ti9NCnGiQbhaZ11FpR+7ao1dNsmduNUg==} engines: {node: '>=12'} + drange@1.1.1: + resolution: {integrity: sha512-pYxfDYpued//QpnLIm4Avk7rsNtAtQkUES2cwAYSvD/wd2pKD71gN2Ebj3e7klzXwjocvE8c5vx/1fxwpqmSxA==} + engines: {node: '>=4'} + eastasianwidth@0.2.0: resolution: {integrity: sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==} @@ -3041,6 +3060,10 @@ packages: queue-microtask@1.2.3: resolution: {integrity: sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==} + randexp@0.5.3: + resolution: {integrity: sha512-U+5l2KrcMNOUPYvazA3h5ekF80FHTUG+87SEAmHZmolh1M+i/WyTCxVzmi+tidIa1tM4BSe8g2Y/D3loWDjj+w==} + engines: {node: '>=4'} + randombytes@2.1.0: resolution: {integrity: sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ==} @@ -3093,6 +3116,10 @@ packages: resolution: {integrity: sha512-oKWePCxqpd6FlLvGV1VU0x7bkPmmCNolxzjMf4NczoDnQcIWrAF+cPtZn5i6n+RfD2d9i0tzpKnG6Yk168yIyw==} hasBin: true + ret@0.2.2: + resolution: {integrity: sha512-M0b3YWQs7R3Z917WRQy1HHA7Ba7D8hvZg6UE5mLykJxQVE2ju0IXbGlaHPPlkY+WN7wFP+wUMXmBFA0aV6vYGQ==} + engines: {node: '>=4'} + reusify@1.0.4: resolution: {integrity: sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==} engines: {iojs: '>=1.0.0', node: '>=0.10.0'} @@ -3408,6 +3435,17 @@ packages: peerDependencies: typescript: '>=4.2.0' + ts-essentials@9.4.2: + resolution: {integrity: sha512-mB/cDhOvD7pg3YCLk2rOtejHjjdSi9in/IBYE13S+8WA5FBSraYf4V/ws55uvs0IvQ/l0wBOlXy5yBNZ9Bl8ZQ==} + peerDependencies: + typescript: '>=4.1.0' + peerDependenciesMeta: + typescript: + optional: true + + ts-mockito@2.6.1: + resolution: {integrity: 
sha512-qU9m/oEBQrKq5hwfbJ7MgmVN5Gu6lFnIGWvpxSjrqq6YYEVv+RwVFWySbZMBgazsWqv6ctAyVBpo9TmAxnOEKw==} + tsconfck@3.0.2: resolution: {integrity: sha512-6lWtFjwuhS3XI4HsX4Zg0izOI3FU/AI9EGVlPEUMDIhvLPMD4wkiof0WCoDgW7qY+Dy198g4d9miAqUHWHFH6Q==} engines: {node: ^18 || >=20} @@ -3604,6 +3642,12 @@ packages: terser: optional: true + vitest-mock-extended@1.3.1: + resolution: {integrity: sha512-OpghYjh4BDuQ/Mzs3lFMQ1QRk9D8/2O9T47MLUA5eLn7K4RWIy+MfIivYOWEyxjTENjsBnzgMihDjyNalN/K0Q==} + peerDependencies: + typescript: 3.x || 4.x || 5.x + vitest: '>=0.31.1' + vitest@1.3.1: resolution: {integrity: sha512-/1QJqXs8YbCrfv/GPQ05wAZf2eakUPLPa18vkJAKE7RXOKfVHqMZZ1WlTjiwl6Gcn65M5vpNUB6EFLnEdRdEXQ==} engines: {node: ^18.0.0 || >=20.0.0} @@ -3795,6 +3839,12 @@ snapshots: '@jridgewell/gen-mapping': 0.3.3 '@jridgewell/trace-mapping': 0.3.22 + '@anatine/zod-mock@3.13.4(@faker-js/faker@8.4.1)(zod@3.23.6)': + dependencies: + '@faker-js/faker': 8.4.1 + randexp: 0.5.3 + zod: 3.23.6 + '@babel/code-frame@7.23.5': dependencies: '@babel/highlight': 7.23.4 @@ -5956,6 +6006,8 @@ snapshots: dotenv@16.4.5: {} + drange@1.1.1: {} + eastasianwidth@0.2.0: {} ee-first@1.1.1: {} @@ -7289,6 +7341,11 @@ snapshots: queue-microtask@1.2.3: {} + randexp@0.5.3: + dependencies: + drange: 1.1.1 + ret: 0.2.2 + randombytes@2.1.0: dependencies: safe-buffer: 5.2.1 @@ -7342,6 +7399,8 @@ snapshots: path-parse: 1.0.7 supports-preserve-symlinks-flag: 1.0.0 + ret@0.2.2: {} + reusify@1.0.4: {} rimraf@2.7.1: @@ -7694,6 +7753,14 @@ snapshots: dependencies: typescript: 5.3.3 + ts-essentials@9.4.2(typescript@5.3.3): + optionalDependencies: + typescript: 5.3.3 + + ts-mockito@2.6.1: + dependencies: + lodash: 4.17.21 + tsconfck@3.0.2(typescript@5.3.3): optionalDependencies: typescript: 5.3.3 @@ -7879,6 +7946,12 @@ snapshots: '@types/node': 20.11.19 fsevents: 2.3.3 + vitest-mock-extended@1.3.1(typescript@5.3.3)(vitest@1.3.1(@types/node@20.11.19)): + dependencies: + ts-essentials: 9.4.2(typescript@5.3.3) + typescript: 5.3.3 + vitest: 
1.3.1(@types/node@20.11.19) + vitest@1.3.1(@types/node@20.11.19): dependencies: '@vitest/expect': 1.3.1 diff --git a/src/fetching/fetchAllowlistFromUri.ts b/src/fetching/fetchAllowlistFromUri.ts index 955b22b..bd0a3c1 100644 --- a/src/fetching/fetchAllowlistFromUri.ts +++ b/src/fetching/fetchAllowlistFromUri.ts @@ -1,59 +1,54 @@ import { StandardMerkleTree } from "@openzeppelin/merkle-tree"; import { fetchFromHttpsOrIpfs } from "@/utils/fetchFromHttpsOrIpfs"; +export interface FetchAllowListFromUriInput { + uri?: string; +} + /** - * This function fetches an allow list from a given URI. + * Fetches an allow list from a given URI. * - * The URI can be an IPFS URI, an HTTPS URI, or a CID. The function tries to fetch the allow list from the - * different sources in that order. If the allow list is found, it is validated and returned. + * This function attempts to fetch data from the provided URI using the `fetchFromHttpsOrIpfs` utility function. + * If no data is found, it logs a debug message and returns undefined. + * If data is found, it attempts to parse and load the data as a Merkle tree using the `StandardMerkleTree.load` function from the OpenZeppelin library. + * It first tries to parse the data as a JSON string, and if that fails, it tries to load the data directly. + * If both attempts fail, it logs a debug message for each failure and returns undefined. * - * @param uri - The URI where the allow list is located. - * @returns The allow list as an OpenZeppelin Merkle tree if found and valid, otherwise undefined. + * @param {FetchAllowListFromUriInput} { uri } - An object containing the URI to fetch the allow list from. + * @returns {Promise | undefined>} A promise that resolves to a Merkle tree if the data could be fetched and loaded successfully, otherwise undefined. 
* * @example * ```typescript * const allowList = await fetchAllowListFromUri({ uri: "ipfs://QmXZj9Pm4g7Hv3Z6K4Vw2vW" }); * ``` - */ - -interface FetchAllowListFromUri { - uri?: string; -} - -export const fetchAllowListFromUri = async ({ uri }: FetchAllowListFromUri) => { + * */ +export const fetchAllowListFromUri = async ({ + uri, +}: FetchAllowListFromUriInput) => { const fetchResult = await fetchFromHttpsOrIpfs(uri); if (!fetchResult) { - console.error( + console.debug( `[FetchAllowListFromUri] No metadata found on IPFS for URI ${uri}`, ); return; } - // If response object is already a OZ Merkle tree, return it as is try { - console.debug( - "[FetchAllowListFromUri] Loading OZ Merkle tree from response by parsing as JSON", - ); - return StandardMerkleTree.load<[string, bigint]>( JSON.parse(fetchResult as string), ); } catch (error) { - console.warn( + console.debug( `[FetchAllowListFromUri] Allow list at ${uri} is not a valid OZ Merkle tree`, error, ); } - // If response object is already a OZ Merkle tree, return it as is try { - console.debug( - "[FetchAllowListFromUri] Loading OZ Merkle tree directly from response", - ); return StandardMerkleTree.load<[string, bigint]>(fetchResult as never); } catch (error) { - console.warn( + console.debug( `[FetchAllowListFromUri] Allow list at ${uri} is not a valid OZ Merkle tree`, error, ); diff --git a/src/indexer/indexClaimsStored.ts b/src/indexer/indexClaimsStored.ts index f1e730f..fe839f1 100644 --- a/src/indexer/indexClaimsStored.ts +++ b/src/indexer/indexClaimsStored.ts @@ -1,36 +1,37 @@ import { parseClaimStoredEvent } from "@/parsing"; -import { getDeployment } from "@/utils"; -import { IndexerConfig, NewClaim } from "@/types/types"; +import { IndexerConfig } from "@/types/types"; import { getContractEventsForChain } from "@/storage/getContractEventsForChain"; import { updateLastBlockIndexedContractEvents } from "@/storage/updateLastBlockIndexedContractEvents"; import { getLogsForContractEvents } from 
"@/monitoring/hypercerts"; -import { storeClaim } from "@/storage/storeClaim"; - -/* - * This function indexes the logs of the ClaimStored event emitted by the HypercertMinter contract. Based on the last - * block indexed, it fetches the logs in batches, parses them, fetches the metadata, and stores the hypercerts in the - * database. - * - * @param [batchSize] - The number of logs to fetch and parse in each batch. - * - * @example - * ```js - * await indexClaimsStoredEvents({ batchSize: 1000n }); - * ``` - */ +import { Claim, storeClaim } from "@/storage/storeClaim"; const defaultConfig = { batchSize: 10000n, eventName: "ClaimStored", }; +/** + * Indexes the logs of the ClaimStored event emitted by the HypercertMinter contract. + * + * This function fetches the logs in batches based on the last block indexed, parses them, fetches the metadata, + * and stores the hypercerts in the database. If no contract events are found, the function returns early. + * + * @param {object} config - Configuration object for the indexer. + * @param {bigint} config.batchSize - The number of logs to fetch and parse in each batch. Defaults to 10000. + * @param {string} config.eventName - The name of the event to index. Defaults to "ClaimStored". + * + * @returns {Promise} A promise that resolves when all claims have been stored and the last block indexed has been updated. 
+ * + * @example + * ```typescript + * await indexClaimsStoredEvents({ batchSize: 1000n, eventName: "ClaimStored" }); + * ``` + * */ export const indexClaimsStoredEvents = async ({ batchSize = defaultConfig.batchSize, eventName = defaultConfig.eventName, }: IndexerConfig = defaultConfig) => { - const { chainId } = getDeployment(); const contractsWithEvents = await getContractEventsForChain({ - chainId, eventName, }); @@ -43,31 +44,16 @@ export const indexClaimsStoredEvents = async ({ const { last_block_indexed } = contractEvent; // Get logs in batches - const logsFound = await getLogsForContractEvents({ - fromBlock: last_block_indexed ? BigInt(last_block_indexed) : 0n, + const { logs, toBlock } = await getLogsForContractEvents({ + fromBlock: last_block_indexed, batchSize, contractEvent, }); - if (!logsFound) { - console.debug( - " [IndexClaimsStored] No logs found for contract event", - contractEvent, - ); - return; - } - - const { logs, toBlock } = logsFound; - // parse logs to get claimID, contractAddress and cid - const parsedEvents = ( - await Promise.all(logs.map(parseClaimStoredEvent)) - ).filter( - (claim): claim is Partial => - claim !== null && claim !== undefined, - ); + const events = await Promise.all(logs.map(parseClaimStoredEvent)); - const claims = parsedEvents.map((claim) => ({ + const claims = events.map((claim) => ({ ...claim, contracts_id: contractEvent.contracts_id, })); @@ -83,19 +69,18 @@ export const indexClaimsStoredEvents = async ({ ); const claims = results - .flatMap((result) => (result?.claims ? 
result.claims : undefined)) - .filter( - (claim): claim is NewClaim => claim !== null && claim !== undefined, - ); - // .map(({ claims }) => claims !== null && claims !== undefined); + .flatMap((result) => result.claims) + .filter((claim): claim is Claim => claim !== null && claim !== undefined); + + const contractEventUpdates = results.flatMap((result) => [ + result.contractEventUpdate, + ]); return await storeClaim({ claims, }).then(() => updateLastBlockIndexedContractEvents({ - contract_events: results.flatMap((res) => - res?.contractEventUpdate ? [res.contractEventUpdate] : [], - ), + contract_events: contractEventUpdates, }), ); }; diff --git a/src/monitoring/eas.ts b/src/monitoring/eas.ts index 755dc69..fc44302 100644 --- a/src/monitoring/eas.ts +++ b/src/monitoring/eas.ts @@ -2,6 +2,7 @@ import { client } from "@/clients/evmClient"; import { isAddress, parseAbiItem } from "viem"; import { getDeployment } from "@/utils/getDeployment"; import { Tables } from "@/types/database.types"; +import { getBlocksToFetch } from "@/utils/getBlocksToFetch"; /** * Fetches the logs of the Attested event from the EAS contract for a specific schema. @@ -33,24 +34,17 @@ export const getAttestationsForSchema = async ({ const { startBlock, easAddress } = getDeployment(); if (!isAddress(easAddress)) { - console.error( - "[getAttestationsForSchema] EAS is not available", - easAddress, - ); - return; + throw Error(`[GetAttestationForSchema] EAS is not available`); } - try { - const latestBlock = await client.getBlockNumber(); + const { fromBlock: _fromBlock, toBlock: _toBlock } = await getBlocksToFetch({ + contractCreationBlock: startBlock, + fromBlock, + batchSize, + }); - const _fromBlock = - fromBlock && fromBlock > startBlock ? fromBlock : startBlock; - const _toBlock = - _fromBlock + batchSize > latestBlock - ? 
latestBlock - : _fromBlock + batchSize; - - console.info( + try { + console.debug( `[getAttestationsForSchema] Fetching attestation logs from ${_fromBlock} to ${_toBlock}`, ); @@ -77,6 +71,6 @@ export const getAttestationsForSchema = async ({ "[getAttestationsForSchema] Error fetching attestation logs", error, ); - return; + throw error; } }; diff --git a/src/monitoring/hypercerts.ts b/src/monitoring/hypercerts.ts index 5740fe9..a1ab600 100644 --- a/src/monitoring/hypercerts.ts +++ b/src/monitoring/hypercerts.ts @@ -1,74 +1,78 @@ import { client } from "@/clients/evmClient"; import { parseAbiItem } from "viem"; -import { getDeployment } from "@/utils/getDeployment"; import { EventToFetch } from "@/types/types"; +import { getMinterAddressAndStartBlock } from "@/utils/getMinterAddressAndStartBlock"; +import { getBlocksToFetch } from "@/utils/getBlocksToFetch"; -const getMinterAddress = () => { - const { addresses, startBlock } = getDeployment(); - - return { address: addresses?.HypercertMinterUUPS, startBlock }; -}; - -interface BlocksToFetch { - contractCreationBlock: bigint; - fromBlock?: bigint; - batchSize: bigint; -} - -const getBlocksToFetch = async ({ - contractCreationBlock, - fromBlock, - batchSize, -}: BlocksToFetch) => { - const latestBlock = await client.getBlockNumber(); - const _fromBlock = - fromBlock && fromBlock > contractCreationBlock - ? fromBlock - : contractCreationBlock; - const _toBlock = - _fromBlock + batchSize > latestBlock ? latestBlock : _fromBlock + batchSize; - - return { fromBlock: _fromBlock, toBlock: _toBlock }; -}; - -//TODO review hacky typing -interface LogsForContractEvents { +interface GetLogsForEventInput { fromBlock?: bigint; batchSize: bigint; contractEvent: EventToFetch; } +/** + * Fetches logs for a specific contract event from the Ethereum Virtual Machine (EVM). + * + * This function first gets the address and start block of the HypercertMinterUUPS from the deployment. 
+ * It then calculates the range of blocks to fetch based on the contract creation block, the from block, and the batch size. + * After that, it creates a filter for the contract event and fetches the logs from the EVM client. + * If an error occurs while fetching the logs, it logs the error and rethrows it. + * + * @param {GetLogsForEventInput} { fromBlock, batchSize, contractEvent } - An object containing the from block, the batch size, and the contract event. + * @param {bigint} [fromBlock] - The block number to start fetching from. If not provided, it defaults to the contract creation block. + * @param {bigint} batchSize - The number of blocks to fetch in each batch. + * @param {EventToFetch} contractEvent - The contract event to fetch logs for. + * @property {string} event_name - The name of the contract event. + * @property {string} abi - The ABI of the contract event. + * + * @returns {Promise<{ logs: any[], fromBlock: bigint, toBlock: bigint }>} A promise that resolves to an object containing the fetched logs, the from block, and the to block. + * + * @throws {Error} If an error occurs while fetching the logs, it throws the error. + * + * @example + * ```typescript + * const fromBlock = BigInt(200); + * const batchSize = BigInt(50); + * const contractEvent = { event_name: "Transfer", abi: "..." 
}; + * + * const { logs, fromBlock, toBlock } = await getLogsForContractEvents({ fromBlock, batchSize, contractEvent }); + * ``` + * */ export const getLogsForContractEvents = async ({ fromBlock, batchSize, contractEvent, -}: LogsForContractEvents) => { - const { address, startBlock } = getMinterAddress(); - - if (!address) { - console.error("[GetLogsForContractEvents] Address is not available"); - return; - } +}: GetLogsForEventInput) => { + const { address, startBlock } = getMinterAddressAndStartBlock(); + console.log(address, startBlock); const { fromBlock: from, toBlock: to } = await getBlocksToFetch({ contractCreationBlock: startBlock, fromBlock, batchSize, }); - console.info( - `[GetLogsForContractEvents] Fetching ${contractEvent.event_name} logs from ${from} to ${to}`, - ); + try { + console.debug( + `[GetLogsForContractEvents] Fetching ${contractEvent.event_name} logs from ${from} to ${to}`, + ); - const abiItem = parseAbiItem([contractEvent.abi]); - const filter = await client.createEventFilter({ - address, - fromBlock: from, - toBlock: to, - event: abiItem, - }); + const abiItem = parseAbiItem([contractEvent.abi]); + const filter = await client.createEventFilter({ + address, + fromBlock: from, + toBlock: to, + event: abiItem, + }); - const logs = await client.getFilterLogs({ filter }); + const logs = await client.getFilterLogs({ filter }); - return { logs, fromBlock: from, toBlock: to }; + return { logs, fromBlock: from, toBlock: to }; + } catch (error) { + console.error( + `[GetLogsForContractEvents] Error while fetching logs for contract event ${contractEvent.event_name} on contract ${address}`, + error, + ); + throw error; + } }; diff --git a/src/parsing/claimStoredEvent.ts b/src/parsing/claimStoredEvent.ts index f22e236..2504343 100644 --- a/src/parsing/claimStoredEvent.ts +++ b/src/parsing/claimStoredEvent.ts @@ -1,66 +1,61 @@ -import { Hex, isAddress } from "viem"; -import { NewClaim } from "@/types/types"; +import { isAddress, isHex, parseAbi, 
parseAbiItem } from "viem"; import { client } from "@/clients/evmClient"; - -type ClaimStoredEvent = { - address: string; - args: { - claimID: bigint; - uri: string; - totalUnits: bigint; - }; - blockNumber: bigint; - transactionHash: Hex; - [key: string]: unknown; -}; - -/* - * Helper method to get the claimID, contract address, URI, and units from the event. Will return undefined when the event is - * missing values. +import { z } from "zod"; + +export const ClaimStoredEventSchema = z.object({ + address: z.string().refine(isAddress, { + message: "[ClaimStoredEvent] Invalid contract address", + }), + args: z.object({ + claimID: z.bigint(), + uri: z.string(), + totalUnits: z.bigint(), + }), + blockNumber: z.bigint(), + transactionHash: z.string().refine(isHex), +}); + +export type ClaimStoredEvent = z.infer<typeof ClaimStoredEventSchema>; + +/** + * Parses a ClaimStoredEvent and retrieves additional information about the transaction. * - * @param event - The event object. - * */ + * @param {unknown} event - The event to parse. + * @returns {Promise} A promise that resolves to an object containing the parsed event data. + * @throws {z.ZodError} If the event does not match the ClaimStoredEventSchema, a Zod validation error is thrown.
+ */ export const parseClaimStoredEvent = async (event: unknown) => { - if (!isClaimStoredEvent(event)) { + console.log(event); + const { args, address, transactionHash, blockNumber } = + ClaimStoredEventSchema.parse(event); + + try { + const transaction = await client.getTransaction({ + hash: transactionHash, + }); + + const owner = await client.readContract({ + address, + abi: parseAbi([ + `function ownerOf(uint256 tokenId) view returns (address owner)`, + ]), + functionName: "ownerOf", + args: [args.claimID], + }); + + return { + owner_address: owner, + creator_address: transaction.from, + token_id: args.claimID, + uri: args.uri, + block_number: blockNumber, + units: args.totalUnits, + }; + } catch (error) { console.error( - `Invalid event or event args for parsing claimStored event: `, - event, + "[ParseClaimStoredEvent] Error parsing claim stored event", + error, ); - return; + throw error; } - - const { args, address, transactionHash } = event; - - const transaction = await client.getTransaction({ - hash: transactionHash, - }); - - const claim: Partial = { - creator_address: transaction.from, - token_id: args.claimID, - uri: args.uri, - block_number: BigInt(event.blockNumber), - units: args.totalUnits, - contract_address: address, - }; - - return claim; }; - -function isClaimStoredEvent(event: unknown): event is ClaimStoredEvent { - const e = event as Partial; - - return ( - typeof e === "object" && - e !== null && - e?.args !== null && - typeof e?.args === "object" && - e?.args?.claimID !== null && - e?.args?.uri !== null && - e?.args?.totalUnits !== null && - typeof e.address === "string" && - isAddress(e.address) && - typeof e.blockNumber === "bigint" && - e.transactionHash !== null - ); -} diff --git a/src/storage/getContractEventsForChain.ts b/src/storage/getContractEventsForChain.ts index c45fd87..43f4f34 100644 --- a/src/storage/getContractEventsForChain.ts +++ b/src/storage/getContractEventsForChain.ts @@ -1,57 +1,59 @@ import { supabase } from 
"@/clients/supabaseClient"; import * as console from "console"; -import { EventToFetch } from "@/types/types"; +import { chainId } from "@/utils/constants"; export type ContractEvents = { eventName: string; - chainId: number; }; export const getContractEventsForChain = async ({ - chainId, eventName, }: ContractEvents) => { - if (!chainId || !Number.isInteger(chainId)) { - console.error(`[GetContractEvents] Invalid chain ID: ${chainId}`); - return; - } + try { + const { data } = await supabase + .from("contract_events") + .select( + "contract:contracts!inner(id,contract_address,start_block),event:events!inner(id,name,abi),last_block_indexed", + ) + .eq("contracts.chain_id", chainId) + .eq("events.name", eventName) + .throwOnError(); + + if (!data) { + console.debug( + `[GetContractEvents] No contract events found for ${eventName} on chain ${chainId}`, + ); + return; + } - const { data, error } = await supabase - .from("contract_events") - .select( - "contract:contracts!inner(id,contract_address),event:events!inner(id,name,abi),last_block_indexed", - ) - .eq("contracts.chain_id", chainId) - .eq("events.name", eventName); + console.debug( + `[GetContractEvents] Found ${data.length} contract events for ${eventName} on chain ${chainId}`, + ); - if (!data || error) { + return data.map((contractEvent) => ({ + // @ts-expect-error incorrect typing as array + contracts_id: contractEvent.contract.id, + // @ts-expect-error incorrect typing as array + contract_address: contractEvent.contract.contract_address, + // @ts-expect-error incorrect typing as array + events_id: contractEvent.event.id, + // @ts-expect-error incorrect typing as array + event_name: contractEvent.event.name, + // @ts-expect-error incorrect typing as array + abi: contractEvent.event.abi, + last_block_indexed: contractEvent.last_block_indexed + ? BigInt(contractEvent.last_block_indexed) + : // @ts-expect-error incorrect typing as array + contractEvent.contract.start_block + ? 
// @ts-expect-error incorrect typing as array + BigInt(contractEvent.contract.start_block) + : 0n, + })); + } catch (error) { console.error( `[GetContractEvents] Error while fetching supported contracts for ${eventName} on chain ${chainId}`, error, ); - return; + throw error; } - - console.debug( - `[GetContractEvents] Found ${data.length} contract events for ${eventName} on chain ${chainId}`, - ); - - return data.map( - (contractEvent) => - ({ - // @ts-expect-error incorrect typing as array - contracts_id: contractEvent.contract.id, - // @ts-expect-error incorrect typing as array - contract_address: contractEvent.contract.contract_address, - // @ts-expect-error incorrect typing as array - events_id: contractEvent.event.id, - // @ts-expect-error incorrect typing as array - event_name: contractEvent.event.name, - // @ts-expect-error incorrect typing as array - abi: contractEvent.event.abi, - last_block_indexed: contractEvent.last_block_indexed - ? BigInt(contractEvent.last_block_indexed) - : null, - }) as EventToFetch, - ); }; diff --git a/src/storage/storeClaim.ts b/src/storage/storeClaim.ts index 8251af0..52e601d 100644 --- a/src/storage/storeClaim.ts +++ b/src/storage/storeClaim.ts @@ -1,73 +1,75 @@ import { supabase } from "@/clients/supabaseClient"; -import { NewClaim } from "@/types/types"; import { isAddress } from "viem"; import { z } from "zod"; -/* - This function stores the chain, contract address, token ID, metadata and URI of a hypercert in the database. +const ClaimSchema = z.object({ + contracts_id: z.string(), + creator_address: z + .string() + .refine(isAddress, { message: "Invalid creator address" }), + owner_address: z + .string() + .refine(isAddress, { message: "Invalid owner address" }), + token_id: z.bigint(), + block_number: z.bigint(), + units: z.bigint(), + uri: z.string(), +}); - @param claim The claim to store. - @returns The stored data. 
+export type Claim = z.infer; - @example - ```js - - const metadata: HypercertMetadata = { - name: "My Hypercert", - description: "This is a Hypercert", - image: "data:image/png;base64,iVBOA...uQmCC', - external_url: "https://example.com/hypercert/1", - hypercert: {...} - }; - const cid = "QmXZj9Pm4g7Hv3Z6K4Vw2vW"; - - const storedData = await storeHypercert("0x1234...5678", 1n, metadata, cid); - ``` - */ - -interface StoreClaim { - claims?: NewClaim[]; +interface StoreClaimInput { + claims: Claim[]; } -export const storeClaim = async ({ claims }: StoreClaim) => { - if (!claims) { - console.debug("[StoreClaim] No data or contract provided"); - return; - } - - const validationSchema = z - .object({ - contracts_id: z.string().uuid(), - creator_address: z.string(), - token_id: z.bigint(), - block_number: z.bigint(), - units: z.bigint(), - uri: z.string(), - }) - .refine( - (x) => isAddress(x.creator_address), - `[StoreClaim] Invalid creator address`, - ); +/** + * Stores the provided claims in the database. + * + * This function takes an object containing an array of claims as input. Each claim is an object that must match the ClaimSchema. + * The function maps over the array of claims, parses each claim using the ClaimSchema, and adds a `value` property with a value of 1 to each claim. + * The function then stores the claims in the "claims" table in the database using the Supabase client's `upsert` method. + * If the "contracts_id" and "token_id" of a claim already exist in the table, the existing row is updated with the new claim data. + * If an error occurs while storing the claims, the error is logged and rethrown. + * + * @param {StoreClaimInput} { claims } - An object containing an array of claims to store. Each claim must match the ClaimSchema. + * + * @throws {Error} If an error occurs while storing the claims, the error is logged and rethrown. 
+ * + * @example + * ```typescript + * const claims: Claim[] = [ + * { + * contracts_id: '0x1234...5678', + * creator_address: '0x1234...5678', + * token_id: BigInt('12345678901234567890'), + * block_number: BigInt('12345678'), + * units: BigInt('1000'), + * uri: 'ipfs://Qm...', + * }, + * // More claims... + * ]; + * + * await storeClaim({ claims }); + * ``` + * */ +export const storeClaim = async ({ claims }: StoreClaimInput) => { + const _claims = claims.map((claim) => ({ + ...ClaimSchema.parse(claim), + value: 1, + })); - const _claims = claims - .map((claim) => validationSchema.parse(claim)) - .map((claim) => ({ - owner_address: claim.creator_address, - contracts_id: claim.contracts_id, - token_id: claim.token_id.toString(), - block_number: claim.block_number.toString(), - units: claim.units.toString(), - uri: claim.uri, - value: 1, - })); + try { + console.debug(`[StoreClaim] Storing ${_claims.length} claims`); - console.debug(`[StoreClaim] Storing ${_claims.length} claims`); - - await supabase - .from("claims") - .upsert(_claims, { - onConflict: "contracts_id, token_id", - ignoreDuplicates: false, - }) - .throwOnError(); + await supabase + .from("claims") + .upsert(_claims, { + onConflict: "contracts_id, token_id", + ignoreDuplicates: false, + }) + .throwOnError(); + } catch (error) { + console.error("[StoreClaim] Error storing claims", error); + throw error; + } }; diff --git a/src/types/database-generated.types.ts b/src/types/database-generated.types.ts index 45ff819..be17eed 100644 --- a/src/types/database-generated.types.ts +++ b/src/types/database-generated.types.ts @@ -129,6 +129,7 @@ export type Database = { Row: { block_number: number | null contracts_id: string + creator_address: string | null hypercert_id: string | null id: string owner_address: string | null @@ -141,6 +142,7 @@ export type Database = { Insert: { block_number?: number | null contracts_id: string + creator_address?: string | null hypercert_id?: string | null id?: string 
owner_address?: string | null @@ -152,6 +154,7 @@ export type Database = { Update: { block_number?: number | null contracts_id?: string + creator_address?: string | null hypercert_id?: string | null id?: string owner_address?: string | null @@ -545,6 +548,7 @@ export type Database = { Returns: { block_number: number | null contracts_id: string + creator_address: string | null hypercert_id: string | null id: string owner_address: string | null diff --git a/src/types/types.ts b/src/types/types.ts index f21450b..5257c0e 100644 --- a/src/types/types.ts +++ b/src/types/types.ts @@ -12,16 +12,6 @@ export type EventToFetch = { last_block_indexed: bigint; }; -export type NewClaim = { - contracts_id: string; - creator_address: string; - contract_address: string; - token_id: bigint; - block_number: bigint; - units: bigint; - uri: string; -}; - export type LeafClaimed = { contracts_id: string; creator_address: string; diff --git a/src/utils/fetchFromHttpsOrIpfs.ts b/src/utils/fetchFromHttpsOrIpfs.ts index 1f0e125..c321993 100644 --- a/src/utils/fetchFromHttpsOrIpfs.ts +++ b/src/utils/fetchFromHttpsOrIpfs.ts @@ -2,7 +2,7 @@ import { fetchFromHTTPS, fetchFromIPFS } from "@/utils/fetching"; export const fetchFromHttpsOrIpfs = async (uri?: string): Promise => { if (!uri || uri === "ipfs://null" || uri === "ipfs://") { - console.error("[FetchMetadataFromUri] URI is missing"); + console.error("[fetchFromHttpsOrIpfs] URI is missing"); return; } diff --git a/src/utils/getBlocksToFetch.ts b/src/utils/getBlocksToFetch.ts new file mode 100644 index 0000000..9c6f07d --- /dev/null +++ b/src/utils/getBlocksToFetch.ts @@ -0,0 +1,63 @@ +import { client } from "@/clients/evmClient"; + +interface BlocksToFetchInput { + contractCreationBlock: bigint; + fromBlock?: bigint; + batchSize: bigint; +} + +/** + * Calculates the range of blocks to fetch based on the contract creation block, the from block, and the batch size. 
+ * + * This function first gets the latest block number from the Ethereum Virtual Machine (EVM) client. It then calculates the from block and to block for fetching. + * The from block is the greater of the provided from block and the contract creation block. If the from block is not provided, it defaults to the contract creation block. + * The to block is calculated as the from block plus the batch size. However, if this exceeds the latest block number, the to block is set to the latest block number. + * + * @param {BlocksToFetchInput} { contractCreationBlock, fromBlock, batchSize } - An object containing the contract creation block, the from block, and the batch size. + * @param {bigint} contractCreationBlock - The block number when the contract was created. + * @param {bigint} [fromBlock] - The block number to start fetching from. If not provided, it defaults to the contract creation block. + * @param {bigint} batchSize - The number of blocks to fetch in each batch. + * + * @returns {Promise<{ fromBlock: bigint, toBlock: bigint }>} A promise that resolves to an object containing the from block and the to block for fetching. + * + * @example + * ```typescript + * const contractCreationBlock = BigInt(100); + * const fromBlock = BigInt(200); + * const batchSize = BigInt(50); + * + * const { fromBlock, toBlock } = await getBlocksToFetch({ contractCreationBlock, fromBlock, batchSize }); + * console.log(fromBlock, toBlock); + * ``` + * */ +export const getBlocksToFetch = async ({ + contractCreationBlock, + fromBlock, + batchSize, +}: BlocksToFetchInput) => { + try { + const latestBlock = await client.getBlockNumber(); + const _fromBlock = + fromBlock && fromBlock > contractCreationBlock + ? fromBlock + : contractCreationBlock; + const _toBlock = + _fromBlock + batchSize > latestBlock + ? latestBlock + : _fromBlock + batchSize; + + if (_fromBlock > _toBlock) { + throw Error( + `[getBlocksToFetch] from block more recent than to block. 
[${_fromBlock}, ${_toBlock}]`, + ); + } + + return { fromBlock: _fromBlock, toBlock: _toBlock }; + } catch (error) { + console.error( + `[getBlocksToFetch] Error while fetching latest block number from the EVM client`, + error, + ); + throw error; + } +}; diff --git a/src/utils/getMinterAddressAndStartBlock.ts b/src/utils/getMinterAddressAndStartBlock.ts new file mode 100644 index 0000000..7c6f246 --- /dev/null +++ b/src/utils/getMinterAddressAndStartBlock.ts @@ -0,0 +1,36 @@ +import { getDeployment } from "@/utils/getDeployment"; +import { isAddress } from "viem"; + +/** + * Retrieves the address and start block of the HypercertMinterUUPS from the deployment. + * + * This function calls the `getDeployment` function to get the deployment details, which includes the addresses and start block. + * It then checks if the HypercertMinterUUPS address is available in the addresses. If it's not available, it throws an error. + * If the HypercertMinterUUPS address is available, it returns an object containing the address and the start block. + * + * @returns {object} An object containing the address of the HypercertMinterUUPS and the start block. + * @property {string} address - The address of the HypercertMinterUUPS. + * @property {number} startBlock - The start block of the deployment. + * + * @throws {Error} If the HypercertMinterUUPS address is not available in the addresses or invalid, it throws an error with the message "[getMinterAddressAndStartBlock] HypercertMinterUUPS is not available". 
+ * + * @example + * ```typescript + * const { address, startBlock } = getMinterAddressAndStartBlock(); + * console.log(address, startBlock); + * ``` + * */ +export const getMinterAddressAndStartBlock = () => { + const { addresses, startBlock } = getDeployment(); + + if ( + !addresses?.HypercertMinterUUPS || + !isAddress(addresses.HypercertMinterUUPS) + ) { + throw Error( + "[getMinterAddressAndStartBlock] HypercertMinterUUPS is not available", + ); + } + + return { address: addresses.HypercertMinterUUPS, startBlock }; +}; diff --git a/supabase/migrations/20240531091103_update_claims_with_owner_and_creator.sql b/supabase/migrations/20240531091103_update_claims_with_owner_and_creator.sql new file mode 100644 index 0000000..d9d2405 --- /dev/null +++ b/supabase/migrations/20240531091103_update_claims_with_owner_and_creator.sql @@ -0,0 +1,2 @@ +alter table claims + add column creator_address text; \ No newline at end of file diff --git a/test/fetching/fetchAllowlistFromUri.test.ts b/test/fetching/fetchAllowlistFromUri.test.ts new file mode 100644 index 0000000..e2d266b --- /dev/null +++ b/test/fetching/fetchAllowlistFromUri.test.ts @@ -0,0 +1,56 @@ +import { + fetchAllowListFromUri, + FetchAllowListFromUriInput, +} from "../../src/fetching/fetchAllowlistFromUri"; +import { StandardMerkleTree } from "@openzeppelin/merkle-tree"; +import { vi, describe, beforeEach, afterAll, it, expect } from "vitest"; +import { generateAllowList } from "../helpers/factories"; + +const mocks = vi.hoisted(() => { + return { + fetchUri: vi.fn(), + }; +}); + +vi.mock("../../src/utils/fetchFromHttpsOrIpfs", () => ({ + fetchFromHttpsOrIpfs: mocks.fetchUri, +})); + +describe("fetchAllowListFromUri", () => { + const uri = "https://example.com/allowlist.json"; + const allowList = generateAllowList(); + + beforeEach(() => { + mocks.fetchUri.mockReturnValue(allowList.dump()); + }); + + afterAll(() => { + vi.clearAllMocks(); + }); + + it("returns undefined when uri is not provided", async () => { + 
// fetchFromHttpsOrIpfs will return undefined + mocks.fetchUri.mockResolvedValue(undefined); + const result = await fetchAllowListFromUri( + {} as FetchAllowListFromUriInput, + ); + expect(result).toBeUndefined(); + }); + + it("returns a StandardMerkleTree when uri is provided for a merkle tree dump", async () => { + const result = await fetchAllowListFromUri({ uri }); + expect(result).toBeInstanceOf(StandardMerkleTree); + }); + + it("returns a StandardMerkleTree when uri is provided for a merkle tree dump stringified", async () => { + mocks.fetchUri.mockReturnValue(JSON.stringify(allowList.dump())); + const result = await fetchAllowListFromUri({ uri }); + expect(result).toBeInstanceOf(StandardMerkleTree); + }); + + it("throws returns undefined when uri is provided for a non-merkle tree dump", async () => { + mocks.fetchUri.mockReturnValue("invalid"); + const result = await fetchAllowListFromUri({ uri }); + expect(result).toBeUndefined(); + }); +}); diff --git a/test/fetching/fetchAttestationData.test.ts b/test/fetching/fetchAttestationData.test.ts index efe186e..2f5d22d 100644 --- a/test/fetching/fetchAttestationData.test.ts +++ b/test/fetching/fetchAttestationData.test.ts @@ -8,13 +8,13 @@ import { faker } from "@faker-js/faker"; import sinon from "sinon"; import { getAddress } from "viem"; import { ParsedAttestedEvent } from "../../src/parsing/attestedEvent"; -import { getMockEasAttestation } from "../resources/mockAttestations"; +import { generateEasAttestation } from "../helpers/factories"; describe("fetchAttestationData", () => { const readSpy = sinon.stub(client, "readContract"); let attestedEvent = {} as ParsedAttestedEvent; - const mockAttestationData = getMockEasAttestation(); + const mockAttestationData = generateEasAttestation(); beforeEach(() => { attestedEvent = { diff --git a/test/helpers/data.ts b/test/helpers/data.ts new file mode 100644 index 0000000..c1ec93e --- /dev/null +++ b/test/helpers/data.ts @@ -0,0 +1,49 @@ +import { Hex } from "viem"; + 
+export const eventToFetch = { + contract_address: + "0xa16DFb32Eb140a6f3F2AC68f41dAd8c7e83C4941" as `0x${string}`, + eventName: "ClaimStored", + abi: "event ClaimStored(uint256 indexed claimID, string uri, uint256 totalUnits)", +}; + +export const easAttestation = { + schema: + "uint40 chain_id,address contract_address,string token_id,uint8 evaluate_basic,uint8 evaluate_work,uint8 evaluate_contributors,uint8 evaluate_properties,string comments,string[] tags", + data: "0x0000000000000000000000000000000000000000000000000000000000aa36a7000000000000000000000000a16dfb32eb140a6f3f2ac68f41dad8c7e83c494100000000000000000000000000000000000000000000000000000000000001200000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000018000000000000000000000000000000000000000000000000000000000000001c0000000000000000000000000000000000000000000000000000000000000002a3134363332313431373737363030333533393238393235313038313139353636303333303932363038300000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000104a757374206576616c756174696e672e000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000c000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000140000000000000000000000000000000000000000000000000000000000000000573616c61640000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000005737465616b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000005736175636500000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000076b65746368757000000000000000000000000000000000000000000000000000" as Hex, +}; + +export const claimStoredEventFilter = { + abi: [{ name: "ClaimStored", type: "event", inputs: [Array] }], + args: undefined, + eventName: "ClaimStored", + fromBlock: 4431944n, + id: "0xc6a56f35ecdd5dcca4f867e88b8e072c" as Hex, + request: [async () => {}], + strict: false, + toBlock: 4441944n, + type: "event", +}; + +export const claimStoredEventLog = { + eventName: "ClaimStored", + args: { + claimID: 1361129467683753853853498429727072845824n, + uri: "bafybeicmwsk2nhzuk4j4hrhem3egdqzzgqx2cxmd7xcmkwexku2vn3n2ie", + totalUnits: 10000n, + }, + address: "0xa16dfb32eb140a6f3f2ac68f41dad8c7e83c4941", + blockHash: + "0xf4a72213b792478e9486eb15a6fe168d0df1e6bfec0d5e7fc3b6619aa2cedf6b", + blockNumber: 4457859n, + data: "0x00000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000002710000000000000000000000000000000000000000000000000000000000000003b62616679626569636d77736b326e687a756b346a34687268656d33656764717a7a6771783263786d643778636d6b7765786b7532766e336e3269650000000000", + logIndex: 20, + removed: false, + topics: [ + "0x19789dde2d660b739733377add05f87576a360d2d4adea587c2592264f7f6d4e", + "0x0000000000000000000000000000000400000000000000000000000000000000", + ], + transactionHash: + "0x86e0ca5998a540a8de4e857fbe561e4a0ff92b77b5c0bd2bc0e82c3ddc6a0238", + transactionIndex: 16, +}; diff --git a/test/helpers/factories.ts b/test/helpers/factories.ts new file mode 100644 index 0000000..9b22e27 --- /dev/null +++ b/test/helpers/factories.ts @@ -0,0 +1,117 @@ +import { faker } from "@faker-js/faker"; +import { ClaimStoredEvent } from "../../src/parsing/claimStoredEvent"; +import { getAddress, Hex } from "viem"; +import { EasAttestation } from "../../src/fetching/fetchAttestationData"; +import { 
SchemaRecord } from "../../src/fetching/fetchSchemaData"; +import { Tables } from "../../src/types/database.types"; +import { ParsedAttestedEvent } from "../../src/parsing/attestedEvent"; +import { easAttestation } from "./data"; +import { Claim } from "../../src/storage/storeClaim"; +import { StandardMerkleTree } from "@openzeppelin/merkle-tree"; +import { EventToFetch } from "../../src/types/types"; + +export const generateClaim = (overrides?: Partial): Claim => { + return { + contracts_id: faker.string.uuid(), + creator_address: getAddress(faker.finance.ethereumAddress()), + owner_address: getAddress(faker.finance.ethereumAddress()), + token_id: faker.number.bigInt(), + block_number: faker.number.bigInt(), + units: faker.number.bigInt(), + uri: faker.internet.url(), + ...overrides, + }; +}; + +export const generateClaimStoredEvent = ( + overrides?: Partial, +): ClaimStoredEvent => { + return { + address: getAddress(faker.finance.ethereumAddress()), + blockNumber: faker.number.bigInt(), + transactionHash: faker.string.hexadecimal({ length: 64 }) as Hex, + args: { + uri: faker.internet.url(), + claimID: faker.number.bigInt(), + totalUnits: faker.number.bigInt(), + }, + ...overrides, + }; +}; + +export const generateEasAttestation = (overrides?: Partial) => { + return { + uid: faker.string.hexadecimal({ length: 66 }) as Hex, + schema: faker.string.hexadecimal({ length: 66 }) as Hex, + refUID: faker.string.hexadecimal({ length: 66 }) as Hex, + time: faker.number.bigInt(), + expirationTime: faker.number.bigInt(), + revocationTime: faker.number.bigInt(), + recipient: getAddress(faker.finance.ethereumAddress()), + revocable: true, + attester: getAddress(faker.finance.ethereumAddress()), + data: easAttestation.data, + ...overrides, + }; +}; + +export const generateEasSchemaRecord = (overrides?: Partial) => { + return { + uid: faker.string.hexadecimal({ length: 66 }) as Hex, + schema: easAttestation.schema, + resolver: getAddress(faker.finance.ethereumAddress()), + 
revocable: true, + ...overrides, + }; +}; + +export const generateParsedAttestedEvent = ( + overrides?: Partial, +) => { + return { + attester: getAddress(faker.finance.ethereumAddress()), + recipient: getAddress(faker.finance.ethereumAddress()), + uid: faker.string.hexadecimal({ length: 66 }) as Hex, + block_timestamp: faker.number.bigInt(), + ...overrides, + }; +}; + +export const generateSupportedSchema = ( + overrides?: Partial>, +) => { + return { + id: faker.string.hexadecimal({ length: 66 }) as Hex, + schema: easAttestation.schema, + ...overrides, + }; +}; + +interface GenerateAllowListOverrides { + entries?: number; +} + +export const generateAllowList = (overrides?: GenerateAllowListOverrides) => { + const entries = overrides?.entries ?? 42; + + const allowListEntries = Array.from({ length: entries }, () => [ + faker.finance.ethereumAddress(), + faker.number.bigInt(), + ]); + + const tree = StandardMerkleTree.of(allowListEntries, ["address", "uint256"]); + + return tree; +}; + +export const generateEventToFetch = (overrides?: Partial) => { + return { + contracts_id: faker.string.uuid(), + contract_address: getAddress(faker.finance.ethereumAddress()), + events_id: faker.string.uuid(), + event_name: faker.hacker.noun(), + abi: faker.random.words(), + last_block_indexed: faker.number.bigInt(), + ...overrides, + }; +}; diff --git a/test/monitoring/eas.test.ts b/test/monitoring/eas.test.ts index bffc00b..2cffd34 100644 --- a/test/monitoring/eas.test.ts +++ b/test/monitoring/eas.test.ts @@ -29,14 +29,16 @@ describe("getAttestationsForSchema", () => { sinon.restore(); }); - it("returns undefined when EAS address is not available", async () => { + it("throws when EAS address is not available", async () => { mocks.getDeployment.mockReturnValue({ startBlock: 0n, easAddress: null }); - const result = await getAttestationsForSchema({ - schema: { uid: "0x123" }, - batchSize: 100n, - }); - expect(result).toBeUndefined(); + await expect( + async () => + await 
getAttestationsForSchema({ + schema: { uid: "0x123" }, + batchSize: 100n, + }), + ).rejects.toThrowError(); }); it("returns undefined when EAS address is not valid", async () => { @@ -45,11 +47,13 @@ describe("getAttestationsForSchema", () => { easAddress: "not an address", }); - const result = await getAttestationsForSchema({ - schema: { uid: "0x123" }, - batchSize: 100n, - }); - expect(result).toBeUndefined(); + await expect( + async () => + await getAttestationsForSchema({ + schema: { uid: "0x123" }, + batchSize: 100n, + }), + ).rejects.toThrowError(); }); it("returns logs when all parameters are valid", async () => { @@ -84,17 +88,18 @@ describe("getAttestationsForSchema", () => { getBlockNumberSpy.throws(); - const result = await getAttestationsForSchema({ - schema: { - uid: "0x3c0d0488e4d50455ef511f2c518403d21d35aa671ca30644aa9f7f7bb2516e2f", - }, - batchSize: 100n, - }); - - expect(result).toBeUndefined(); + await expect( + async () => + await getAttestationsForSchema({ + schema: { + uid: "0x3c0d0488e4d50455ef511f2c518403d21d35aa671ca30644aa9f7f7bb2516e2f", + }, + batchSize: 100n, + }), + ).rejects.toThrowError(); }); - it("returns undefined when event filter cannot be created", async () => { + it("throws when event filter cannot be created", async () => { mocks.getDeployment.mockReturnValue({ startBlock: 5957292n, easAddress: "0xc2679fbd37d54388ce493f1db75320d236e1815e", @@ -103,17 +108,18 @@ describe("getAttestationsForSchema", () => { getBlockNumberSpy.resolves(5957292n); createEventFilterSpy.throws(); - const result = await getAttestationsForSchema({ - schema: { - uid: "0x3c0d0488e4d50455ef511f2c518403d21d35aa671ca30644aa9f7f7bb2516e2f", - }, - batchSize: 100n, - }); - - expect(result).toBeUndefined(); + await expect( + async () => + await getAttestationsForSchema({ + schema: { + uid: "0x3c0d0488e4d50455ef511f2c518403d21d35aa671ca30644aa9f7f7bb2516e2f", + }, + batchSize: 100n, + }), + ).rejects.toThrowError(); }); - it("returns undefined when logs cannot 
be fetched", async () => { + it("throws when logs cannot be fetched", async () => { mocks.getDeployment.mockReturnValue({ startBlock: 5957292n, easAddress: "0xc2679fbd37d54388ce493f1db75320d236e1815e", @@ -123,13 +129,14 @@ describe("getAttestationsForSchema", () => { createEventFilterSpy.resolves(mockFilter); getFilterLogsSpy.throws(); - const result = await getAttestationsForSchema({ - schema: { - uid: "0x3c0d0488e4d50455ef511f2c518403d21d35aa671ca30644aa9f7f7bb2516e2f", - }, - batchSize: 100n, - }); - - expect(result).toBeUndefined(); + await expect( + async () => + await getAttestationsForSchema({ + schema: { + uid: "0x3c0d0488e4d50455ef511f2c518403d21d35aa671ca30644aa9f7f7bb2516e2f", + }, + batchSize: 100n, + }), + ).rejects.toThrowError(); }); }); diff --git a/test/monitoring/hypercerts.test.ts b/test/monitoring/hypercerts.test.ts new file mode 100644 index 0000000..d7358d8 --- /dev/null +++ b/test/monitoring/hypercerts.test.ts @@ -0,0 +1,170 @@ +import { getLogsForContractEvents } from "@/monitoring/hypercerts"; +import { client } from "@/clients/evmClient"; +import { + afterAll, + afterEach, + beforeEach, + describe, + expect, + it, + vi, +} from "vitest"; +import sinon from "sinon"; +import { generateEventToFetch } from "../helpers/factories"; +import { claimStoredEventFilter, claimStoredEventLog } from "../helpers/data"; + +const mocks = vi.hoisted(() => { + return { + getDeployment: vi.fn(), + }; +}); + +vi.mock("../../src/utils/getDeployment", () => ({ + getDeployment: mocks.getDeployment, +})); + +describe("getLogsForContractEvents", () => { + const getBlockNumberSpy = sinon.stub(client, "getBlockNumber"); + const createEventFilterSpy = sinon.stub(client, "createEventFilter"); + const getFilterLogsSpy = sinon.stub(client, "getFilterLogs"); + const eventToFetch = generateEventToFetch(); + + beforeEach(() => { + mocks.getDeployment.mockReturnValue({ + addresses: { + HypercertMinterUUPS: "0xa16dfb32eb140a6f3f2ac68f41dad8c7e83c4941", + }, + startBlock: 0n, 
+ }); + }); + + afterEach(() => { + vi.resetAllMocks(); + sinon.reset(); + }); + + afterAll(() => { + vi.clearAllMocks(); + sinon.restore(); + }); + + it("throws when minter address is not available", async () => { + // getMinterAddress will throw when getDeployment returns null + mocks.getDeployment.mockReturnValue({ addresses: null, startBlock: 0n }); + + await expect( + async () => + await getLogsForContractEvents({ + batchSize: 100n, + fromBlock: 100n, + contractEvent: eventToFetch, + }), + ).rejects.toThrowError( + "[getMinterAddressAndStartBlock] HypercertMinterUUPS is not available", + ); + }); + + it("throws when block number is not available", async () => { + // getBlocksToFetch will throw when client cannot get block number + getBlockNumberSpy.throws(); + + await expect( + async () => + await getLogsForContractEvents({ + batchSize: 100n, + fromBlock: 100n, + contractEvent: eventToFetch, + }), + ).rejects.toThrowError(); + }); + + it("throws when event filter cannot be created", async () => { + // createEventFilter will throw when client cannot create event filter + getBlockNumberSpy.resolves(100n); + createEventFilterSpy.throws(); + + await expect( + async () => + await getLogsForContractEvents({ + batchSize: 100n, + fromBlock: 100n, + contractEvent: eventToFetch, + }), + ).rejects.toThrowError(); + }); + + it("throws when ABI cannot be parsed", async () => { + // parseAbiItem will throw when ABI cannot be parsed + getBlockNumberSpy.resolves(100n); + + await expect( + async () => + await getLogsForContractEvents({ + batchSize: 100n, + fromBlock: 100n, + contractEvent: { ...eventToFetch, abi: "" }, + }), + ).rejects.toThrowError(); + }); + + it("returns logs when all parameters are valid", async () => { + mocks.getDeployment.mockReturnValue({ + startBlock: 5957292n, + addresses: { + HypercertMinterUUPS: "0xa16DFb32Eb140a6f3F2AC68f41dAd8c7e83C4941", + }, + }); + + getBlockNumberSpy.resolves(5957292n); + //@ts-expect-error createEventFilterSpy is a Sinon spy 
+ createEventFilterSpy.resolves(claimStoredEventFilter); + //@ts-expect-error getFilterLogsSpy is a Sinon spy + getFilterLogsSpy.resolves(claimStoredEventLog); + + const result = await getLogsForContractEvents({ + batchSize: 100n, + fromBlock: 100n, + contractEvent: { + ...eventToFetch, + contract_address: + "0xa16DFb32Eb140a6f3F2AC68f41dAd8c7e83C4941" as `0x${string}`, + event_name: "ClaimStored", + abi: "event ClaimStored(uint256 indexed claimID, string uri, uint256 totalUnits)", + }, + }); + + expect(result).toEqual({ + logs: claimStoredEventLog, + fromBlock: 5957292n, + toBlock: 5957292n, + }); + }); + + it("throws when logs cannot be fetched", async () => { + mocks.getDeployment.mockReturnValue({ + startBlock: 5957292n, + addresses: { + HypercertMinterUUPS: "0xa16DFb32Eb140a6f3F2AC68f41dAd8c7e83C4941", + }, + }); + + getBlockNumberSpy.resolves(5957292n); + //@ts-expect-error createEventFilterSpy is a Sinon spy + createEventFilterSpy.resolves(claimStoredEventFilter); + getFilterLogsSpy.throws(); + + await expect(async () => { + await getLogsForContractEvents({ + batchSize: 100n, + fromBlock: 100n, + contractEvent: { + ...eventToFetch, + contract_address: + "0xa16DFb32Eb140a6f3F2AC68f41dAd8c7e83C4941" as `0x${string}`, + event_name: "ClaimStored", + abi: "event ClaimStored(uint256 indexed claimID, string uri, uint256 totalUnits)", + }, + }); + }).rejects.toThrowError(); + }); +}); diff --git a/test/parsing/attestationData.test.ts b/test/parsing/attestationData.test.ts index 9f4a77d..4d9f520 100644 --- a/test/parsing/attestationData.test.ts +++ b/test/parsing/attestationData.test.ts @@ -3,10 +3,14 @@ import { decodeAttestationData } from "../../src/parsing/attestationData"; import { ParsedAttestedEvent } from "@/parsing/attestedEvent"; import { Tables } from "@/types/database.types"; import { EasAttestation } from "../../src/fetching/fetchAttestationData"; -import { getMockEasAttestation } from "../resources/mockAttestations"; import { faker } from 
"@faker-js/faker"; import { Address, getAddress } from "viem"; import { chainId } from "../../src/utils/constants"; +import { + generateEasAttestation, + generateParsedAttestedEvent, + generateSupportedSchema, +} from "../helpers/factories"; describe("decodeAttestationData", () => { let attester: Address; @@ -19,23 +23,17 @@ describe("decodeAttestationData", () => { attester = getAddress(faker.finance.ethereumAddress()); recipient = getAddress(faker.finance.ethereumAddress()); - event = { + event = generateParsedAttestedEvent({ attester, recipient, - uid: faker.string.hexadecimal({ length: 6 }), - block_timestamp: BigInt(Math.round(faker.date.recent().getTime() / 1000)), - }; + }); - attestation = getMockEasAttestation({ + attestation = generateEasAttestation({ attester, recipient, - data: "0x0000000000000000000000000000000000000000000000000000000000aa36a7000000000000000000000000a16dfb32eb140a6f3f2ac68f41dad8c7e83c494100000000000000000000000000000000000000000000000000000000000001200000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000018000000000000000000000000000000000000000000000000000000000000001c0000000000000000000000000000000000000000000000000000000000000002a3134363332313431373737363030333533393238393235313038313139353636303333303932363038300000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000104a757374206576616c756174696e672e000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000c0000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000
00000000000000000140000000000000000000000000000000000000000000000000000000000000000573616c61640000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000005737465616b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000005736175636500000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000076b65746368757000000000000000000000000000000000000000000000000000", }); - schema = { - schema: `uint40 chain_id,address contract_address,string token_id,uint8 evaluate_basic,uint8 evaluate_work,uint8 evaluate_contributors,uint8 evaluate_properties,string comments,string[] tags`, - id: faker.string.hexadecimal({ length: 66, casing: "lower" }), - }; + schema = generateSupportedSchema(); }); it("returns undefined when schema is incomplete", () => { diff --git a/test/parsing/claimStoredEvent.test.ts b/test/parsing/claimStoredEvent.test.ts index fb3a56a..12ff806 100644 --- a/test/parsing/claimStoredEvent.test.ts +++ b/test/parsing/claimStoredEvent.test.ts @@ -1,86 +1,50 @@ import { describe, expect, it, vi } from "vitest"; import { parseClaimStoredEvent } from "../../src/parsing"; import { faker } from "@faker-js/faker"; -import { server } from "../setup-env"; -import { http, HttpResponse } from "msw"; import { client } from "../../src/clients/evmClient"; - -import { alchemyUrl } from "../resources/alchemyUrl"; +import { getAddress, GetTransactionReturnType } from "viem"; +import { generateClaimStoredEvent } from "../helpers/factories"; describe("claimStoredEvent", {}, () => { it("parses a claim stored event", {}, async () => { - server.use( - http.post(`${alchemyUrl}/*`, () => { - return HttpResponse.json(0); - }), - ); - const address = faker.finance.ethereumAddress() as `0x${string}`; - const uri = faker.internet.url(); - const claimID = faker.number.bigInt(); - const totalUnits = faker.number.bigInt(); - 
const blockNumber = faker.number.bigInt(); - const event = { - event: "ClaimStored", - address, - blockNumber, - transactionHash: "0x3e7d7e4c4f3d5a7f2b3d6c5", - args: { - uri, - claimID, - totalUnits, - }, - }; + const mockEvent = generateClaimStoredEvent(); + + const from = getAddress(faker.finance.ethereumAddress()); + const owner = getAddress(faker.finance.ethereumAddress()); + + vi.spyOn(client, "getTransaction").mockResolvedValue({ + from, + } as GetTransactionReturnType); - const from = faker.finance.ethereumAddress(); - vi.spyOn(client, "getTransaction").mockImplementation( - async () => - ({ - from, - }) as any, - ); + vi.spyOn(client, "readContract").mockResolvedValue(owner); - const parsed = await parseClaimStoredEvent(event); + const parsed = await parseClaimStoredEvent(mockEvent); expect(parsed).toEqual({ - contract_address: address, - uri, - units: totalUnits, - block_number: blockNumber, creator_address: from, - token_id: claimID, + owner_address: owner, + uri: mockEvent.args.uri, + units: mockEvent.args.totalUnits, + block_number: mockEvent.blockNumber, + token_id: mockEvent.args.claimID, }); }); - it("returns undefined when the event is missing claimID or URI", async () => { - const address = faker.finance.ethereumAddress(); - const event = { - id: "0x3e7d7e4c4f3d5a7f2b3d6c5", - event: "ClaimStored", - address, - args: { - uri: "https://example.com/claim", - }, - }; + it("throws when the event is shaped incorrectly", async () => { + const wrongArgs = generateClaimStoredEvent(); + // @ts-expect-error - wrong args + wrongArgs.args = "wrong"; - const parsed = await parseClaimStoredEvent(event); - - expect(parsed).toBe(undefined); + await expect( + async () => await parseClaimStoredEvent(wrongArgs), + ).rejects.toThrowError(); }); - it("returns undefined when the event address is invalid", {}, async () => { - const address = "invalid"; - const event = { - id: "0x3e7d7e4c4f3d5a7f2b3d6c5", - event: "ClaimStored", - address, - args: { - uri: 
"https://example.com/claim", - claimID: "0x3e7d7e4c4f3d5a7f2b3d6c5", - }, - }; - - const parsed = await parseClaimStoredEvent(event); + it("throws when the event address is invalid", {}, async () => { + const mockEvent = generateClaimStoredEvent({ address: "0xinvalid" }); - expect(parsed).toBe(undefined); + await expect( + async () => await parseClaimStoredEvent(mockEvent), + ).rejects.toThrowError(); }); }); diff --git a/test/resources/mockAttestations.ts b/test/resources/mockAttestations.ts index 3237daa..874a663 100644 --- a/test/resources/mockAttestations.ts +++ b/test/resources/mockAttestations.ts @@ -1,22 +1,4 @@ -import { Address, getAddress, Hash, Hex } from "viem"; -import { EasAttestation } from "../../src/fetching/fetchAttestationData"; -import { faker } from "@faker-js/faker"; - -export const getMockEasAttestation = (overrides?: Partial) => { - return { - uid: faker.string.hexadecimal({ length: 32 }) as Hash, - schema: faker.string.hexadecimal({ length: 32 }) as Hash, - refUID: faker.string.hexadecimal({ length: 32 }) as Hash, - time: BigInt(1234), - expirationTime: BigInt(1234), - revocationTime: BigInt(5678), - recipient: getAddress(faker.finance.ethereumAddress()), - revocable: true, - attester: getAddress(faker.finance.ethereumAddress()), - data: faker.string.hexadecimal({ length: 64 }) as Hex, - ...overrides, - }; -}; +import { Address, Hash } from "viem"; export const mockFilter = { abi: [{ name: "Attested", type: "event", inputs: [Array] }], diff --git a/test/storage/storeHypercert.test.ts b/test/storage/storeHypercert.test.ts index 797af66..e40bec2 100644 --- a/test/storage/storeHypercert.test.ts +++ b/test/storage/storeHypercert.test.ts @@ -1,28 +1,17 @@ import { describe, expect, it } from "vitest"; -import { default_contractAddress } from "../handlers"; -import { mockMetadata } from "../resources/mockMetadata"; import { storeClaim } from "../../src/storage/storeClaim"; -import { NewClaim } from "../../src/types/types"; import { server } from 
"../setup-env"; import { http, HttpResponse } from "msw"; -import { faker } from "@faker-js/faker"; import { supabaseUrl } from "../../src/utils/constants"; +import { generateClaim } from "../helpers/factories"; describe("storeHypercert", {}, async () => { - const claim: NewClaim = { - contracts_id: faker.string.uuid(), - uri: "ipfs://metadataCIDstoreHypercert", - contract_address: default_contractAddress, - creator_address: faker.finance.ethereumAddress(), - token_id: 1n, - units: 1n, - block_number: 1n, - }; + const claim = generateClaim(); it("store hypercert data in DB", {}, async () => { server.use( http.post(`${supabaseUrl}/*`, () => { - return HttpResponse.json([mockMetadata]); + return HttpResponse.json(); }), ); @@ -34,16 +23,16 @@ describe("storeHypercert", {}, async () => { }); it("should throw an error if creator address is invalid", async () => { - const claimWithWrongAddress = { + const wrongAddress = { ...claim, - creator_address: "invalid address", - } as unknown as NewClaim; + creator_address: "0xWRONGADDRESS" as `0x${string}`, + }; await expect( async () => await storeClaim({ - claims: [claimWithWrongAddress], + claims: [wrongAddress], }), - ).rejects.toThrowError("[StoreClaim] Invalid creator address"); + ).rejects.toThrowError(); }); }); diff --git a/test/utils/getBlocksToFetch.test.ts b/test/utils/getBlocksToFetch.test.ts new file mode 100644 index 0000000..278a3f8 --- /dev/null +++ b/test/utils/getBlocksToFetch.test.ts @@ -0,0 +1,60 @@ +import { afterAll, afterEach, describe, it } from "vitest"; +import { client } from "@/clients/evmClient"; +import sinon from "sinon"; +import { getBlocksToFetch } from "../../src/utils/getBlocksToFetch"; + +describe("getBlocksToFetch", () => { + const readSpy = sinon.stub(client, "getBlockNumber"); + + const defaultInput = { + contractCreationBlock: 420n, + fromBlock: 1337n, + batchSize: 100n, + }; + afterEach(() => { + sinon.reset(); + }); + + afterAll(() => { + sinon.restore(); + }); + + it("throws when call 
for block fails", async ({ expect }) => { + readSpy.throws(); + await expect(() => getBlocksToFetch(defaultInput)).rejects.toThrowError(); + }); + + it("returns correct block range when fromBlock is greater than contractCreationBlock", async ({ + expect, + }) => { + readSpy.resolves(1500n); + const result = await getBlocksToFetch(defaultInput); + expect(result).toEqual({ fromBlock: 1337n, toBlock: 1437n }); + }); + + it("returns correct block range when fromBlock is less than contractCreationBlock", async ({ + expect, + }) => { + readSpy.resolves(1500n); + const input = { ...defaultInput, fromBlock: 400n }; + const result = await getBlocksToFetch(input); + expect(result).toEqual({ fromBlock: 420n, toBlock: 520n }); + }); + + it("returns correct block range when batchSize is greater than remaining blocks", async ({ + expect, + }) => { + readSpy.resolves(1400n); + const result = await getBlocksToFetch(defaultInput); + expect(result).toEqual({ fromBlock: 1337n, toBlock: 1400n }); + }); + + it("throws when fromBlock is more recent than toBlock", async ({ + expect, + }) => { + readSpy.resolves(1300n); + await expect(() => getBlocksToFetch(defaultInput)).rejects.toThrowError( + "[getBlocksToFetch] from block more recent than to block. 
[1337, 1300]", + ); + }); +}); diff --git a/test/utils/getMinterAddressAndStartBlock.test.ts b/test/utils/getMinterAddressAndStartBlock.test.ts new file mode 100644 index 0000000..feabef0 --- /dev/null +++ b/test/utils/getMinterAddressAndStartBlock.test.ts @@ -0,0 +1,59 @@ +import { afterAll, afterEach, describe, expect, it, vi } from "vitest"; +import { getMinterAddressAndStartBlock } from "../../src/utils/getMinterAddressAndStartBlock"; +import { getAddress } from "viem"; +import { faker } from "@faker-js/faker"; + +const mocks = vi.hoisted(() => { + return { + getDeployment: vi.fn(), + }; +}); + +vi.mock("../../src/utils/getDeployment", () => ({ + getDeployment: mocks.getDeployment, +})); + +describe("getMinterAddressAndStartBlock", () => { + const minterAddress = getAddress(faker.finance.ethereumAddress()); + afterEach(() => { + vi.clearAllMocks(); + }); + + afterAll(() => { + vi.clearAllMocks(); + }); + + it("returns minter address and startblock for supported chain ID", () => { + mocks.getDeployment.mockReturnValue({ + addresses: { HypercertMinterUUPS: minterAddress }, + startBlock: 12345n, + }); + + const res = getMinterAddressAndStartBlock(); + + expect(res.address).toEqual(minterAddress); + expect(res.startBlock).toEqual(12345n); + }); + + it("throws error when address not found", () => { + mocks.getDeployment.mockReturnValue({ + addresses: { HypercertMinterUUPS: null }, + startBlock: 12345n, + }); + + expect(() => getMinterAddressAndStartBlock()).toThrowError( + "[getMinterAddressAndStartBlock] HypercertMinterUUPS is not available", + ); + }); + + it("throws error when address is not valid", () => { + mocks.getDeployment.mockReturnValue({ + addresses: { HypercertMinterUUPS: "not an address" }, + startBlock: 12345n, + }); + + expect(() => getMinterAddressAndStartBlock()).toThrowError( + "[getMinterAddressAndStartBlock] HypercertMinterUUPS is not available", + ); + }); +}); From 99c9bde3e26e0a177e882f1d411d4817de4cba21 Mon Sep 17 00:00:00 2001 From: 
bitbeckers Date: Fri, 31 May 2024 17:43:16 +0200 Subject: [PATCH 10/12] chore(cleanup): remove heartbeart smoke test --- test/server.test.ts | 9 --------- 1 file changed, 9 deletions(-) delete mode 100644 test/server.test.ts diff --git a/test/server.test.ts b/test/server.test.ts deleted file mode 100644 index 452887f..0000000 --- a/test/server.test.ts +++ /dev/null @@ -1,9 +0,0 @@ -import { describe, it } from "vitest"; -import { app as server } from "../src/server"; -import request from "supertest"; - -describe("server", () => { - it("GET /heartbeat", async () => { - await request(server).get("/heartbeat").expect(200).expect("OK"); - }); -}); From 3d59233af312ccf4e93b0324fc018a3720e7fad3 Mon Sep 17 00:00:00 2001 From: bitbeckers Date: Tue, 4 Jun 2024 19:35:46 +0200 Subject: [PATCH 11/12] feat(checks): cleanups and last tests --- package.json | 2 +- pnpm-lock.yaml | 21 +++- src/clients/evmClient.ts | 3 +- src/fetching/fetchMetadataFromUri.ts | 4 +- src/indexer/indexAllowlistCreated.ts | 2 - .../indexAllowlistSingleClaimMinted.ts | 2 - src/indexer/indexFractionTransfers.ts | 1 - src/indexer/indexUnitTransfers.ts | 1 - src/monitoring/hypercerts.ts | 2 +- src/parsing/claimStoredEvent.ts | 3 +- src/parsing/leafClaimedEvent.ts | 54 ++------- src/parsing/transferSingleEvent.ts | 62 +++------- src/parsing/valueTransferEvent.ts | 56 ++------- src/server.ts | 1 - test/parsing/leafClaimedEvent.test.ts | 29 ++--- test/parsing/transferSingleEvent.test.ts | 113 ++++++++---------- test/parsing/valueTransferEvent.test.ts | 93 +++++++------- vitest.config.ts | 1 + 18 files changed, 183 insertions(+), 267 deletions(-) diff --git a/package.json b/package.json index 4be80c6..6b42a63 100644 --- a/package.json +++ b/package.json @@ -69,7 +69,7 @@ "lodash": "^4.17.21", "luxon": "^3.4.4", "type-fest": "^4.10.3", - "viem": "^2.9.19", + "viem": "^2.13.5", "zod": "^3.23.6" }, "packageManager": "pnpm@9.0.6" diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index c09f33e..4d81003 100644 --- 
a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -51,8 +51,8 @@ importers: specifier: ^4.10.3 version: 4.10.3 viem: - specifier: ^2.9.19 - version: 2.9.19(typescript@5.3.3)(zod@3.23.6) + specifier: ^2.13.5 + version: 2.13.5(typescript@5.3.3)(zod@3.23.6) zod: specifier: ^3.23.6 version: 3.23.6 @@ -2511,6 +2511,11 @@ packages: peerDependencies: ws: '*' + isows@1.0.4: + resolution: {integrity: sha512-hEzjY+x9u9hPmBom9IIAqdJCwNLax+xrPb51vEPpERoFlIxgmZcHzsT5jKG06nvInKOBGvReAVz80Umed5CczQ==} + peerDependencies: + ws: '*' + istanbul-lib-coverage@3.2.2: resolution: {integrity: sha512-O8dpsF+r0WV/8MNRKfnmrtCWhuKjxrq2w+jpzBL5UZKTi2LeVWnWOmWRxFlesJONmc+wLAGvKQZEOanko0LFTg==} engines: {node: '>=8'} @@ -3593,8 +3598,8 @@ packages: typescript: optional: true - viem@2.9.19: - resolution: {integrity: sha512-1txsVoTz9+XGQpuN62wcDXasNtalW52UR41KnzwWTwHtV2cDcGuVuS/j/hcuQdZ7pU8X8jtq2IrwwR4jjKpy9Q==} + viem@2.13.5: + resolution: {integrity: sha512-jXkFh52GwBLJJE/OWkPZdXwiO3zpgTL7ebhpMcQkaa7F7xEhYeGBjqs26zn6rjlUYP43JjoAf/VnpmbYTQvR6Q==} peerDependencies: typescript: '>=5.0.4' peerDependenciesMeta: @@ -6811,6 +6816,10 @@ snapshots: dependencies: ws: 8.13.0 + isows@1.0.4(ws@8.13.0): + dependencies: + ws: 8.13.0 + istanbul-lib-coverage@3.2.2: {} istanbul-lib-report@3.0.1: @@ -7892,7 +7901,7 @@ snapshots: - utf-8-validate - zod - viem@2.9.19(typescript@5.3.3)(zod@3.23.6): + viem@2.13.5(typescript@5.3.3)(zod@3.23.6): dependencies: '@adraffy/ens-normalize': 1.10.0 '@noble/curves': 1.2.0 @@ -7900,7 +7909,7 @@ snapshots: '@scure/bip32': 1.3.2 '@scure/bip39': 1.2.1 abitype: 1.0.0(typescript@5.3.3)(zod@3.23.6) - isows: 1.0.3(ws@8.13.0) + isows: 1.0.4(ws@8.13.0) ws: 8.13.0 optionalDependencies: typescript: 5.3.3 diff --git a/src/clients/evmClient.ts b/src/clients/evmClient.ts index db65c8b..0ccf22e 100644 --- a/src/clients/evmClient.ts +++ b/src/clients/evmClient.ts @@ -18,7 +18,7 @@ const selectedNetwork = () => { const alchemyUrl = () => { switch (chainId) { case 10: - return 
`https://opt-mainnet.g.alchemy.com/v2/${apiKey}`; + return `https://opt-mainnet.g.alchemy.com/v2/${alchemyApiKey}`; case 84532: return `https://base-sepolia.g.alchemy.com/v2/${alchemyApiKey}`; case 11155111: @@ -29,6 +29,7 @@ const alchemyUrl = () => { }; /* Returns a PublicClient instance for the configured network. */ +// @ts-ignose viem client type to complex to type export const client = createPublicClient({ cacheTime: 10_000, chain: selectedNetwork(), diff --git a/src/fetching/fetchMetadataFromUri.ts b/src/fetching/fetchMetadataFromUri.ts index f2a913e..0c66dc7 100644 --- a/src/fetching/fetchMetadataFromUri.ts +++ b/src/fetching/fetchMetadataFromUri.ts @@ -33,7 +33,7 @@ export const fetchMetadataFromUri = async ({ uri }: FetchMetadataFromUri) => { const fetchResult = await fetchFromHttpsOrIpfs(uri); if (!fetchResult) { - console.error( + console.warn( `[FetchMetadataFromUri] No metadata found on IPFS for URI ${uri}`, ); return; @@ -42,7 +42,7 @@ export const fetchMetadataFromUri = async ({ uri }: FetchMetadataFromUri) => { const { valid, data, errors } = validateMetaData(fetchResult); if (!valid) { - console.error( + console.warn( `[FetchMetadataFromUri] Invalid metadata for URI ${uri}`, errors, ); diff --git a/src/indexer/indexAllowlistCreated.ts b/src/indexer/indexAllowlistCreated.ts index 33b9128..27c91d0 100644 --- a/src/indexer/indexAllowlistCreated.ts +++ b/src/indexer/indexAllowlistCreated.ts @@ -29,9 +29,7 @@ export const indexAllowListCreated = async ({ batchSize = defaultConfig.batchSize, eventName = defaultConfig.eventName, }: IndexerConfig = defaultConfig) => { - const { chainId } = getDeployment(); const contractsWithEvents = await getContractEventsForChain({ - chainId, eventName, }); diff --git a/src/indexer/indexAllowlistSingleClaimMinted.ts b/src/indexer/indexAllowlistSingleClaimMinted.ts index 84fb68a..952f596 100644 --- a/src/indexer/indexAllowlistSingleClaimMinted.ts +++ b/src/indexer/indexAllowlistSingleClaimMinted.ts @@ -31,13 +31,11 @@ 
export const indexAllowlistSingleClaimMinted = async ({ }: IndexerConfig = defaultConfig) => { const { chainId } = getDeployment(); const contractsWithEvents = await getContractEventsForChain({ - chainId, eventName, }); const currentBlock = await client.getBlockNumber(); const currentEventsClaimStored = await getContractEventsForChain({ - chainId, eventName: "ClaimStored", }); const latestIndexedBlockClaimStored = (currentEventsClaimStored || []).reduce( diff --git a/src/indexer/indexFractionTransfers.ts b/src/indexer/indexFractionTransfers.ts index d86229f..bca1713 100644 --- a/src/indexer/indexFractionTransfers.ts +++ b/src/indexer/indexFractionTransfers.ts @@ -31,7 +31,6 @@ export const indexTransferSingleEvents = async ({ }: IndexerConfig = defaultConfig) => { const { chainId } = getDeployment(); const contractsWithEvents = await getContractEventsForChain({ - chainId, eventName, }); diff --git a/src/indexer/indexUnitTransfers.ts b/src/indexer/indexUnitTransfers.ts index d5d8048..36f41e8 100644 --- a/src/indexer/indexUnitTransfers.ts +++ b/src/indexer/indexUnitTransfers.ts @@ -30,7 +30,6 @@ export const indexUnitTransfers = async ({ }: IndexerConfig = defaultConfig) => { const { chainId } = getDeployment(); const contractsWithEvents = await getContractEventsForChain({ - chainId, eventName, }); diff --git a/src/monitoring/hypercerts.ts b/src/monitoring/hypercerts.ts index a1ab600..69ae1a4 100644 --- a/src/monitoring/hypercerts.ts +++ b/src/monitoring/hypercerts.ts @@ -11,7 +11,7 @@ interface GetLogsForEventInput { } /** - * Fetches logs for a specific contract event from the Ethereum Virtual Machine (EVM). + * Fetches logs for a specific contract event. * * This function first gets the address and start block of the HypercertMinterUUPS from the deployment. * It then calculates the range of blocks to fetch based on the contract creation block, the from block, and the batch size. 
diff --git a/src/parsing/claimStoredEvent.ts b/src/parsing/claimStoredEvent.ts index 2504343..44fae5a 100644 --- a/src/parsing/claimStoredEvent.ts +++ b/src/parsing/claimStoredEvent.ts @@ -1,4 +1,4 @@ -import { isAddress, isHex, parseAbi, parseAbiItem } from "viem"; +import { isAddress, isHex, parseAbi } from "viem"; import { client } from "@/clients/evmClient"; import { z } from "zod"; @@ -25,7 +25,6 @@ export type ClaimStoredEvent = z.infer; * @throws {z.ZodError} If the event does not match the ClaimStoredEventSchema, a Zod validation error is thrown. */ export const parseClaimStoredEvent = async (event: unknown) => { - console.log(event); const { args, address, transactionHash, blockNumber } = ClaimStoredEventSchema.parse(event); diff --git a/src/parsing/leafClaimedEvent.ts b/src/parsing/leafClaimedEvent.ts index d3d7e7e..aa9f310 100644 --- a/src/parsing/leafClaimedEvent.ts +++ b/src/parsing/leafClaimedEvent.ts @@ -1,19 +1,18 @@ -import { Hex, isAddress } from "viem"; +import { isAddress, isHex } from "viem"; import { getBlockTimestamp } from "@/utils/getBlockTimestamp"; import { LeafClaimed } from "@/types/types"; import { client } from "@/clients/evmClient"; import { z } from "zod"; -type LeadClaimedEvent = { - address: string; - args: { - tokenID: bigint; - leaf: string; - }; - blockNumber: bigint; - transactionHash: Hex; - [key: string]: unknown; -}; +const LeafClaimedSchema = z.object({ + address: z.string().refine(isAddress), + args: z.object({ + tokenID: z.bigint(), + leaf: z.string(), + }), + blockNumber: z.bigint(), + transactionHash: z.string().refine(isHex), +}); /* * Helper method to get the tokenID, contract address, minter address and leaf hash from the event. Will return undefined when the event is @@ -22,15 +21,8 @@ type LeadClaimedEvent = { * @param event - The event object. 
* */ export const parseLeafClaimedEvent = async (event: unknown) => { - if (!isLeafClaimedEvent(event)) { - console.error( - `Invalid event or event args for parsing claimStored event: `, - event, - ); - return; - } - - const { args, address, transactionHash } = event; + const { args, blockNumber, address, transactionHash } = + LeafClaimedSchema.parse(event); const transaction = await client.getTransaction({ hash: transactionHash, @@ -39,30 +31,10 @@ export const parseLeafClaimedEvent = async (event: unknown) => { const claim: Partial = { creator_address: transaction.from, token_id: args.tokenID, - block_timestamp: await getBlockTimestamp(event.blockNumber), + block_timestamp: await getBlockTimestamp(blockNumber), contract_address: address, leaf: args.leaf, }; return claim; }; - -function isLeafClaimedEvent(event: unknown): event is LeadClaimedEvent { - const e = event as Partial; - - try { - z.object({ - args: z.object({ - tokenID: z.bigint(), - leaf: z.string(), - }), - address: z.string().refine(isAddress), - blockNumber: z.bigint(), - transactionHash: z.string(), - }).parse(e); - return true; - } catch (error) { - console.error("[isLeafClaimedEvent]", error); - return false; - } -} diff --git a/src/parsing/transferSingleEvent.ts b/src/parsing/transferSingleEvent.ts index 2d999c4..a971520 100644 --- a/src/parsing/transferSingleEvent.ts +++ b/src/parsing/transferSingleEvent.ts @@ -1,41 +1,33 @@ import { isAddress } from "viem"; import { getBlockTimestamp } from "@/utils/getBlockTimestamp"; import { NewTransfer } from "@/types/types"; +import { z } from "zod"; -type TransferSingleEvent = { - address: string; - args: { - operator: string; - from: string; - to: string; - id: bigint; - value: bigint; - }; - blockNumber: bigint; - [key: string]: unknown; -}; +const TransferSingleEventSchema = z.object({ + address: z.string().refine(isAddress), + args: z.object({ + operator: z.string().refine(isAddress), + from: z.string().refine(isAddress), + to: 
z.string().refine(isAddress), + id: z.bigint(), + value: z.bigint(), + }), + blockNumber: z.bigint(), +}); /* - * Helper method to get the sender, recipient, tokenID and value from the event. Will return undefined when the event is + * Helper method to get the sender, recipient, tokenID and value from the event. Will throw when the event is * missing any of the required fields. * * @param event - The event object. * */ export const parseTransferSingle = async (event: unknown) => { - if (!isTransferSingleEvent(event)) { - console.error( - `Invalid event or event args for parsing TransferSingle event: `, - event, - ); - return; - } - - const { args } = event; + const { args, blockNumber } = TransferSingleEventSchema.parse(event); const row: Partial = { token_id: args.id, - block_timestamp: await getBlockTimestamp(event.blockNumber), - block_number: event.blockNumber, + block_timestamp: await getBlockTimestamp(blockNumber), + block_number: blockNumber, value: args.value, to_owner_address: args.to, from_owner_address: args.from, @@ -43,25 +35,3 @@ export const parseTransferSingle = async (event: unknown) => { return row; }; - -function isTransferSingleEvent(event: unknown): event is TransferSingleEvent { - const e = event as Partial; - - return ( - typeof e === "object" && - e !== null && - e?.args !== null && - typeof e?.args === "object" && - typeof e?.args.operator === "string" && - isAddress(e?.args.operator) && - typeof e?.args.from === "string" && - isAddress(e?.args.from) && - typeof e?.args.to === "string" && - isAddress(e?.args.to) && - typeof e?.args.id === "bigint" && - typeof e?.args.value === "bigint" && - typeof e.address === "string" && - isAddress(e.address) && - typeof e.blockNumber === "bigint" - ); -} diff --git a/src/parsing/valueTransferEvent.ts b/src/parsing/valueTransferEvent.ts index 8483548..2210ab6 100644 --- a/src/parsing/valueTransferEvent.ts +++ b/src/parsing/valueTransferEvent.ts @@ -1,18 +1,18 @@ import { isAddress } from "viem"; import { 
getBlockTimestamp } from "@/utils/getBlockTimestamp"; import { NewUnitTransfer } from "@/types/types"; +import { z } from "zod"; -type ValueTransferEvent = { - address: string; - args: { - claimID: bigint; - fromTokenID: bigint; - toTokenID: bigint; - value: bigint; - }; - blockNumber: bigint; - [key: string]: unknown; -}; +const ValueTransferEventSchema = z.object({ + address: z.string().refine(isAddress), + args: z.object({ + claimID: z.bigint(), + fromTokenID: z.bigint(), + toTokenID: z.bigint(), + value: z.bigint(), + }), + blockNumber: z.bigint(), +}); /* * Helper method to get the sender, recipient, tokenID and value from the event. Will return undefined when the event is @@ -21,44 +21,14 @@ type ValueTransferEvent = { * @param event - The event object. * */ export const parseValueTransfer = async (event: unknown) => { - if (!isValueTransferEvent(event)) { - console.error( - `[ParseValueTransfer] Invalid event or event args for parsing TransferSingle event: `, - event, - ); - return; - } - - const { args } = event; + const { args, blockNumber } = ValueTransferEventSchema.parse(event); const row: Partial = { from_token_id: args.fromTokenID, to_token_id: args.toTokenID, - block_timestamp: await getBlockTimestamp(event.blockNumber), + block_timestamp: await getBlockTimestamp(blockNumber), units: args.value, }; return row; }; - -function isValueTransferEvent(event: unknown): event is ValueTransferEvent { - const e = event as Partial; - - return ( - typeof e === "object" && - e !== null && - e?.args !== null && - typeof e?.args === "object" && - e?.args.claimID !== null && - typeof e?.args?.claimID === "bigint" && - e?.args.fromTokenID !== null && - typeof e?.args.fromTokenID === "bigint" && - e?.args.toTokenID !== null && - typeof e?.args.toTokenID === "bigint" && - e?.args.value !== null && - typeof e?.args.value === "bigint" && - typeof e.address === "string" && - isAddress(e.address) && - typeof e.blockNumber === "bigint" - ); -} diff --git a/src/server.ts 
b/src/server.ts index 07d2d40..5314019 100644 --- a/src/server.ts +++ b/src/server.ts @@ -38,7 +38,6 @@ Sentry.init({ new Sentry.Integrations.Http({ tracing: true }), // enable Express.js middleware tracing new Sentry.Integrations.Express({ app }), - // @ts-ignore new ProfilingIntegration(), captureConsoleIntegration({ levels: ["error"], diff --git a/test/parsing/leafClaimedEvent.test.ts b/test/parsing/leafClaimedEvent.test.ts index e401248..990de3c 100644 --- a/test/parsing/leafClaimedEvent.test.ts +++ b/test/parsing/leafClaimedEvent.test.ts @@ -6,6 +6,7 @@ import { http, HttpResponse } from "msw"; import { client } from "../../src/clients/evmClient"; import { alchemyUrl } from "../resources/alchemyUrl"; +import { getAddress } from "viem"; describe("leafClaimedEvent", {}, () => { it("parses a leaf claimed event", {}, async () => { @@ -14,7 +15,7 @@ describe("leafClaimedEvent", {}, () => { return HttpResponse.json(0); }), ); - const address = faker.finance.ethereumAddress() as `0x${string}`; + const address = getAddress(faker.finance.ethereumAddress()); const tokenID = faker.number.bigInt(); const leaf = faker.string.alphanumeric("10"); const blockNumber = faker.number.bigInt(); @@ -29,7 +30,7 @@ describe("leafClaimedEvent", {}, () => { }, }; - const from = faker.finance.ethereumAddress(); + const from = getAddress(faker.finance.ethereumAddress()); vi.spyOn(client, "getTransaction").mockImplementation( async () => ({ @@ -57,7 +58,7 @@ describe("leafClaimedEvent", {}, () => { }); }); - it("returns undefined when the event is missing leaf", async () => { + it("throws when the event is missing leaf", async () => { const address = faker.finance.ethereumAddress() as `0x${string}`; const tokenID = faker.number.bigInt(); const blockNumber = faker.number.bigInt(); @@ -71,12 +72,12 @@ describe("leafClaimedEvent", {}, () => { }, }; - const parsed = await parseLeafClaimedEvent(event); - - expect(parsed).toBe(undefined); + await expect( + async () => await 
parseLeafClaimedEvent(event), + ).rejects.toThrowError(); }); - it("returns undefined when the event is missing tokenID", async () => { + it("throws when the event is missing tokenID", async () => { const address = faker.finance.ethereumAddress() as `0x${string}`; const leaf = faker.string.alphanumeric("10"); const blockNumber = faker.number.bigInt(); @@ -90,12 +91,12 @@ describe("leafClaimedEvent", {}, () => { }, }; - const parsed = await parseLeafClaimedEvent(event); - - expect(parsed).toBe(undefined); + await expect( + async () => await parseLeafClaimedEvent(event), + ).rejects.toThrowError(); }); - it("returns undefined when the event address is invalid", {}, async () => { + it("throws when the event address is invalid", {}, async () => { const address = "invalid"; const event = { id: "0x3e7d7e4c4f3d5a7f2b3d6c5", @@ -107,8 +108,8 @@ describe("leafClaimedEvent", {}, () => { }, }; - const parsed = await parseLeafClaimedEvent(event); - - expect(parsed).toBe(undefined); + await expect( + async () => await parseLeafClaimedEvent(event), + ).rejects.toThrowError(); }); }); diff --git a/test/parsing/transferSingleEvent.test.ts b/test/parsing/transferSingleEvent.test.ts index a196377..122705a 100644 --- a/test/parsing/transferSingleEvent.test.ts +++ b/test/parsing/transferSingleEvent.test.ts @@ -9,7 +9,7 @@ import { alchemyUrl } from "../resources/alchemyUrl"; import { getAddress } from "viem"; describe("transferSingleEvent", {}, () => { - const from = faker.finance.ethereumAddress(); + const from = getAddress(faker.finance.ethereumAddress()); const timestamp = 10; const contractAddress = getAddress(faker.finance.ethereumAddress()); const operatorAddress = getAddress(faker.finance.ethereumAddress()); @@ -82,68 +82,61 @@ describe("transferSingleEvent", {}, () => { args, }; - const parsed1 = await parseTransferSingle({ - ...event, - args: { - ...args, - id: "not a bigint", - }, - }); - expect(parsed1).toBeUndefined(); - - const parsed2 = await parseTransferSingle({ - 
...event, - args: { - ...args, - value: "not a bigint", - }, - }); - expect(parsed2).toBeUndefined(); + await expect( + async () => + await parseTransferSingle({ + ...event, + args: { + ...args, + id: "not a bigint", + }, + }), + ).rejects.toThrowError(); - const parsed3 = await parseTransferSingle({ - ...event, - args: { - ...args, - to: 1, - }, - }); - expect(parsed3).toBeUndefined(); - - const parsed4 = await parseTransferSingle({ - ...event, - args: { - ...args, - from: 1, - }, - }); - expect(parsed4).toBeUndefined(); + await expect( + parseTransferSingle({ + ...event, + args: { + ...args, + value: "not a bigint", + }, + }), + ).rejects.toThrowError(); - const parsed5 = await parseTransferSingle({ - ...event, - args: { - ...args, - operator: 1, - }, - }); - expect(parsed5).toBeUndefined(); + await expect( + parseTransferSingle({ + ...event, + args: { + ...args, + to: 1, + }, + }), + ).rejects.toThrowError(); - const parsed6 = await parseTransferSingle({ - ...event, - args: { - ...args, - operator: operatorAddress, - from: fromAddress, - to: toAddress, - id: 1, - value: 1, - }, - }); - expect(parsed6).toBeUndefined(); + await expect( + parseTransferSingle({ + ...event, + args: { + ...args, + from: 1, + }, + }), + ).rejects.toThrowError(); - const parsed7 = await parseTransferSingle({ - ...event, - args: {}, - }); - expect(parsed7).toBeUndefined(); + await expect( + parseTransferSingle({ + ...event, + args: { + ...args, + operator: 1, + }, + }), + ).rejects.toThrowError(); + await expect( + parseTransferSingle({ + ...event, + args: {}, + }), + ).rejects.toThrowError(); }); }); diff --git a/test/parsing/valueTransferEvent.test.ts b/test/parsing/valueTransferEvent.test.ts index 06d8c4c..94b7337 100644 --- a/test/parsing/valueTransferEvent.test.ts +++ b/test/parsing/valueTransferEvent.test.ts @@ -5,13 +5,14 @@ import { http, HttpResponse } from "msw"; import { parseValueTransfer } from "../../src/parsing/valueTransferEvent"; import { client } from 
"../../src/clients/evmClient"; import { alchemyUrl } from "../resources/alchemyUrl"; +import { getAddress } from "viem"; describe("valueTransferEvent", () => { const claimID = faker.number.bigInt(); const fromTokenID = faker.number.bigInt(); const toTokenID = faker.number.bigInt(); const value = faker.number.bigInt(); - const address = faker.finance.ethereumAddress(); + const address = getAddress(faker.finance.ethereumAddress()); const blockNumber = faker.number.bigInt(); const timestamp = faker.number.int(); @@ -55,54 +56,60 @@ describe("valueTransferEvent", () => { }); it("fails if event is invalid", async () => { - const parsed1 = await parseValueTransfer({ - ...event, - address: "not an address", - }); - expect(parsed1).toBeUndefined(); + await expect( + parseValueTransfer({ + ...event, + address: "not an address", + }), + ).rejects.toThrow(); - const parsed2 = await parseValueTransfer({ - ...event, - blockNumber: "not an int", - }); - expect(parsed2).toBeUndefined(); + await expect( + parseValueTransfer({ + ...event, + blockNumber: "not an int", + }), + ).rejects.toThrow(); }); it("fails if args are invalid", async () => { - const parsed1 = await parseValueTransfer({ - ...event, - args: { - ...event.args, - fromTokenID: "not an int", - }, - }); - expect(parsed1).toBeUndefined(); + await expect( + parseValueTransfer({ + ...event, + args: { + ...event.args, + claimID: "not an int", + }, + }), + ).rejects.toThrow(); - const parsed2 = await parseValueTransfer({ - ...event, - args: { - ...event.args, - claimID: "not a int", - }, - }); - expect(parsed2).toBeUndefined(); + await expect( + parseValueTransfer({ + ...event, + args: { + ...event.args, + fromTokenID: "not an int", + }, + }), + ).rejects.toThrow(); - const parsed3 = await parseValueTransfer({ - ...event, - args: { - ...event.args, - toTokenID: "not a int", - }, - }); - expect(parsed3).toBeUndefined(); + await expect( + parseValueTransfer({ + ...event, + args: { + ...event.args, + toTokenID: "not an int", + 
},
+        }),
+      ).rejects.toThrow();
 
-    const parsed4 = await parseValueTransfer({
-      ...event,
-      args: {
-        ...event.args,
-        value: "not a int",
-      },
-    });
-    expect(parsed4).toBeUndefined();
+    await expect(
+      parseValueTransfer({
+        ...event,
+        args: {
+          ...event.args,
+          value: "not an int",
+        },
+      }),
+    ).rejects.toThrow();
   });
 });
diff --git a/vitest.config.ts b/vitest.config.ts
index 81626c3..f7eec93 100644
--- a/vitest.config.ts
+++ b/vitest.config.ts
@@ -25,6 +25,7 @@ export default defineConfig({
         ...(configDefaults.coverage.exclude as string[]),
         "**/*.types.ts",
         "**/types.ts",
+        "all_leaf_claimed_events.ts",
       ],
     },
   },

From 9d86532702ed8feb0f9539262266f1ca70e126c0 Mon Sep 17 00:00:00 2001
From: bitbeckers
Date: Tue, 4 Jun 2024 21:38:54 +0200
Subject: [PATCH 12/12] fix(types): use db generated types for store input

---
 src/fetching/fetchAllowlistFromUri.ts       | 23 +---------
 src/indexer/indexAllowlistRecords.ts        | 48 ++++++++++++---------
 src/storage/getUnparsedAllowListsRecords.ts | 12 ++----
 src/storage/storeAllowListData.ts           |  4 +-
 src/storage/storeAllowListRecords.ts        |  4 +-
 src/storage/storeAttestations.ts            | 13 +++---
 src/storage/storeHypercertAllowList.ts      | 12 ++----
 src/storage/storeMetadata.ts                |  4 +-
 src/storage/storeSupportedSchemas.ts        |  4 +-
 src/storage/storeUnits.ts                   |  1 -
 src/utils/parseToOzMerkleTree.ts            | 21 +++++++++
 11 files changed, 73 insertions(+), 73 deletions(-)
 create mode 100644 src/utils/parseToOzMerkleTree.ts

diff --git a/src/fetching/fetchAllowlistFromUri.ts b/src/fetching/fetchAllowlistFromUri.ts
index bd0a3c1..38d2925 100644
--- a/src/fetching/fetchAllowlistFromUri.ts
+++ b/src/fetching/fetchAllowlistFromUri.ts
@@ -1,5 +1,5 @@
-import { StandardMerkleTree } from "@openzeppelin/merkle-tree";
 import { fetchFromHttpsOrIpfs } from "@/utils/fetchFromHttpsOrIpfs";
+import { parseToOzMerkleTree } from "@/utils/parseToOzMerkleTree";
 
 export interface FetchAllowListFromUriInput {
   uri?: string;
@@ -33,24 +33,5 @@ export const fetchAllowListFromUri = async ({
     );
return; } - - try { - return StandardMerkleTree.load<[string, bigint]>( - JSON.parse(fetchResult as string), - ); - } catch (error) { - console.debug( - `[FetchAllowListFromUri] Allow list at ${uri} is not a valid OZ Merkle tree`, - error, - ); - } - - try { - return StandardMerkleTree.load<[string, bigint]>(fetchResult as never); - } catch (error) { - console.debug( - `[FetchAllowListFromUri] Allow list at ${uri} is not a valid OZ Merkle tree`, - error, - ); - } + return parseToOzMerkleTree(fetchResult, uri); }; diff --git a/src/indexer/indexAllowlistRecords.ts b/src/indexer/indexAllowlistRecords.ts index 71b3bb6..4daec04 100644 --- a/src/indexer/indexAllowlistRecords.ts +++ b/src/indexer/indexAllowlistRecords.ts @@ -1,21 +1,22 @@ import { IndexerConfig } from "@/types/types"; import { storeAllowListRecords } from "@/storage/storeAllowListRecords"; -import { - getUnparsedAllowListRecords, - UnparsedAllowListRecord, -} from "@/storage/getUnparsedAllowListsRecords"; -import { StandardMerkleTree } from "@openzeppelin/merkle-tree"; +import { getUnparsedAllowListRecords } from "@/storage/getUnparsedAllowListsRecords"; +import { Database } from "@/types/database.types"; +import { parseToOzMerkleTree } from "@/utils/parseToOzMerkleTree"; -/* - * This function indexes the logs of the ClaimStored event emitted by the HypercertMinter contract. Based on the last - * block indexed, it fetches the logs in batches, parses them, fetches the metadata, and stores the hypercerts in the - * database. +/** + * This function indexes the unparsed allow lists in batches. * - * @param [batchSize] - The number of logs to fetch and parse in each batch. + * It fetches the unparsed allow lists from the database and processes them in batches. The size of the batches is determined by the `batchSize` parameter. + * If no unparsed allow lists are found, the function logs a debug message and returns. 
+ * For each batch, it calls the `processAllowListEntriesBatch` function to process the allow list entries.
+ *
+ * @param {IndexerConfig} config - The configuration for the indexer. It has a `batchSize` property that defaults to `defaultConfig.batchSize`.
+ * @returns {Promise} A promise that resolves when all batches have been processed.
  *
  * @example
- * ```js
- * await indexClaimsStoredEvents({ batchSize: 1000n });
+ * ```typescript
+ * await indexAllowlistRecords({ batchSize: 1000n });
  * ```
  */
 
@@ -54,15 +55,15 @@ export const indexAllowlistRecords = async ({
 };
 
 const processAllowListEntriesBatch = async (
-  batch: UnparsedAllowListRecord[],
+  batch: Database["public"]["Functions"]["get_unparsed_hypercert_allow_lists"]["Returns"],
 ) => {
   const allowListsToStore = await Promise.all(
     batch.map(async (allowList) => {
-      const tree = StandardMerkleTree.load(allowList.data);
-
+      // TODO
+      const tree = parseToOzMerkleTree(allowList?.data);
       if (!tree) {
-        console.debug(
-          "[IndexAllowlistRecords] Failed to load tree from data",
+        console.error(
+          "[IndexAllowlistRecords] Error while loading tree from data",
           allowList,
         );
         return;
@@ -87,13 +88,20 @@
 
   try {
     await Promise.all(
-      allowListsToStore.map((data) =>
+      allowListsToStore.map((data) => {
+        if (!data || !data.records) {
+          console.debug(
+            "[IndexAllowlistRecords] No records found for allow list",
+            data,
+          );
+          return;
+        }
-        storeAllowListRecords({
+        return storeAllowListRecords({
           claim_id: data?.claim_id,
           allow_list_data_id: data?.al_data_id,
           records: data?.records,
-        }),
-      ),
+        });
+      }),
     );
   } catch (error) {
     console.error(
diff --git a/src/storage/getUnparsedAllowListsRecords.ts b/src/storage/getUnparsedAllowListsRecords.ts
index d57227d..4fc357b 100644
--- a/src/storage/getUnparsedAllowListsRecords.ts
+++ b/src/storage/getUnparsedAllowListsRecords.ts
@@ -1,17 +1,13 @@
 import { supabase } from "@/clients/supabaseClient";
-
-export type UnparsedAllowListRecord = {
-  claim_id: string;
-  al_data_id: string;
- // TODO: Can this be typed correctly? - data: any; -}; +import { Database } from "@/types/database.types"; export const getUnparsedAllowListRecords = async () => { const { data, error } = await supabase .rpc("get_unparsed_hypercert_allow_lists") .select() - .returns(); + .returns< + Database["public"]["Functions"]["get_unparsed_hypercert_allow_lists"]["Returns"] + >(); if (error) { console.error( diff --git a/src/storage/storeAllowListData.ts b/src/storage/storeAllowListData.ts index 4e6404c..ebbfc34 100644 --- a/src/storage/storeAllowListData.ts +++ b/src/storage/storeAllowListData.ts @@ -1,9 +1,9 @@ import { supabase } from "@/clients/supabaseClient"; -import { Tables } from "@/types/database.types"; +import { Database } from "@/types/database.types"; import _ from "lodash"; interface StoreAllowListData { - allowListData: Partial>[]; + allowListData: Database["public"]["Tables"]["allow_list_data"]["Update"][]; } export const storeAllowListData = async ({ diff --git a/src/storage/storeAllowListRecords.ts b/src/storage/storeAllowListRecords.ts index e2860d5..4675782 100644 --- a/src/storage/storeAllowListRecords.ts +++ b/src/storage/storeAllowListRecords.ts @@ -1,10 +1,10 @@ import { supabase } from "@/clients/supabaseClient"; -import { Tables } from "@/types/database.types"; +import { Database } from "@/types/database.types"; interface StoreAllowListRecords { claim_id?: string; allow_list_data_id?: string; - records?: Partial>[]; + records: Database["public"]["Tables"]["hypercert_allow_list_records"]["Update"][]; } export const storeAllowListRecords = async ({ diff --git a/src/storage/storeAttestations.ts b/src/storage/storeAttestations.ts index 51c12f9..e7eb83d 100644 --- a/src/storage/storeAttestations.ts +++ b/src/storage/storeAttestations.ts @@ -1,7 +1,11 @@ import { supabase } from "@/clients/supabaseClient"; -import { Tables } from "@/types/database.types"; +import { Database, Tables } from "@/types/database.types"; import * as console from "node:console"; 
+interface StoreAttestations { + attestations: Database["public"]["Tables"]["attestations"]["Update"][]; +} + /* * Stores the provided attestation data in the database. * @@ -12,12 +16,7 @@ import * as console from "node:console"; */ export const storeAttestations = async ({ attestations, -}: { - attestations: ( - | Omit, "id" | "claims_id"> - | undefined - )[]; -}) => { +}: StoreAttestations) => { const _attestations = (attestations = attestations.filter( (attestation) => attestation !== null && attestation !== undefined, )); diff --git a/src/storage/storeHypercertAllowList.ts b/src/storage/storeHypercertAllowList.ts index be7db02..1498ce0 100644 --- a/src/storage/storeHypercertAllowList.ts +++ b/src/storage/storeHypercertAllowList.ts @@ -1,14 +1,10 @@ import { supabase } from "@/clients/supabaseClient"; import { Database } from "@/types/database.types"; -interface StoreHypercertAllowList { - batchToStore: Database["public"]["CompositeTypes"]["allow_list_data_type"][]; -} - export const storeHypercertAllowList = async ({ - batchToStore, -}: StoreHypercertAllowList) => { - if (batchToStore.length === 0) { + p_hc_allow_list_roots, +}: Database["public"]["Functions"]["store_hypercert_allow_list_roots"]["Args"]) => { + if (p_hc_allow_list_roots.length === 0) { console.debug( "[StoreHypercertAllowList] No hypercert and allow list data to store", ); @@ -17,7 +13,7 @@ export const storeHypercertAllowList = async ({ await supabase .rpc("store_hypercert_allow_list_roots", { - p_hc_allow_list_roots: batchToStore, + p_hc_allow_list_roots, }) .throwOnError(); }; diff --git a/src/storage/storeMetadata.ts b/src/storage/storeMetadata.ts index 4c2ad94..3f81246 100644 --- a/src/storage/storeMetadata.ts +++ b/src/storage/storeMetadata.ts @@ -1,5 +1,5 @@ import { supabase } from "@/clients/supabaseClient"; -import { Tables } from "@/types/database.types"; +import { Database, Tables } from "@/types/database.types"; import { z } from "zod"; /* @@ -25,7 +25,7 @@ import { z } from 
"zod"; */ interface StoreMetadata { - metadata?: Partial>[]; + metadata?: Database["public"]["Tables"]["metadata"]["Update"][]; } export const storeMetadata = async ({ metadata }: StoreMetadata) => { diff --git a/src/storage/storeSupportedSchemas.ts b/src/storage/storeSupportedSchemas.ts index e63fb56..735aaff 100644 --- a/src/storage/storeSupportedSchemas.ts +++ b/src/storage/storeSupportedSchemas.ts @@ -1,8 +1,8 @@ import { supabase } from "@/clients/supabaseClient"; -import { Tables } from "@/types/database.types"; +import { Database, Tables } from "@/types/database.types"; interface StoreSupportedSchemas { - supportedSchemas: Tables<"supported_schemas">[]; + supportedSchemas: Database["public"]["Tables"]["supported_schemas"]["Update"][]; } /** diff --git a/src/storage/storeUnits.ts b/src/storage/storeUnits.ts index 59fd3cd..c3a17de 100644 --- a/src/storage/storeUnits.ts +++ b/src/storage/storeUnits.ts @@ -1,5 +1,4 @@ import { supabase } from "@/clients/supabaseClient"; -import { Tables } from "@/types/database.types"; import { NewUnitTransfer } from "@/types/types"; import { getClaimTokenId } from "@/utils/tokenIds"; diff --git a/src/utils/parseToOzMerkleTree.ts b/src/utils/parseToOzMerkleTree.ts new file mode 100644 index 0000000..db8a02d --- /dev/null +++ b/src/utils/parseToOzMerkleTree.ts @@ -0,0 +1,21 @@ +import { StandardMerkleTree } from "@openzeppelin/merkle-tree"; + +export const parseToOzMerkleTree = (fetchResult: unknown, uri?: string) => { + try { + return StandardMerkleTree.load<[string, bigint]>( + JSON.parse(fetchResult as string), + ); + } catch (error) { + console.debug( + `[parseToOzMerkleTree] Allow list at ${uri} is not a valid OZ Merkle tree`, + ); + } + + try { + return StandardMerkleTree.load<[string, bigint]>(fetchResult as never); + } catch (error) { + console.debug( + `[parseToOzMerkleTree] Allow list at ${uri} is not a valid OZ Merkle tree`, + ); + } +};