From 8ed1466d861cfea2bc2c4cb0d12f1865c06c6932 Mon Sep 17 00:00:00 2001 From: mckrava Date: Thu, 14 Dec 2023 12:10:24 +0200 Subject: [PATCH] feat: added support for new datasource subquery --- codegenConfig/codegen-subquery-nova-query.ts | 23 + deployment/feature-based/all.yaml | 20 +- deployment/prod/all.yaml | 20 +- package.json | 3 +- schema.gql | 5 + src/common/entities/enums.ts | 6 +- src/config.module.ts | 48 +- src/constants/blockchain.ts | 48 +- src/constants/common.ts | 10 +- src/dependencyServiceModule.module.ts | 2 +- .../dataAggregator/dataAggregator.module.ts | 2 +- ...tTransfersChunkHandlerResponse.response.ts | 4 +- .../getIndexerLastProcessedHeight.args.dto.ts | 3 + .../dto/getMainGiantSquidStatus.args.dto.ts | 3 - .../dto/getTransfersByAccount.args.dto.ts | 6 +- ...indexerLastProcessedHeightDecorated.dto.ts | 3 + .../dto/transfersByAccountDecorated.dto.ts | 21 + .../services/aggregation.helper.ts | 31 +- .../entities/blockchain/blockchain.service.ts | 26 +- .../blockchain/entities/blockchain.entity.ts | 8 + .../entities/transferNative.entity.ts | 19 +- .../collectTransfersDataChunk.worker.ts | 10 +- src/utils/commonUtils.ts | 14 +- src/utils/cryptoUtils.ts | 14 + src/utils/dataSourceUtils.ts | 124 - src/utils/dataSources/common.ts | 40 + src/utils/dataSources/dataSourceDecorators.ts | 122 + .../dataSources/dataSourceUtils.subQuery.ts | 74 + .../dataSources/dataSourceUtils.subSquid.ts | 94 + src/utils/dataSources/dataSourceUtils.ts | 58 + .../graphQl/gsquidMain/gsquid-main-query.ts | 24 +- src/utils/graphQl/gsquidMain/query.ts | 18 +- src/utils/graphQl/subQueryNova/query.ts | 29 + .../subQueryNova/subquery-nova-query.ts | 2761 +++++++++++++++++ .../graphQl/subQueryNova/transfer.dto.ts | 8 + test/checkAccountCanRpc.spec.ts | 55 +- 36 files changed, 3501 insertions(+), 255 deletions(-) create mode 100644 codegenConfig/codegen-subquery-nova-query.ts create mode 100644 src/modules/dataAggregator/dto/getIndexerLastProcessedHeight.args.dto.ts delete mode 100644 
src/modules/dataAggregator/dto/getMainGiantSquidStatus.args.dto.ts create mode 100644 src/modules/dataAggregator/dto/indexerLastProcessedHeightDecorated.dto.ts create mode 100644 src/modules/dataAggregator/dto/transfersByAccountDecorated.dto.ts delete mode 100644 src/utils/dataSourceUtils.ts create mode 100644 src/utils/dataSources/common.ts create mode 100644 src/utils/dataSources/dataSourceDecorators.ts create mode 100644 src/utils/dataSources/dataSourceUtils.subQuery.ts create mode 100644 src/utils/dataSources/dataSourceUtils.subSquid.ts create mode 100644 src/utils/dataSources/dataSourceUtils.ts create mode 100644 src/utils/graphQl/subQueryNova/query.ts create mode 100644 src/utils/graphQl/subQueryNova/subquery-nova-query.ts create mode 100644 src/utils/graphQl/subQueryNova/transfer.dto.ts diff --git a/codegenConfig/codegen-subquery-nova-query.ts b/codegenConfig/codegen-subquery-nova-query.ts new file mode 100644 index 0000000..07855c9 --- /dev/null +++ b/codegenConfig/codegen-subquery-nova-query.ts @@ -0,0 +1,23 @@ +import type { CodegenConfig } from '@graphql-codegen/cli'; +import * as dotenv from 'dotenv'; +dotenv.config({ path: `${__dirname}/../.env.local` }); + +const queryUrl = process.env.DATA_SOURCE_SUBQUERY_NOVA_POLKADOT; +if (!queryUrl) throw new Error('Codegen error: Datahub Mutation URL not set'); + +const config: CodegenConfig = { + overwrite: true, + schema: queryUrl, + documents: 'src/utils/graphQl/subQueryNova/query.ts', + generates: { + 'src/utils/graphQl/subQueryNova/subquery-nova-query.ts': { + plugins: [ + 'typescript', + 'typescript-operations', + 'typescript-document-nodes', + ], + }, + }, +}; + +export default config; diff --git a/deployment/feature-based/all.yaml b/deployment/feature-based/all.yaml index d7409ab..10733f4 100644 --- a/deployment/feature-based/all.yaml +++ b/deployment/feature-based/all.yaml @@ -61,11 +61,21 @@ data: AGGREGATOR_REDIS_ENABLE_SSL: 'false' AGGREGATOR_HISTORY_RENEW_INTERVAL_MS: '60000' 
AGGREGATOR_GS_MAIN_CHUNK_BLOCKS_SIZE: '1000000' - DATA_SOURCE_GSQUID_MAIN_POLKADOT: 'https://squid.subsquid.io/gs-main-polkadot/graphql' - DATA_SOURCE_GSQUID_MAIN_KUSAMA: 'https://squid.subsquid.io/gs-main-kusama/graphql' - DATA_SOURCE_GSQUID_MAIN_MOONBEAM: 'https://squid.subsquid.io/gs-main-moonbeam/graphql' - DATA_SOURCE_GSQUID_MAIN_MOONRIVER: 'https://squid.subsquid.io/gs-main-moonriver/graphql' - DATA_SOURCE_GSQUID_MAIN_ASTAR: 'https://squid.subsquid.io/gs-main-astar/graphql' + + DATA_SOURCE__SUBSQUID__POLKADOT__TRANSFER: 'https://squid.subsquid.io/gs-main-polkadot/graphql' + DATA_SOURCE__SUBSQUID__KUSAMA__TRANSFER: 'https://squid.subsquid.io/gs-main-kusama/graphql' + DATA_SOURCE__SUBSQUID__MOONBEAM__TRANSFER: 'https://squid.subsquid.io/gs-main-moonbeam/graphql' + DATA_SOURCE__SUBSQUID__MOONRIVER__TRANSFER: 'https://squid.subsquid.io/gs-main-moonriver/graphql' + DATA_SOURCE__SUBSQUID__ASTAR__TRANSFER: 'https://squid.subsquid.io/gs-main-astar/graphql' + + DATA_SOURCE__SUBQUERY__POLKADOT__TRANSFER: 'https://api.subquery.network/sq/nova-wallet/nova-wallet-polkadot' + DATA_SOURCE__SUBQUERY__KUSAMA__TRANSFER: 'https://api.subquery.network/sq/nova-wallet/nova-wallet-kusama' + DATA_SOURCE__SUBQUERY__MOONBEAM__TRANSFER: 'https://api.subquery.network/sq/nova-wallet/nova-wallet-moonbeam' + DATA_SOURCE__SUBQUERY__MOONRIVER__TRANSFER: 'https://api.subquery.network/sq/nova-wallet/nova-wallet-moonriver' + DATA_SOURCE__SUBQUERY__ASTAR__TRANSFER: 'https://api.subquery.network/sq/nova-wallet/nova-wallet-astar' + + DATA_SOURCE_PROVIDER_TRANSFER: 'SUBQUERY' + NODE_ENV: 'staging' APP_PORT: '3000' diff --git a/deployment/prod/all.yaml b/deployment/prod/all.yaml index 62669a0..c0801c3 100644 --- a/deployment/prod/all.yaml +++ b/deployment/prod/all.yaml @@ -61,10 +61,20 @@ data: AGGREGATOR_REDIS_ENABLE_SSL: 'false' AGGREGATOR_HISTORY_RENEW_INTERVAL_MS: '60000' AGGREGATOR_GS_MAIN_CHUNK_BLOCKS_SIZE: '1000000' - DATA_SOURCE_GSQUID_MAIN_POLKADOT: 
'https://squid.subsquid.io/gs-main-polkadot/graphql' - DATA_SOURCE_GSQUID_MAIN_KUSAMA: 'https://squid.subsquid.io/gs-main-kusama/graphql' - DATA_SOURCE_GSQUID_MAIN_MOONBEAM: 'https://squid.subsquid.io/gs-main-moonbeam/graphql' - DATA_SOURCE_GSQUID_MAIN_MOONRIVER: 'https://squid.subsquid.io/gs-main-moonriver/graphql' - DATA_SOURCE_GSQUID_MAIN_ASTAR: 'https://squid.subsquid.io/gs-main-astar/graphql' + + DATA_SOURCE__SUBSQUID__POLKADOT__TRANSFER: 'https://squid.subsquid.io/gs-main-polkadot/graphql' + DATA_SOURCE__SUBSQUID__KUSAMA__TRANSFER: 'https://squid.subsquid.io/gs-main-kusama/graphql' + DATA_SOURCE__SUBSQUID__MOONBEAM__TRANSFER: 'https://squid.subsquid.io/gs-main-moonbeam/graphql' + DATA_SOURCE__SUBSQUID__MOONRIVER__TRANSFER: 'https://squid.subsquid.io/gs-main-moonriver/graphql' + DATA_SOURCE__SUBSQUID__ASTAR__TRANSFER: 'https://squid.subsquid.io/gs-main-astar/graphql' + + DATA_SOURCE__SUBQUERY__POLKADOT__TRANSFER: 'https://api.subquery.network/sq/nova-wallet/nova-wallet-polkadot' + DATA_SOURCE__SUBQUERY__KUSAMA__TRANSFER: 'https://api.subquery.network/sq/nova-wallet/nova-wallet-kusama' + DATA_SOURCE__SUBQUERY__MOONBEAM__TRANSFER: 'https://api.subquery.network/sq/nova-wallet/nova-wallet-moonbeam' + DATA_SOURCE__SUBQUERY__MOONRIVER__TRANSFER: 'https://api.subquery.network/sq/nova-wallet/nova-wallet-moonriver' + DATA_SOURCE__SUBQUERY__ASTAR__TRANSFER: 'https://api.subquery.network/sq/nova-wallet/nova-wallet-astar' + + DATA_SOURCE_PROVIDER_TRANSFER: 'SUBQUERY' + NODE_ENV: 'production' APP_PORT: '3000' diff --git a/package.json b/package.json index d3ae189..c48b5f5 100644 --- a/package.json +++ b/package.json @@ -20,7 +20,8 @@ "test:cov": "jest --coverage", "test:debug": "node --inspect-brk -r tsconfig-paths/register -r ts-node/register node_modules/.bin/jest --runInBand", "test:e2e": "jest --config ./test/jest-e2e.json", - "codegen:gsquid-main-query": "graphql-codegen --require dotenv/config --config codegenConfig/codegen-gsquid-main-query.ts" + 
"codegen:gsquid-main-query": "graphql-codegen --require dotenv/config --config codegenConfig/codegen-gsquid-main-query.ts", + "codegen:subquery-nova-query": "graphql-codegen --require dotenv/config --config codegenConfig/codegen-subquery-nova-query.ts" }, "dependencies": { "@apollo/server": "^4.7.2", diff --git a/schema.gql b/schema.gql index 95bc9af..de9a51e 100644 --- a/schema.gql +++ b/schema.gql @@ -6,6 +6,8 @@ type Blockchain { id: String! text: String! info: String! + symbols: [String!] + prefix: Int tag: BlockchainTag! decimal: Int! logo: String! @@ -22,10 +24,13 @@ enum BlockchainTag { type TransferNative { id: String! + blockchain: Blockchain! blockNumber: Int! extrinsicHash: String + eventIndex: Int timestamp: DateTime! amount: BigInt! + fee: BigInt success: Boolean! from: Account! to: Account! diff --git a/src/common/entities/enums.ts b/src/common/entities/enums.ts index 3d40d12..0b50bb7 100644 --- a/src/common/entities/enums.ts +++ b/src/common/entities/enums.ts @@ -3,7 +3,7 @@ import { FindAccountTxHistoryOrderBy, NativeTransactionKind, QueryOrder, - TransactionKind, + TransactionKind, TransferDirection, VoteResult, } from '../../constants/common'; import { BlockchainTag } from '../../constants/blockchain'; @@ -31,3 +31,7 @@ registerEnumType(NativeTransactionKind, { registerEnumType(BlockchainTag, { name: 'BlockchainTag', }); + +registerEnumType(TransferDirection, { + name: 'TransferDirection', +}); diff --git a/src/config.module.ts b/src/config.module.ts index 790317d..c75c40b 100644 --- a/src/config.module.ts +++ b/src/config.module.ts @@ -1,9 +1,10 @@ import { Global, Module, Provider } from '@nestjs/common'; import { ConfigService } from '@nestjs/config'; import { transformAndValidateSync } from 'class-transformer-validator'; -import { IsNotEmpty } from 'class-validator'; +import { IsEnum, IsNotEmpty } from 'class-validator'; import * as dotenv from 'dotenv'; import { Transform } from 'class-transformer'; +import { DataSourceProviders } from 
'./constants/common'; dotenv.config({ path: `${__dirname}/../.env.local` }); @@ -39,31 +40,56 @@ export class AppConfig { @IsNotEmpty() readonly AGGREGATOR_REDIS_ENABLE_SSL: boolean; + @Transform(({ value }: { value: string }) => +value) @IsNotEmpty() - readonly DATA_SOURCE_GSQUID_MAIN_POLKADOT: string; + readonly AGGREGATOR_HISTORY_RENEW_INTERVAL_MS: number; + @Transform(({ value }: { value: string }) => +value) @IsNotEmpty() - readonly DATA_SOURCE_GSQUID_MAIN_KUSAMA: string; + readonly AGGREGATOR_GS_MAIN_CHUNK_BLOCKS_SIZE: number; @IsNotEmpty() - readonly DATA_SOURCE_GSQUID_MAIN_MOONBEAM: string; + readonly NODE_ENV: string; + + /** + * === DATA SOURCES === + */ @IsNotEmpty() - readonly DATA_SOURCE_GSQUID_MAIN_MOONRIVER: string; + @Transform(({ value }) => ('' + value).toUpperCase()) + @IsEnum(DataSourceProviders) + readonly DATA_SOURCE_PROVIDER_TRANSFER: DataSourceProviders; + @IsNotEmpty() - readonly DATA_SOURCE_GSQUID_MAIN_ASTAR: string; + readonly DATA_SOURCE__SUBSQUID__POLKADOT__TRANSFER: string; - @Transform(({ value }: { value: string }) => +value) @IsNotEmpty() - readonly AGGREGATOR_HISTORY_RENEW_INTERVAL_MS: number; + readonly DATA_SOURCE__SUBSQUID__KUSAMA__TRANSFER: string; - @Transform(({ value }: { value: string }) => +value) @IsNotEmpty() - readonly AGGREGATOR_GS_MAIN_CHUNK_BLOCKS_SIZE: number; + readonly DATA_SOURCE__SUBSQUID__MOONBEAM__TRANSFER: string; @IsNotEmpty() - readonly NODE_ENV: string; + readonly DATA_SOURCE__SUBSQUID__MOONRIVER__TRANSFER: string; + + @IsNotEmpty() + readonly DATA_SOURCE__SUBSQUID__ASTAR__TRANSFER: string; + + @IsNotEmpty() + readonly DATA_SOURCE__SUBQUERY__POLKADOT__TRANSFER: string; + + @IsNotEmpty() + readonly DATA_SOURCE__SUBQUERY__KUSAMA__TRANSFER: string; + + @IsNotEmpty() + readonly DATA_SOURCE__SUBQUERY__MOONBEAM__TRANSFER: string; + + @IsNotEmpty() + readonly DATA_SOURCE__SUBQUERY__MOONRIVER__TRANSFER: string; + + @IsNotEmpty() + readonly DATA_SOURCE__SUBQUERY__ASTAR__TRANSFER: string; } @Global() diff --git 
a/src/constants/blockchain.ts b/src/constants/blockchain.ts index 9ff3c39..1c2f7ed 100644 --- a/src/constants/blockchain.ts +++ b/src/constants/blockchain.ts @@ -11,44 +11,56 @@ export enum BlockchainTag { // https://github.com/polkadot-js/apps/blob/7c12692ee34aadd815b282ff4dca3d0a4763a9ca/packages/apps-config/src/endpoints/productionRelayPolkadot.ts#L645 +// https://github.com/paritytech/ss58-registry/blob/main/ss58-registry.json + export const supportedBlockchainDetails: Omit[] = [ { + prefix: 5, text: 'Astar', info: 'astar', tag: BlockchainTag.ASTAR, - decimal: 10, + decimal: 18, + symbols: ['ASTR'], logo: '', color: '#1b6dc1d9', }, { + prefix: 0, text: 'Polkadot', info: 'polkadot', tag: BlockchainTag.POLKADOT, decimal: 10, + symbols: ['DOT'], logo: '', color: '#1b6dc1d9', }, { + prefix: 2, text: 'Kusama', info: 'kusama', tag: BlockchainTag.KUSAMA, - decimal: 10, + decimal: 12, + symbols: ['KSM'], logo: '', color: '#1b6dc1d9', }, { + prefix: 1284, text: 'Moonbeam', info: 'moonbeam', tag: BlockchainTag.MOONBEAM, decimal: 10, + symbols: ['GLMR'], logo: '', color: '#1b6dc1d9', }, { + prefix: 1285, text: 'Moonriver', info: 'moonriver', tag: BlockchainTag.MOONRIVER, - decimal: 10, + decimal: 18, + symbols: ['MOVR'], logo: '', color: '#1b6dc1d9', }, @@ -58,46 +70,36 @@ export const blockchainDataSourceConfigs = [ { tag: BlockchainTag.POLKADOT, events: { - [NativeTransactionKind.TRANSFER]: - 'https://squid.subsquid.io/gs-main-polkadot/graphql', - // [NativeTransactionKind.REWARD]: - // 'https://squid.subsquid.io/gs-main-polkadot/graphql', + [NativeTransactionKind.TRANSFER]: null, + // [NativeTransactionKind.REWARD]: null, }, }, { tag: BlockchainTag.KUSAMA, events: { - [NativeTransactionKind.TRANSFER]: - 'https://squid.subsquid.io/gs-main-kusama/graphql', - // [NativeTransactionKind.REWARD]: - // 'https://squid.subsquid.io/gs-main-kusama/graphql', + [NativeTransactionKind.TRANSFER]: null, + // [NativeTransactionKind.REWARD]: null, }, }, { tag: BlockchainTag.MOONBEAM, 
events: { - [NativeTransactionKind.TRANSFER]: - 'https://squid.subsquid.io/gs-main-moonbeam/graphql', - // [NativeTransactionKind.REWARD]: - // 'https://squid.subsquid.io/gs-main-moonbeam/graphql', + [NativeTransactionKind.TRANSFER]: null, + // [NativeTransactionKind.REWARD]: null, }, }, { tag: BlockchainTag.MOONRIVER, events: { - [NativeTransactionKind.TRANSFER]: - 'https://squid.subsquid.io/gs-main-moonriver/graphql', - // [NativeTransactionKind.REWARD]: - // 'https://squid.subsquid.io/gs-main-moonriver/graphql', + [NativeTransactionKind.TRANSFER]: null, + // [NativeTransactionKind.REWARD]: null, }, }, { tag: BlockchainTag.ASTAR, events: { - [NativeTransactionKind.TRANSFER]: - 'https://squid.subsquid.io/gs-main-astar/graphql', - // [NativeTransactionKind.REWARD]: - // 'https://squid.subsquid.io/gs-main-astar/graphql', + [NativeTransactionKind.TRANSFER]: null, + // [NativeTransactionKind.REWARD]: null, }, }, ] as const; diff --git a/src/constants/common.ts b/src/constants/common.ts index b3066e8..d006e7e 100644 --- a/src/constants/common.ts +++ b/src/constants/common.ts @@ -19,10 +19,18 @@ export enum TransactionKind { REWARD = 'REWARD', } +export enum TransferDirection { + FROM = 'FROM', + TO = 'TO', +} + export enum NativeTransactionKind { TRANSFER = 'TRANSFER', VOTE = 'VOTE', REWARD = 'REWARD', } - +export enum DataSourceProviders { + SUBSQUID = 'SUBSQUID', + SUBQUERY = 'SUBQUERY', +} \ No newline at end of file diff --git a/src/dependencyServiceModule.module.ts b/src/dependencyServiceModule.module.ts index 23751de..d8a0761 100644 --- a/src/dependencyServiceModule.module.ts +++ b/src/dependencyServiceModule.module.ts @@ -1,7 +1,7 @@ import { Module } from '@nestjs/common'; import { CryptoUtils } from './utils/cryptoUtils'; import { CommonUtils } from './utils/commonUtils'; -import { DataSourceUtils } from './utils/dataSourceUtils'; +import { DataSourceUtils } from './utils/dataSources/dataSourceUtils'; @Module({ providers: [CryptoUtils, CommonUtils, 
DataSourceUtils], diff --git a/src/modules/dataAggregator/dataAggregator.module.ts b/src/modules/dataAggregator/dataAggregator.module.ts index d2c4d35..805d4bf 100644 --- a/src/modules/dataAggregator/dataAggregator.module.ts +++ b/src/modules/dataAggregator/dataAggregator.module.ts @@ -8,7 +8,6 @@ import { AccountService } from '../entities/account/account.service'; import { Account } from '../entities/account/entities/account.entity'; import { Blockchain } from '../entities/blockchain/entities/blockchain.entity'; import { AggregationHelper } from './services/aggregation.helper'; -import { DataSourceUtils } from '../../utils/dataSourceUtils'; import { TransferNativeService } from '../entities/transferNative/transferNative.service'; import { TransactionService } from '../entities/transaction/transaction.service'; import { AccountTransactionService } from '../entities/accountTransaction/accountTransaction.service'; @@ -18,6 +17,7 @@ import { BullModule } from '@nestjs/bull'; import { SubIdAggregatorQueueName } from '../../constants/queues'; import { Transaction } from '../entities/transaction/entities/transaction.entity'; import { DatasourceChunksParallelHandlingProducer } from '../queueProcessor/services/producers/datasourceChunksParallelHandling.producer'; +import { DataSourceUtils } from '../../utils/dataSources/dataSourceUtils'; @Module({ imports: [ diff --git a/src/modules/dataAggregator/dto/collectTransfersChunkHandlerResponse.response.ts b/src/modules/dataAggregator/dto/collectTransfersChunkHandlerResponse.response.ts index 40e5461..00b781d 100644 --- a/src/modules/dataAggregator/dto/collectTransfersChunkHandlerResponse.response.ts +++ b/src/modules/dataAggregator/dto/collectTransfersChunkHandlerResponse.response.ts @@ -1,5 +1,5 @@ -import { GetTransfersByAccountQuery } from '../../../utils/graphQl/gsquidMain/gsquid-main-query'; +import { TransferDecoratedDto } from './transfersByAccountDecorated.dto'; export class CollectTransfersChunkHandlerResponseResponse 
{ - fetchedChunkData: GetTransfersByAccountQuery['transfers']; + fetchedChunkData: TransferDecoratedDto[]; } diff --git a/src/modules/dataAggregator/dto/getIndexerLastProcessedHeight.args.dto.ts b/src/modules/dataAggregator/dto/getIndexerLastProcessedHeight.args.dto.ts new file mode 100644 index 0000000..ffee8c3 --- /dev/null +++ b/src/modules/dataAggregator/dto/getIndexerLastProcessedHeight.args.dto.ts @@ -0,0 +1,3 @@ +export class GetIndexerLastProcessedHeightArgs { + queryUrl: string; +} diff --git a/src/modules/dataAggregator/dto/getMainGiantSquidStatus.args.dto.ts b/src/modules/dataAggregator/dto/getMainGiantSquidStatus.args.dto.ts deleted file mode 100644 index 7ca1279..0000000 --- a/src/modules/dataAggregator/dto/getMainGiantSquidStatus.args.dto.ts +++ /dev/null @@ -1,3 +0,0 @@ -export class GetMainGiantSquidStatusArgs { - queryUrl: string; -} diff --git a/src/modules/dataAggregator/dto/getTransfersByAccount.args.dto.ts b/src/modules/dataAggregator/dto/getTransfersByAccount.args.dto.ts index 683f007..f241b2e 100644 --- a/src/modules/dataAggregator/dto/getTransfersByAccount.args.dto.ts +++ b/src/modules/dataAggregator/dto/getTransfersByAccount.args.dto.ts @@ -1,8 +1,12 @@ +import { BlockchainTag } from '../../../constants/blockchain'; + export class GetTransfersByAccountArgs { + blockchainTag: BlockchainTag; limit: number; offset: number; blockNumber_gt: number; blockNumber_lt: number | null; - publicKey: string; + publicKey?: string; + address?: string; queryUrl: string; } diff --git a/src/modules/dataAggregator/dto/indexerLastProcessedHeightDecorated.dto.ts b/src/modules/dataAggregator/dto/indexerLastProcessedHeightDecorated.dto.ts new file mode 100644 index 0000000..9eae7d1 --- /dev/null +++ b/src/modules/dataAggregator/dto/indexerLastProcessedHeightDecorated.dto.ts @@ -0,0 +1,3 @@ +export type IndexerLastProcessedHeightDecoratedDto = { + height: number; +}; diff --git a/src/modules/dataAggregator/dto/transfersByAccountDecorated.dto.ts 
b/src/modules/dataAggregator/dto/transfersByAccountDecorated.dto.ts new file mode 100644 index 0000000..3cf4c8d --- /dev/null +++ b/src/modules/dataAggregator/dto/transfersByAccountDecorated.dto.ts @@ -0,0 +1,21 @@ +import { TransferDirection } from '../../../constants/common'; + +export type TransferDecoratedDto = { + direction?: TransferDirection | null; + transfer?: { + id: string; + amount: any; + fee?: string; + blockNumber: number; + eventIndex?: number; + extrinsicHash?: string | null; + success: boolean; + timestamp: any; + from: { publicKey: string }; + to: { publicKey: string }; + } | null; +}; + +export type TransfersByAccountDecoratedDto = { + transfers: TransferDecoratedDto[]; +}; diff --git a/src/modules/dataAggregator/services/aggregation.helper.ts b/src/modules/dataAggregator/services/aggregation.helper.ts index e9dc43b..c3efe42 100644 --- a/src/modules/dataAggregator/services/aggregation.helper.ts +++ b/src/modules/dataAggregator/services/aggregation.helper.ts @@ -2,18 +2,13 @@ import { Injectable } from '@nestjs/common'; import { BlockchainService } from '../../entities/blockchain/blockchain.service'; import { DatasourceHandlingProducer } from '../../queueProcessor/services/producers/datasourceHandling.producer'; import { CollectEventDataFromDataSourceInput } from '../../queueProcessor/dto/collectEventDataFromDataSource.input'; -import { DataSourceUtils } from '../../../utils/dataSourceUtils'; import { TransferNativeService } from '../../entities/transferNative/transferNative.service'; -import { - GetTransfersByAccountQuery, - Transfer as SquidTransfer, - TransferDirection, -} from '../../../utils/graphQl/gsquidMain/gsquid-main-query'; import { TransferNative } from '../../entities/transferNative/entities/transferNative.entity'; import { Transaction } from '../../entities/transaction/entities/transaction.entity'; import { NativeTransactionKind, TransactionKind, + TransferDirection, } from '../../../constants/common'; import { AccountTransaction } 
from '../../entities/accountTransaction/entities/accountTransaction.entity'; import { AccountService } from '../../entities/account/account.service'; @@ -27,6 +22,8 @@ import { BlockchainTag } from '../../../constants/blockchain'; import { CollectTransfersChunkHandlerResponseResponse } from '../dto/collectTransfersChunkHandlerResponse.response'; import { AppConfig } from '../../../config.module'; import { DatasourceChunksParallelHandlingProducer } from '../../queueProcessor/services/producers/datasourceChunksParallelHandling.producer'; +import { DataSourceUtils } from '../../../utils/dataSources/dataSourceUtils'; +import { TransferDecoratedDto } from '../dto/transfersByAccountDecorated.dto'; @Injectable() export class AggregationHelper { @@ -84,14 +81,14 @@ export class AggregationHelper { async collectTransferEventData( inputData: CollectEventDataFromDataSourceInput, ): Promise { - const sourceSquidStatus = - await this.dataSourceUtils.getMainGiantSquidStatus({ + const sourceIndexerStatus = + await this.dataSourceUtils.getIndexerLastProcessedHeight({ queryUrl: inputData.sourceUrl, }); let chunksRanges = this.getChunksRanges({ latestProcessedBlock: inputData.latestProcessedBlock, - totalBlocks: sourceSquidStatus.squidStatus.height, + totalBlocks: sourceIndexerStatus.height, }); const aggregationChunkResults = await Promise.allSettled( @@ -111,7 +108,7 @@ export class AggregationHelper { ); let lastBlock = 0; - let totalFetchedData: GetTransfersByAccountQuery['transfers'] = []; + let totalFetchedData: TransferDecoratedDto[] = []; for (const aggregationResult of aggregationChunkResults) { if (aggregationResult.status !== 'fulfilled') continue; totalFetchedData.push( @@ -133,11 +130,13 @@ export class AggregationHelper { for (const transferData of totalFetchedData) { const nativeTransferEntity = new TransferNative({ - id: transferData.id, + id: transferData.transfer.id, blockNumber: transferData.transfer.blockNumber, extrinsicHash: transferData.transfer.extrinsicHash, 
+ eventIndex: transferData.transfer.eventIndex ?? null, timestamp: new Date(transferData.transfer.timestamp), amount: BigInt(transferData.transfer.amount), + fee: transferData.transfer.fee ? BigInt(transferData.transfer.fee) : 0n, success: transferData.transfer.success, from: await this.accountService.getOrCreateAccount( transferData.transfer.from.publicKey, @@ -145,9 +144,10 @@ export class AggregationHelper { to: await this.accountService.getOrCreateAccount( transferData.transfer.to.publicKey, ), + blockchain, }); const txKind = - transferData.direction === TransferDirection.From + transferData.direction === TransferDirection.FROM ? TransactionKind.TRANSFER_FROM : TransactionKind.TRANSFER_TO; @@ -158,7 +158,7 @@ export class AggregationHelper { }); const accountTransaction = new AccountTransaction({ - id: `${this.commonUtils.getStringShortcut(inputData.publicKey)}-${ + id: `${this.commonUtils.getStringEndingShortcut(inputData.publicKey)}-${ transactionEntity.id }`, ownerPublicKey: txAccount.id, @@ -204,7 +204,7 @@ export class AggregationHelper { latestProcessedBlock: lastBlock || inputData.latestProcessedBlock || - sourceSquidStatus.squidStatus.height - 300, + sourceIndexerStatus.height - 300, action: inputData.event, blockchainTag: inputData.blockchainTag, }; @@ -213,7 +213,7 @@ export class AggregationHelper { async collectTransferEventDataChunk( inputData: CollectEventDataChunkFromDataSourceInput, ): Promise { - const responseBuffer: GetTransfersByAccountQuery['transfers'] = []; + const responseBuffer: TransferDecoratedDto[] = []; let index = 1; const pubicKeyShort = `${inputData.publicKey.substring( @@ -231,6 +231,7 @@ export class AggregationHelper { ); const resp = await this.dataSourceUtils.getTransfersByAccount({ + blockchainTag: inputData.blockchainTag, limit: pageSize, offset: currentOffset, publicKey: inputData.publicKey, diff --git a/src/modules/entities/blockchain/blockchain.service.ts b/src/modules/entities/blockchain/blockchain.service.ts index 
1798c71..157b9b2 100644 --- a/src/modules/entities/blockchain/blockchain.service.ts +++ b/src/modules/entities/blockchain/blockchain.service.ts @@ -7,15 +7,35 @@ import { BlockchainTag, supportedBlockchainDetails, } from '../../../constants/blockchain'; +import { AppConfig } from '../../../config.module'; @Injectable() export class BlockchainService { - public readonly blockchainDataSourceConfigs = blockchainDataSourceConfigs; + public readonly blockchainDataSourceConfigs = []; constructor( @InjectRepository(Blockchain) public readonly blockchainRepository: Repository, - ) {} + private appConfig: AppConfig, + ) { + this.setDataSourceEndpoints(); + } + + setDataSourceEndpoints() { + for (const chainConfig of blockchainDataSourceConfigs) { + const chainConfigUpdated = chainConfig; + + for (const eventName in chainConfig.events) { + chainConfigUpdated.events[eventName] = + this.appConfig[ + `DATA_SOURCE__${ + this.appConfig[`DATA_SOURCE_PROVIDER_${eventName}`] + }__${chainConfig.tag}__${eventName}` + ]; + } + this.blockchainDataSourceConfigs.push(chainConfigUpdated); + } + } async getOrCreateBlockchain(blockchainId: string): Promise { if (blockchainId === null || !blockchainId) @@ -55,6 +75,8 @@ export class BlockchainService { chainData.text = supportedChainData.text; chainData.logo = supportedChainData.logo; chainData.decimal = supportedChainData.decimal; + chainData.prefix = supportedChainData.prefix; + chainData.symbols = supportedChainData.symbols; chainData.color = supportedChainData.color; chainsToSave.push(chainData); diff --git a/src/modules/entities/blockchain/entities/blockchain.entity.ts b/src/modules/entities/blockchain/entities/blockchain.entity.ts index 601364f..aaf5e11 100644 --- a/src/modules/entities/blockchain/entities/blockchain.entity.ts +++ b/src/modules/entities/blockchain/entities/blockchain.entity.ts @@ -18,6 +18,14 @@ export class Blockchain { @Field(() => String, { nullable: false }) info: string; + @Column('text', { array: true, default: 
[], nullable: true }) + @Field(() => [String], { nullable: true, defaultValue: [] }) + symbols: string[]; + + @Column({ nullable: true }) + @Field(() => Int, { nullable: true }) + prefix?: number; + @Column({ type: 'enum', enum: BlockchainTag, diff --git a/src/modules/entities/transferNative/entities/transferNative.entity.ts b/src/modules/entities/transferNative/entities/transferNative.entity.ts index 13d893e..c8e0092 100644 --- a/src/modules/entities/transferNative/entities/transferNative.entity.ts +++ b/src/modules/entities/transferNative/entities/transferNative.entity.ts @@ -24,6 +24,13 @@ export class TransferNative { @Field(() => String) id: string; + @ManyToOne(() => Blockchain, (blockchain) => blockchain.id, { + nullable: false, + }) + @JoinColumn({ name: 'blockchain_id' }) + @Field(() => Blockchain, { nullable: false }) + blockchain: Blockchain; + @Column({ nullable: false, name: 'block_number' }) @Field(() => Int, { nullable: false }) blockNumber: number; @@ -32,15 +39,23 @@ export class TransferNative { @Field(() => String, { nullable: true }) extrinsicHash?: string; + @Column({ nullable: true, name: 'event_index' }) + @Field(() => Int, { nullable: true }) + eventIndex?: number; + @Column({ type: 'timestamp with time zone', nullable: false }) @Field(() => Date, { nullable: false }) timestamp: Date; - @Column({ type: 'numeric', transformer: bigintTransformer, nullable: true }) + @Column({ type: 'numeric', transformer: bigintTransformer, nullable: false }) @Field(() => GraphQLBigInt, { nullable: false }) amount: bigint; - @Column({ nullable: true }) + @Column({ type: 'numeric', transformer: bigintTransformer, nullable: true }) + @Field(() => GraphQLBigInt, { nullable: true }) + fee?: bigint; + + @Column({ nullable: false }) @Field(() => Boolean, { nullable: false }) success: boolean; diff --git a/src/modules/queueProcessor/services/workers/collectTransfersDataChunk.worker.ts b/src/modules/queueProcessor/services/workers/collectTransfersDataChunk.worker.ts 
index f63b502..c3c805f 100644 --- a/src/modules/queueProcessor/services/workers/collectTransfersDataChunk.worker.ts +++ b/src/modules/queueProcessor/services/workers/collectTransfersDataChunk.worker.ts @@ -1,7 +1,8 @@ import { Job, DoneCallback } from 'bull'; -import { GetTransfersByAccountQuery } from '../../../../utils/graphQl/gsquidMain/gsquid-main-query'; -import { DataSourceUtils } from '../../../../utils/dataSourceUtils'; import crypto from 'node:crypto'; +import { DataSourceUtils } from '../../../../utils/dataSources/dataSourceUtils'; +import { TransferDecoratedDto } from '../../../dataAggregator/dto/transfersByAccountDecorated.dto'; +import { AppConfig } from '../../../../config.module'; export default async function (job: Job, cb: DoneCallback) { try { @@ -12,9 +13,9 @@ export default async function (job: Job, cb: DoneCallback) { ); const inputData = job.data; - const dataSourceUtils = new DataSourceUtils(); + const dataSourceUtils = new DataSourceUtils(new AppConfig()); - const responseBuffer: GetTransfersByAccountQuery['transfers'] = []; + const responseBuffer: TransferDecoratedDto[] = []; let index = 1; const runQuery = async (offset: number = 0) => { @@ -28,6 +29,7 @@ export default async function (job: Job, cb: DoneCallback) { ); const resp = await dataSourceUtils.getTransfersByAccount({ + blockchainTag: inputData.blockchainTag, limit: pageSize, offset: currentOffset, publicKey: inputData.publicKey, diff --git a/src/utils/commonUtils.ts b/src/utils/commonUtils.ts index 4faade1..58fafab 100644 --- a/src/utils/commonUtils.ts +++ b/src/utils/commonUtils.ts @@ -4,7 +4,17 @@ import { Injectable } from '@nestjs/common'; export class CommonUtils { constructor() {} - getStringShortcut(publicKey: string): string { - return publicKey.substring(publicKey.length - 6, publicKey.length - 1); + getStringEndingShortcut(str: string): string { + return str.substring(str.length - 6, str.length - 1); + } + + getTransferId({ + blockNumber, + eventIndex, + }: { + 
blockNumber: number; + eventIndex?: string | number; + }) { + return `${blockNumber}-${eventIndex}`; } } diff --git a/src/utils/cryptoUtils.ts b/src/utils/cryptoUtils.ts index 16495b5..bd942df 100644 --- a/src/utils/cryptoUtils.ts +++ b/src/utils/cryptoUtils.ts @@ -2,9 +2,16 @@ import { Injectable } from '@nestjs/common'; import { decodeAddress, encodeAddress } from '@polkadot/util-crypto'; import { u8aToHex, isHex, hexToU8a } from '@polkadot/util'; import { ethers } from 'ethers'; +import { + BlockchainTag, + supportedBlockchainDetails, +} from '../constants/blockchain'; @Injectable() export class CryptoUtils { + private supportedBlockchainDetailsMap = new Map( + supportedBlockchainDetails.map((data) => [data.tag, data]), + ); constructor() {} addressToHex(address: string | Uint8Array) { @@ -12,6 +19,13 @@ export class CryptoUtils { return u8aToHex(publicKey); } + publicKeyToFormattedAddress(publicKey: string, blockchainTag: BlockchainTag) { + return encodeAddress( + decodeAddress(publicKey), + this.supportedBlockchainDetailsMap.get(blockchainTag).prefix, + ); + } + isValidAddress(maybeAddress: string) { if ( this.isValidSubstrateAddress(maybeAddress) || diff --git a/src/utils/dataSourceUtils.ts b/src/utils/dataSourceUtils.ts deleted file mode 100644 index 078800a..0000000 --- a/src/utils/dataSourceUtils.ts +++ /dev/null @@ -1,124 +0,0 @@ -import { Injectable } from '@nestjs/common'; -import { GraphQLClient, RequestOptions, Variables } from 'graphql-request'; -import { - GET_MAIN_SQUID_STATUS, - GET_TRANSFERS_BY_ACCOUNT, - GET_TRANSFERS_COUNT_BY_ACCOUNT, -} from './graphQl/gsquidMain/query'; -import { - GetMainSquidStatusQuery, - GetTransfersByAccountQuery, - GetTransfersCountByAccountQuery, - QueryTransfersArgs, - QueryTransfersConnectionArgs, - Transfer, - TransferOrderByInput, -} from './graphQl/gsquidMain/gsquid-main-query'; -import { GetTransfersByAccountArgs } from '../modules/dataAggregator/dto/getTransfersByAccount.args.dto'; -import { 
GetTransfersCountByAccountArgs } from '../modules/dataAggregator/dto/getTransfersCountByAccount.args.dto'; -import { GetMainGiantSquidStatusArgs } from '../modules/dataAggregator/dto/getMainGiantSquidStatus.args.dto'; - -@Injectable() -export class DataSourceUtils { - // private graphQLClient: GraphQLClient; - - constructor() {} - - async requestWithRetry( - requestPromise: Promise, - { retries = 3, everyMs = 1_000 }, - retriesCount = 0, - ): Promise { - try { - return await requestPromise; - } catch (e) { - const updatedCount = retriesCount + 1; - if (updatedCount > retries) { - throw Error((e as Error).message); - } - await new Promise((resolve) => setTimeout(resolve, everyMs)); - return await this.requestWithRetry( - requestPromise, - { retries, everyMs }, - updatedCount, - ); - } - } - - squidQueryRequest( - config: RequestOptions, - queryUrl: string, - ) { - if (!queryUrl) throw new Error('queryUrl is not provided'); - - const TIMEOUT = 2 * 60 * 1000; - const client = new GraphQLClient(queryUrl, { - timeout: TIMEOUT, - ...config, - }); - return client.request({ queryUrl, ...config }); - } - - async getTransfersByAccount(data: GetTransfersByAccountArgs) { - // console.log( - // `request started :: ${data.blockNumber_gt}/${data.blockNumber_lt}`, - // ); - const res = await this.requestWithRetry( - this.squidQueryRequest( - { - document: GET_TRANSFERS_BY_ACCOUNT, - variables: { - limit: data.limit, - offset: data.offset, - // orderBy: [TransferOrderByInput.TransferTimestampAsc], - where: { - account: { publicKey_eq: data.publicKey }, - transfer: { - blockNumber_gt: data.blockNumber_gt, - ...(data.blockNumber_lt - ? 
{ blockNumber_lt: data.blockNumber_lt } - : {}), - }, - }, - }, - }, - data.queryUrl, - ), - { retries: 5, everyMs: 1_500 }, - ); - return res; - } - - async getTransfersCountByAccount(data: GetTransfersCountByAccountArgs) { - const res = await this.squidQueryRequest< - GetTransfersCountByAccountQuery, - QueryTransfersConnectionArgs - >( - { - document: GET_TRANSFERS_COUNT_BY_ACCOUNT, - variables: { - orderBy: [TransferOrderByInput.IdAsc], - where: { - account: { publicKey_eq: data.publicKey }, - transfer: { - blockNumber_gt: data.blockNumber_gt, - }, - }, - }, - }, - data.queryUrl, - ); - return res; - } - - async getMainGiantSquidStatus(data: GetMainGiantSquidStatusArgs) { - const res = await this.squidQueryRequest( - { - document: GET_MAIN_SQUID_STATUS, - variables: {}, - }, - data.queryUrl, - ); - return res; - } -} diff --git a/src/utils/dataSources/common.ts b/src/utils/dataSources/common.ts new file mode 100644 index 0000000..6dde9d4 --- /dev/null +++ b/src/utils/dataSources/common.ts @@ -0,0 +1,40 @@ +import { GraphQLClient, RequestOptions, Variables } from 'graphql-request'; + +export class CommonDataSourceUtils { + constructor() {} + + async requestWithRetry( + requestPromise: Promise, + { retries = 3, everyMs = 1_000 }, + retriesCount = 0, + ): Promise { + try { + return await requestPromise; + } catch (e) { + const updatedCount = retriesCount + 1; + if (updatedCount > retries) { + throw Error((e as Error).message); + } + await new Promise((resolve) => setTimeout(resolve, everyMs)); + return await this.requestWithRetry( + requestPromise, + { retries, everyMs }, + updatedCount, + ); + } + } + + indexerQueryRequest( + config: RequestOptions, + queryUrl: string, + ) { + if (!queryUrl) throw new Error('queryUrl is not provided'); + + const TIMEOUT = 2 * 60 * 1000; + const client = new GraphQLClient(queryUrl, { + timeout: TIMEOUT, + ...config, + }); + return client.request({ queryUrl, ...config }); + } +} diff --git 
a/src/utils/dataSources/dataSourceDecorators.ts b/src/utils/dataSources/dataSourceDecorators.ts new file mode 100644 index 0000000..d7f5e80 --- /dev/null +++ b/src/utils/dataSources/dataSourceDecorators.ts @@ -0,0 +1,122 @@ +import { + GetIndexerLastProcessedHeightSubQueryQuery, + GetTransfersByAccountSubQueryQuery, +} from '../graphQl/subQueryNova/subquery-nova-query'; +import { TransfersByAccountDecoratedDto } from '../../modules/dataAggregator/dto/transfersByAccountDecorated.dto'; +import { TransferDirection } from '../../constants/common'; +import { CommonUtils } from '../commonUtils'; +import { CryptoUtils } from '../cryptoUtils'; +import { TransferDto } from '../graphQl/subQueryNova/transfer.dto'; +import { IndexerLastProcessedHeightDecoratedDto } from '../../modules/dataAggregator/dto/indexerLastProcessedHeightDecorated.dto'; +import { + GetMainSquidStatusSubSquidQuery, + GetTransfersByAccountSubSquidQuery, +} from '../graphQl/gsquidMain/gsquid-main-query'; + +export class DataSourceDecorators { + private commonUtils: CommonUtils = new CommonUtils(); + private cryptoUtils: CryptoUtils = new CryptoUtils(); + + getTransferDirection({ + from, + to, + address, + }: { + from: string; + to: string; + address: string; + }): TransferDirection { + if (address === from) return TransferDirection.FROM; + return TransferDirection.TO; + } + + decorateGetTransfersByAccountResponseFromSubQuery( + queryResponse: GetTransfersByAccountSubQueryQuery, + ): TransfersByAccountDecoratedDto { + const decoratedData: TransfersByAccountDecoratedDto = { + transfers: [], + }; + + for (const node of queryResponse.historyElements.nodes) { + const transferData = node.transfer as TransferDto; + decoratedData.transfers.push({ + direction: this.getTransferDirection({ + from: transferData.from, + to: transferData.to, + address: node.address, + }), + transfer: { + id: this.commonUtils.getTransferId({ + blockNumber: node.blockNumber, + eventIndex: transferData.eventIdx, + }), + amount: 
transferData.amount, + fee: transferData.fee, + blockNumber: node.blockNumber, + extrinsicHash: node.extrinsicHash, + eventIndex: transferData.eventIdx, + success: transferData.success, + timestamp: +node.timestamp * 1000, + from: { + publicKey: this.cryptoUtils.addressToHex(transferData.from), + }, + to: { publicKey: this.cryptoUtils.addressToHex(transferData.to) }, + }, + }); + } + return decoratedData; + } + + decorateGetTransfersByAccountResponseFromSubSquid( + queryResponse: GetTransfersByAccountSubSquidQuery, + ) { + const decoratedData: TransfersByAccountDecoratedDto = { + transfers: [], + }; + + // for (const transfer of queryResponse.transfers) { + // + // decoratedData.transfers.push({ + // direction: this.getTransferDirection({ + // from: transfer.transfer.from.publicKey, + // to: transfer.transfer.to.publicKey, + // address: node.address, + // }), + // transfer: { + // id: this.commonUtils.getTransferId({ + // blockNumber: transfer.transfer.blockNumber, + // eventIndex: transfer.transfer.eventIdx, + // }), + // amount: transferData.amount, + // fee: 0, + // blockNumber: node.blockNumber, + // extrinsicHash: node.extrinsicHash, + // eventIndex: transferData.eventIdx, + // success: transferData.success, + // timestamp: node.timestamp, + // from: { + // publicKey: this.cryptoUtils.addressToHex(transferData.from), + // }, + // to: { publicKey: this.cryptoUtils.addressToHex(transferData.to) }, + // }, + // }); + // } + return decoratedData; + } + + decorateGetIndexerLastProcessedHeightFromSubQuery( + queryResponse: GetIndexerLastProcessedHeightSubQueryQuery, + ): IndexerLastProcessedHeightDecoratedDto { + return { + height: +queryResponse._metadata.lastProcessedHeight, + }; + } + + decorateGetIndexerLastProcessedHeightFromSubSquid( + queryResponse: GetMainSquidStatusSubSquidQuery, + ) { + return { + height: +queryResponse.squidStatus.height, + }; + } +} diff --git a/src/utils/dataSources/dataSourceUtils.subQuery.ts 
b/src/utils/dataSources/dataSourceUtils.subQuery.ts new file mode 100644 index 0000000..d00f132 --- /dev/null +++ b/src/utils/dataSources/dataSourceUtils.subQuery.ts @@ -0,0 +1,74 @@ +import { Injectable } from '@nestjs/common'; +import { GetTransfersByAccountArgs } from '../../modules/dataAggregator/dto/getTransfersByAccount.args.dto'; +import { GetTransfersCountByAccountArgs } from '../../modules/dataAggregator/dto/getTransfersCountByAccount.args.dto'; +import { GetIndexerLastProcessedHeightArgs } from '../../modules/dataAggregator/dto/getIndexerLastProcessedHeight.args.dto'; +import { CommonDataSourceUtils } from './common'; +import { + GET_INDEXER_LAST_PROCESSED_HEIGHT, + GET_TRANSFERS_BY_ACCOUNT_SUBQUERY, +} from '../graphQl/subQueryNova/query'; +import { + GetIndexerLastProcessedHeightSubQueryQuery, + GetTransfersByAccountSubQueryQuery, + GetTransfersByAccountSubQueryQueryVariables, +} from '../graphQl/subQueryNova/subquery-nova-query'; +import { CryptoUtils } from '../cryptoUtils'; + +// @Injectable() +export class DataSourceUtilsSubQuery extends CommonDataSourceUtils { + private cryptoUtils: CryptoUtils = new CryptoUtils(); + + async getTransfersByAccount(data: GetTransfersByAccountArgs) { + const res = await this.requestWithRetry( + this.indexerQueryRequest< + GetTransfersByAccountSubQueryQuery, + GetTransfersByAccountSubQueryQueryVariables + >( + { + document: GET_TRANSFERS_BY_ACCOUNT_SUBQUERY, + variables: { + offset: data.offset, + first: data.limit, + // orderBy: [TransferOrderByInput.TransferTimestampAsc], + filter: { + address: { + equalTo: this.cryptoUtils.publicKeyToFormattedAddress( + data.publicKey, + data.blockchainTag, + ), + }, + blockNumber: { + greaterThan: data.blockNumber_gt, + ...(data.blockNumber_lt + ? 
{ lessThan: data.blockNumber_lt } + : {}), + }, + transfer: { isNull: false }, + }, + }, + }, + data.queryUrl, + ), + { retries: 5, everyMs: 1_500 }, + ); + return res; + } + + async getIndexerLastProcessedHeight(data: GetIndexerLastProcessedHeightArgs) { + const res = + await this.requestWithRetry( + this.indexerQueryRequest< + GetIndexerLastProcessedHeightSubQueryQuery, + {} + >( + { + document: GET_INDEXER_LAST_PROCESSED_HEIGHT, + variables: {}, + }, + data.queryUrl, + ), + { retries: 5, everyMs: 1_500 }, + ); + return res; + } +} diff --git a/src/utils/dataSources/dataSourceUtils.subSquid.ts b/src/utils/dataSources/dataSourceUtils.subSquid.ts new file mode 100644 index 0000000..5a57f09 --- /dev/null +++ b/src/utils/dataSources/dataSourceUtils.subSquid.ts @@ -0,0 +1,94 @@ +import { Injectable } from '@nestjs/common'; +import { GraphQLClient, RequestOptions, Variables } from 'graphql-request'; +import { + GET_MAIN_SQUID_STATUS_SUBSQUID, + GET_TRANSFERS_BY_ACCOUNT_SUBSQUID, + GET_TRANSFERS_COUNT_BY_ACCOUNT_SUBSQUID, +} from '../graphQl/gsquidMain/query'; +import { + GetMainSquidStatusSubSquidQuery, + GetTransfersByAccountSubSquidQuery, + GetTransfersCountByAccountSubSquidQuery, + QueryTransfersArgs, + QueryTransfersConnectionArgs, + Transfer, + TransferOrderByInput, +} from '../graphQl/gsquidMain/gsquid-main-query'; +import { GetTransfersByAccountArgs } from '../../modules/dataAggregator/dto/getTransfersByAccount.args.dto'; +import { GetTransfersCountByAccountArgs } from '../../modules/dataAggregator/dto/getTransfersCountByAccount.args.dto'; +import { GetIndexerLastProcessedHeightArgs } from '../../modules/dataAggregator/dto/getIndexerLastProcessedHeight.args.dto'; +import { CommonDataSourceUtils } from './common'; + +// @Injectable() +export class DataSourceUtilsSubSquid extends CommonDataSourceUtils { + // private graphQLClient: GraphQLClient; + + async getTransfersByAccount(data: GetTransfersByAccountArgs) { + // console.log( + // `request started :: 
${data.blockNumber_gt}/${data.blockNumber_lt}`, + // ); + const res = await this.requestWithRetry( + this.indexerQueryRequest< + GetTransfersByAccountSubSquidQuery, + QueryTransfersArgs + >( + { + document: GET_TRANSFERS_BY_ACCOUNT_SUBSQUID, + variables: { + limit: data.limit, + offset: data.offset, + // orderBy: [TransferOrderByInput.TransferTimestampAsc], + where: { + account: { publicKey_eq: data.publicKey }, + transfer: { + blockNumber_gt: data.blockNumber_gt, + ...(data.blockNumber_lt + ? { blockNumber_lt: data.blockNumber_lt } + : {}), + }, + }, + }, + }, + data.queryUrl, + ), + { retries: 5, everyMs: 1_500 }, + ); + return res; + } + + // async getTransfersCountByAccount(data: GetTransfersCountByAccountArgs) { + // const res = await this.indexerQueryRequest< + // GetTransfersCountByAccountQuery, + // QueryTransfersConnectionArgs + // >( + // { + // document: GET_TRANSFERS_COUNT_BY_ACCOUNT_SUBSQUID, + // variables: { + // orderBy: [TransferOrderByInput.IdAsc], + // where: { + // account: { publicKey_eq: data.publicKey }, + // transfer: { + // blockNumber_gt: data.blockNumber_gt, + // }, + // }, + // }, + // }, + // data.queryUrl, + // ); + // return res; + // } + + async getIndexerLastProcessedHeight(data: GetIndexerLastProcessedHeightArgs) { + const res = await this.indexerQueryRequest< + GetMainSquidStatusSubSquidQuery, + {} + >( + { + document: GET_MAIN_SQUID_STATUS_SUBSQUID, + variables: {}, + }, + data.queryUrl, + ); + return res; + } +} diff --git a/src/utils/dataSources/dataSourceUtils.ts b/src/utils/dataSources/dataSourceUtils.ts new file mode 100644 index 0000000..27af2f0 --- /dev/null +++ b/src/utils/dataSources/dataSourceUtils.ts @@ -0,0 +1,58 @@ +import { Injectable } from '@nestjs/common'; +import { GetTransfersByAccountArgs } from '../../modules/dataAggregator/dto/getTransfersByAccount.args.dto'; +import { GetIndexerLastProcessedHeightArgs } from '../../modules/dataAggregator/dto/getIndexerLastProcessedHeight.args.dto'; +import { 
DataSourceUtilsSubSquid } from './dataSourceUtils.subSquid'; +import { DataSourceUtilsSubQuery } from './dataSourceUtils.subQuery'; +import { DataSourceDecorators } from './dataSourceDecorators'; +import { IndexerLastProcessedHeightDecoratedDto } from '../../modules/dataAggregator/dto/indexerLastProcessedHeightDecorated.dto'; +import { TransfersByAccountDecoratedDto } from '../../modules/dataAggregator/dto/transfersByAccountDecorated.dto'; +import { AppConfig } from '../../config.module'; +import { DataSourceProviders } from '../../constants/common'; + +@Injectable() +export class DataSourceUtils extends DataSourceDecorators { + constructor(private appConfig: AppConfig) { + super(); + } + + private subsSquid: DataSourceUtilsSubSquid = new DataSourceUtilsSubSquid(); + private subsQuery: DataSourceUtilsSubQuery = new DataSourceUtilsSubQuery(); + + async getTransfersByAccount( + data: GetTransfersByAccountArgs, + ): Promise { + switch (this.appConfig.DATA_SOURCE_PROVIDER_TRANSFER) { + case DataSourceProviders.SUBSQUID: + return this.decorateGetTransfersByAccountResponseFromSubSquid( + await this.subsSquid.getTransfersByAccount(data), + ); + case DataSourceProviders.SUBQUERY: + return this.decorateGetTransfersByAccountResponseFromSubQuery( + await this.subsQuery.getTransfersByAccount(data), + ); + default: + throw new Error( + `Valid DATA_SOURCE_PROVIDER_TRANSFERS has not been provided. 
[${this.appConfig.DATA_SOURCE_PROVIDER_TRANSFER}]`, + ); + } + } + + async getIndexerLastProcessedHeight( + data: GetIndexerLastProcessedHeightArgs, + ): Promise { + switch (this.appConfig.DATA_SOURCE_PROVIDER_TRANSFER) { + case DataSourceProviders.SUBSQUID: + return this.decorateGetIndexerLastProcessedHeightFromSubSquid( + await this.subsSquid.getIndexerLastProcessedHeight(data), + ); + case DataSourceProviders.SUBQUERY: + return this.decorateGetIndexerLastProcessedHeightFromSubQuery( + await this.subsQuery.getIndexerLastProcessedHeight(data), + ); + default: + throw new Error( + `Valid DATA_SOURCE_PROVIDER_TRANSFERS has not been provided. [${this.appConfig.DATA_SOURCE_PROVIDER_TRANSFER}]`, + ); + } + } +} diff --git a/src/utils/graphQl/gsquidMain/gsquid-main-query.ts b/src/utils/graphQl/gsquidMain/gsquid-main-query.ts index 8755e5d..2c17db0 100644 --- a/src/utils/graphQl/gsquidMain/gsquid-main-query.ts +++ b/src/utils/graphQl/gsquidMain/gsquid-main-query.ts @@ -1041,31 +1041,31 @@ export type WhereIdInput = { id: Scalars['String']['input']; }; -export type GetTransfersByAccountQueryVariables = Exact<{ +export type GetTransfersByAccountSubSquidQueryVariables = Exact<{ where: TransferWhereInput; limit: Scalars['Int']['input']; offset: Scalars['Int']['input']; }>; -export type GetTransfersByAccountQuery = { __typename?: 'Query', transfers: Array<{ __typename?: 'Transfer', id: string, direction?: TransferDirection | null, transfer?: { __typename?: 'NativeTransfer', amount: any, blockNumber: number, extrinsicHash?: string | null, id: string, success: boolean, timestamp: any, from: { __typename?: 'Account', publicKey: string }, to: { __typename?: 'Account', publicKey: string } } | null }> }; +export type GetTransfersByAccountSubSquidQuery = { __typename?: 'Query', transfers: Array<{ __typename?: 'Transfer', id: string, direction?: TransferDirection | null, transfer?: { __typename?: 'NativeTransfer', amount: any, blockNumber: number, extrinsicHash?: string | null, id: string, 
success: boolean, timestamp: any, from: { __typename?: 'Account', publicKey: string }, to: { __typename?: 'Account', publicKey: string } } | null }> }; -export type GetTransfersCountByAccountQueryVariables = Exact<{ +export type GetTransfersCountByAccountSubSquidQueryVariables = Exact<{ where: TransferWhereInput; orderBy: Array | TransferOrderByInput; }>; -export type GetTransfersCountByAccountQuery = { __typename?: 'Query', transfersConnection: { __typename?: 'TransfersConnection', totalCount: number } }; +export type GetTransfersCountByAccountSubSquidQuery = { __typename?: 'Query', transfersConnection: { __typename?: 'TransfersConnection', totalCount: number } }; -export type GetMainSquidStatusQueryVariables = Exact<{ [key: string]: never; }>; +export type GetMainSquidStatusSubSquidQueryVariables = Exact<{ [key: string]: never; }>; -export type GetMainSquidStatusQuery = { __typename?: 'Query', squidStatus?: { __typename?: 'SquidStatus', height?: number | null } | null }; +export type GetMainSquidStatusSubSquidQuery = { __typename?: 'Query', squidStatus?: { __typename?: 'SquidStatus', height?: number | null } | null }; -export const GetTransfersByAccount = gql` - query GetTransfersByAccount($where: TransferWhereInput!, $limit: Int!, $offset: Int!) { +export const GetTransfersByAccountSubSquid = gql` + query GetTransfersByAccountSubSquid($where: TransferWhereInput!, $limit: Int!, $offset: Int!) { transfers(where: $where, limit: $limit, offset: $offset) { id direction @@ -1086,15 +1086,15 @@ export const GetTransfersByAccount = gql` } } `; -export const GetTransfersCountByAccount = gql` - query GetTransfersCountByAccount($where: TransferWhereInput!, $orderBy: [TransferOrderByInput!]!) { +export const GetTransfersCountByAccountSubSquid = gql` + query GetTransfersCountByAccountSubSquid($where: TransferWhereInput!, $orderBy: [TransferOrderByInput!]!) 
{ transfersConnection(where: $where, orderBy: $orderBy) { totalCount } } `; -export const GetMainSquidStatus = gql` - query GetMainSquidStatus { +export const GetMainSquidStatusSubSquid = gql` + query GetMainSquidStatusSubSquid { squidStatus { height } diff --git a/src/utils/graphQl/gsquidMain/query.ts b/src/utils/graphQl/gsquidMain/query.ts index 5592057..584dfe7 100644 --- a/src/utils/graphQl/gsquidMain/query.ts +++ b/src/utils/graphQl/gsquidMain/query.ts @@ -1,8 +1,8 @@ import gql from 'graphql-tag'; import { TransferOrderByInput } from './gsquid-main-query'; -export const GET_TRANSFERS_BY_ACCOUNT = gql` - query GetTransfersByAccount( +export const GET_TRANSFERS_BY_ACCOUNT_SUBSQUID = gql` + query GetTransfersByAccountSubSquid( $where: TransferWhereInput! $limit: Int! $offset: Int! # $orderBy: [TransferOrderByInput!]! @@ -32,8 +32,8 @@ export const GET_TRANSFERS_BY_ACCOUNT = gql` } `; -export const GET_TRANSFERS_COUNT_BY_ACCOUNT = gql` - query GetTransfersCountByAccount( +export const GET_TRANSFERS_COUNT_BY_ACCOUNT_SUBSQUID = gql` + query GetTransfersCountByAccountSubSquid( $where: TransferWhereInput! $orderBy: [TransferOrderByInput!]! ) { @@ -43,10 +43,10 @@ export const GET_TRANSFERS_COUNT_BY_ACCOUNT = gql` } `; -export const GET_MAIN_SQUID_STATUS = gql` - query GetMainSquidStatus { - squidStatus { - height - } +export const GET_MAIN_SQUID_STATUS_SUBSQUID = gql` + query GetMainSquidStatusSubSquid { + squidStatus { + height } + } `; diff --git a/src/utils/graphQl/subQueryNova/query.ts b/src/utils/graphQl/subQueryNova/query.ts new file mode 100644 index 0000000..fd27890 --- /dev/null +++ b/src/utils/graphQl/subQueryNova/query.ts @@ -0,0 +1,29 @@ +import gql from 'graphql-tag'; +import { HistoryElementFilter, Scalars } from './subquery-nova-query'; + +export const GET_TRANSFERS_BY_ACCOUNT_SUBQUERY = gql` + query GetTransfersByAccountSubQuery( + $filter: HistoryElementFilter! 
+ $offset: Int + $first: Int + ) { + historyElements(filter: $filter, offset: $offset, first: $first) { + nodes { + transfer + extrinsicHash + extrinsicIdx + blockNumber + address + timestamp + } + } + } +`; + +export const GET_INDEXER_LAST_PROCESSED_HEIGHT = gql` + query GetIndexerLastProcessedHeightSubQuery { + _metadata { + lastProcessedHeight + } + } +`; diff --git a/src/utils/graphQl/subQueryNova/subquery-nova-query.ts b/src/utils/graphQl/subQueryNova/subquery-nova-query.ts new file mode 100644 index 0000000..346a36f --- /dev/null +++ b/src/utils/graphQl/subQueryNova/subquery-nova-query.ts @@ -0,0 +1,2761 @@ +import gql from 'graphql-tag'; +export type Maybe = T | null; +export type InputMaybe = Maybe; +export type Exact = { [K in keyof T]: T[K] }; +export type MakeOptional = Omit & { [SubKey in K]?: Maybe }; +export type MakeMaybe = Omit & { [SubKey in K]: Maybe }; +export type MakeEmpty = { [_ in K]?: never }; +export type Incremental = T | { [P in keyof T]?: P extends ' $fragmentName' | '__typename' ? T[P] : never }; +/** All built-in and custom scalars, mapped to their actual values */ +export type Scalars = { + ID: { input: string; output: string; } + String: { input: string; output: string; } + Boolean: { input: boolean; output: boolean; } + Int: { input: number; output: number; } + Float: { input: number; output: number; } + BigFloat: { input: any; output: any; } + BigInt: { input: any; output: any; } + Cursor: { input: any; output: any; } + Date: { input: any; output: any; } + Datetime: { input: any; output: any; } + JSON: { input: any; output: any; } +}; + +export type AccountPoolReward = Node & { + __typename?: 'AccountPoolReward'; + accumulatedAmount: Scalars['BigFloat']['output']; + address: Scalars['String']['output']; + amount: Scalars['BigFloat']['output']; + blockNumber: Scalars['Int']['output']; + id: Scalars['String']['output']; + /** A globally unique identifier. 
Can be used in various places throughout the system to identify this single value. */ + nodeId: Scalars['ID']['output']; + poolId: Scalars['Int']['output']; + timestamp: Scalars['BigFloat']['output']; + type: RewardType; +}; + +export type AccountPoolRewardAggregates = { + __typename?: 'AccountPoolRewardAggregates'; + /** Mean average aggregates across the matching connection (ignoring before/after/first/last/offset) */ + average?: Maybe; + /** Distinct count aggregates across the matching connection (ignoring before/after/first/last/offset) */ + distinctCount?: Maybe; + keys?: Maybe>; + /** Maximum aggregates across the matching connection (ignoring before/after/first/last/offset) */ + max?: Maybe; + /** Minimum aggregates across the matching connection (ignoring before/after/first/last/offset) */ + min?: Maybe; + /** Population standard deviation aggregates across the matching connection (ignoring before/after/first/last/offset) */ + stddevPopulation?: Maybe; + /** Sample standard deviation aggregates across the matching connection (ignoring before/after/first/last/offset) */ + stddevSample?: Maybe; + /** Sum aggregates across the matching connection (ignoring before/after/first/last/offset) */ + sum?: Maybe; + /** Population variance aggregates across the matching connection (ignoring before/after/first/last/offset) */ + variancePopulation?: Maybe; + /** Sample variance aggregates across the matching connection (ignoring before/after/first/last/offset) */ + varianceSample?: Maybe; +}; + +export type AccountPoolRewardAverageAggregates = { + __typename?: 'AccountPoolRewardAverageAggregates'; + /** Mean average of accumulatedAmount across the matching connection */ + accumulatedAmount?: Maybe; + /** Mean average of amount across the matching connection */ + amount?: Maybe; + /** Mean average of blockNumber across the matching connection */ + blockNumber?: Maybe; + /** Mean average of poolId across the matching connection */ + poolId?: Maybe; + /** Mean average of 
timestamp across the matching connection */ + timestamp?: Maybe; +}; + +export type AccountPoolRewardDistinctCountAggregates = { + __typename?: 'AccountPoolRewardDistinctCountAggregates'; + /** Distinct count of accumulatedAmount across the matching connection */ + accumulatedAmount?: Maybe; + /** Distinct count of address across the matching connection */ + address?: Maybe; + /** Distinct count of amount across the matching connection */ + amount?: Maybe; + /** Distinct count of blockNumber across the matching connection */ + blockNumber?: Maybe; + /** Distinct count of id across the matching connection */ + id?: Maybe; + /** Distinct count of poolId across the matching connection */ + poolId?: Maybe; + /** Distinct count of timestamp across the matching connection */ + timestamp?: Maybe; + /** Distinct count of type across the matching connection */ + type?: Maybe; +}; + +/** A filter to be used against `AccountPoolReward` object types. All fields are combined with a logical ‘and.’ */ +export type AccountPoolRewardFilter = { + /** Filter by the object’s `accumulatedAmount` field. */ + accumulatedAmount?: InputMaybe; + /** Filter by the object’s `address` field. */ + address?: InputMaybe; + /** Filter by the object’s `amount` field. */ + amount?: InputMaybe; + /** Checks for all expressions in this list. */ + and?: InputMaybe>; + /** Filter by the object’s `blockNumber` field. */ + blockNumber?: InputMaybe; + /** Filter by the object’s `id` field. */ + id?: InputMaybe; + /** Negates the expression. */ + not?: InputMaybe; + /** Checks for any expressions in this list. */ + or?: InputMaybe>; + /** Filter by the object’s `poolId` field. */ + poolId?: InputMaybe; + /** Filter by the object’s `timestamp` field. */ + timestamp?: InputMaybe; + /** Filter by the object’s `type` field. 
*/ + type?: InputMaybe; +}; + +export type AccountPoolRewardMaxAggregates = { + __typename?: 'AccountPoolRewardMaxAggregates'; + /** Maximum of accumulatedAmount across the matching connection */ + accumulatedAmount?: Maybe; + /** Maximum of amount across the matching connection */ + amount?: Maybe; + /** Maximum of blockNumber across the matching connection */ + blockNumber?: Maybe; + /** Maximum of poolId across the matching connection */ + poolId?: Maybe; + /** Maximum of timestamp across the matching connection */ + timestamp?: Maybe; +}; + +export type AccountPoolRewardMinAggregates = { + __typename?: 'AccountPoolRewardMinAggregates'; + /** Minimum of accumulatedAmount across the matching connection */ + accumulatedAmount?: Maybe; + /** Minimum of amount across the matching connection */ + amount?: Maybe; + /** Minimum of blockNumber across the matching connection */ + blockNumber?: Maybe; + /** Minimum of poolId across the matching connection */ + poolId?: Maybe; + /** Minimum of timestamp across the matching connection */ + timestamp?: Maybe; +}; + +export type AccountPoolRewardStddevPopulationAggregates = { + __typename?: 'AccountPoolRewardStddevPopulationAggregates'; + /** Population standard deviation of accumulatedAmount across the matching connection */ + accumulatedAmount?: Maybe; + /** Population standard deviation of amount across the matching connection */ + amount?: Maybe; + /** Population standard deviation of blockNumber across the matching connection */ + blockNumber?: Maybe; + /** Population standard deviation of poolId across the matching connection */ + poolId?: Maybe; + /** Population standard deviation of timestamp across the matching connection */ + timestamp?: Maybe; +}; + +export type AccountPoolRewardStddevSampleAggregates = { + __typename?: 'AccountPoolRewardStddevSampleAggregates'; + /** Sample standard deviation of accumulatedAmount across the matching connection */ + accumulatedAmount?: Maybe; + /** Sample standard deviation of 
amount across the matching connection */ + amount?: Maybe; + /** Sample standard deviation of blockNumber across the matching connection */ + blockNumber?: Maybe; + /** Sample standard deviation of poolId across the matching connection */ + poolId?: Maybe; + /** Sample standard deviation of timestamp across the matching connection */ + timestamp?: Maybe; +}; + +export type AccountPoolRewardSumAggregates = { + __typename?: 'AccountPoolRewardSumAggregates'; + /** Sum of accumulatedAmount across the matching connection */ + accumulatedAmount: Scalars['BigFloat']['output']; + /** Sum of amount across the matching connection */ + amount: Scalars['BigFloat']['output']; + /** Sum of blockNumber across the matching connection */ + blockNumber: Scalars['BigInt']['output']; + /** Sum of poolId across the matching connection */ + poolId: Scalars['BigInt']['output']; + /** Sum of timestamp across the matching connection */ + timestamp: Scalars['BigFloat']['output']; +}; + +export type AccountPoolRewardVariancePopulationAggregates = { + __typename?: 'AccountPoolRewardVariancePopulationAggregates'; + /** Population variance of accumulatedAmount across the matching connection */ + accumulatedAmount?: Maybe; + /** Population variance of amount across the matching connection */ + amount?: Maybe; + /** Population variance of blockNumber across the matching connection */ + blockNumber?: Maybe; + /** Population variance of poolId across the matching connection */ + poolId?: Maybe; + /** Population variance of timestamp across the matching connection */ + timestamp?: Maybe; +}; + +export type AccountPoolRewardVarianceSampleAggregates = { + __typename?: 'AccountPoolRewardVarianceSampleAggregates'; + /** Sample variance of accumulatedAmount across the matching connection */ + accumulatedAmount?: Maybe; + /** Sample variance of amount across the matching connection */ + amount?: Maybe; + /** Sample variance of blockNumber across the matching connection */ + blockNumber?: Maybe; + /** 
Sample variance of poolId across the matching connection */ + poolId?: Maybe; + /** Sample variance of timestamp across the matching connection */ + timestamp?: Maybe; +}; + +/** A connection to a list of `AccountPoolReward` values. */ +export type AccountPoolRewardsConnection = { + __typename?: 'AccountPoolRewardsConnection'; + /** Aggregates across the matching connection (ignoring before/after/first/last/offset) */ + aggregates?: Maybe; + /** A list of edges which contains the `AccountPoolReward` and cursor to aid in pagination. */ + edges: Array; + /** Grouped aggregates across the matching connection (ignoring before/after/first/last/offset) */ + groupedAggregates?: Maybe>; + /** A list of `AccountPoolReward` objects. */ + nodes: Array>; + /** Information to aid in pagination. */ + pageInfo: PageInfo; + /** The count of *all* `AccountPoolReward` you could get from the connection. */ + totalCount: Scalars['Int']['output']; +}; + + +/** A connection to a list of `AccountPoolReward` values. */ +export type AccountPoolRewardsConnectionGroupedAggregatesArgs = { + groupBy: Array; + having?: InputMaybe; +}; + +/** A `AccountPoolReward` edge in the connection. */ +export type AccountPoolRewardsEdge = { + __typename?: 'AccountPoolRewardsEdge'; + /** A cursor for use in pagination. */ + cursor?: Maybe; + /** The `AccountPoolReward` at the end of the edge. */ + node?: Maybe; +}; + +/** Grouping methods for `AccountPoolReward` for usage during aggregation. 
*/ +export enum AccountPoolRewardsGroupBy { + AccumulatedAmount = 'ACCUMULATED_AMOUNT', + Address = 'ADDRESS', + Amount = 'AMOUNT', + BlockNumber = 'BLOCK_NUMBER', + PoolId = 'POOL_ID', + Timestamp = 'TIMESTAMP', + Type = 'TYPE' +} + +export type AccountPoolRewardsHavingAverageInput = { + accumulatedAmount?: InputMaybe; + amount?: InputMaybe; + blockNumber?: InputMaybe; + poolId?: InputMaybe; + timestamp?: InputMaybe; +}; + +export type AccountPoolRewardsHavingDistinctCountInput = { + accumulatedAmount?: InputMaybe; + amount?: InputMaybe; + blockNumber?: InputMaybe; + poolId?: InputMaybe; + timestamp?: InputMaybe; +}; + +/** Conditions for `AccountPoolReward` aggregates. */ +export type AccountPoolRewardsHavingInput = { + AND?: InputMaybe>; + OR?: InputMaybe>; + average?: InputMaybe; + distinctCount?: InputMaybe; + max?: InputMaybe; + min?: InputMaybe; + stddevPopulation?: InputMaybe; + stddevSample?: InputMaybe; + sum?: InputMaybe; + variancePopulation?: InputMaybe; + varianceSample?: InputMaybe; +}; + +export type AccountPoolRewardsHavingMaxInput = { + accumulatedAmount?: InputMaybe; + amount?: InputMaybe; + blockNumber?: InputMaybe; + poolId?: InputMaybe; + timestamp?: InputMaybe; +}; + +export type AccountPoolRewardsHavingMinInput = { + accumulatedAmount?: InputMaybe; + amount?: InputMaybe; + blockNumber?: InputMaybe; + poolId?: InputMaybe; + timestamp?: InputMaybe; +}; + +export type AccountPoolRewardsHavingStddevPopulationInput = { + accumulatedAmount?: InputMaybe; + amount?: InputMaybe; + blockNumber?: InputMaybe; + poolId?: InputMaybe; + timestamp?: InputMaybe; +}; + +export type AccountPoolRewardsHavingStddevSampleInput = { + accumulatedAmount?: InputMaybe; + amount?: InputMaybe; + blockNumber?: InputMaybe; + poolId?: InputMaybe; + timestamp?: InputMaybe; +}; + +export type AccountPoolRewardsHavingSumInput = { + accumulatedAmount?: InputMaybe; + amount?: InputMaybe; + blockNumber?: InputMaybe; + poolId?: InputMaybe; + timestamp?: InputMaybe; +}; + +export 
type AccountPoolRewardsHavingVariancePopulationInput = { + accumulatedAmount?: InputMaybe; + amount?: InputMaybe; + blockNumber?: InputMaybe; + poolId?: InputMaybe; + timestamp?: InputMaybe; +}; + +export type AccountPoolRewardsHavingVarianceSampleInput = { + accumulatedAmount?: InputMaybe; + amount?: InputMaybe; + blockNumber?: InputMaybe; + poolId?: InputMaybe; + timestamp?: InputMaybe; +}; + +/** Methods to use when ordering `AccountPoolReward`. */ +export enum AccountPoolRewardsOrderBy { + AccumulatedAmountAsc = 'ACCUMULATED_AMOUNT_ASC', + AccumulatedAmountDesc = 'ACCUMULATED_AMOUNT_DESC', + AddressAsc = 'ADDRESS_ASC', + AddressDesc = 'ADDRESS_DESC', + AmountAsc = 'AMOUNT_ASC', + AmountDesc = 'AMOUNT_DESC', + BlockNumberAsc = 'BLOCK_NUMBER_ASC', + BlockNumberDesc = 'BLOCK_NUMBER_DESC', + IdAsc = 'ID_ASC', + IdDesc = 'ID_DESC', + Natural = 'NATURAL', + PoolIdAsc = 'POOL_ID_ASC', + PoolIdDesc = 'POOL_ID_DESC', + PrimaryKeyAsc = 'PRIMARY_KEY_ASC', + PrimaryKeyDesc = 'PRIMARY_KEY_DESC', + TimestampAsc = 'TIMESTAMP_ASC', + TimestampDesc = 'TIMESTAMP_DESC', + TypeAsc = 'TYPE_ASC', + TypeDesc = 'TYPE_DESC' +} + +export type AccountReward = Node & { + __typename?: 'AccountReward'; + accumulatedAmount: Scalars['BigFloat']['output']; + address: Scalars['String']['output']; + amount: Scalars['BigFloat']['output']; + blockNumber: Scalars['Int']['output']; + id: Scalars['String']['output']; + /** A globally unique identifier. Can be used in various places throughout the system to identify this single value. 
*/ + nodeId: Scalars['ID']['output']; + timestamp: Scalars['BigFloat']['output']; + type: RewardType; +}; + +export type AccountRewardAggregates = { + __typename?: 'AccountRewardAggregates'; + /** Mean average aggregates across the matching connection (ignoring before/after/first/last/offset) */ + average?: Maybe; + /** Distinct count aggregates across the matching connection (ignoring before/after/first/last/offset) */ + distinctCount?: Maybe; + keys?: Maybe>; + /** Maximum aggregates across the matching connection (ignoring before/after/first/last/offset) */ + max?: Maybe; + /** Minimum aggregates across the matching connection (ignoring before/after/first/last/offset) */ + min?: Maybe; + /** Population standard deviation aggregates across the matching connection (ignoring before/after/first/last/offset) */ + stddevPopulation?: Maybe; + /** Sample standard deviation aggregates across the matching connection (ignoring before/after/first/last/offset) */ + stddevSample?: Maybe; + /** Sum aggregates across the matching connection (ignoring before/after/first/last/offset) */ + sum?: Maybe; + /** Population variance aggregates across the matching connection (ignoring before/after/first/last/offset) */ + variancePopulation?: Maybe; + /** Sample variance aggregates across the matching connection (ignoring before/after/first/last/offset) */ + varianceSample?: Maybe; +}; + +export type AccountRewardAverageAggregates = { + __typename?: 'AccountRewardAverageAggregates'; + /** Mean average of accumulatedAmount across the matching connection */ + accumulatedAmount?: Maybe; + /** Mean average of amount across the matching connection */ + amount?: Maybe; + /** Mean average of blockNumber across the matching connection */ + blockNumber?: Maybe; + /** Mean average of timestamp across the matching connection */ + timestamp?: Maybe; +}; + +export type AccountRewardDistinctCountAggregates = { + __typename?: 'AccountRewardDistinctCountAggregates'; + /** Distinct count of 
accumulatedAmount across the matching connection */ + accumulatedAmount?: Maybe; + /** Distinct count of address across the matching connection */ + address?: Maybe; + /** Distinct count of amount across the matching connection */ + amount?: Maybe; + /** Distinct count of blockNumber across the matching connection */ + blockNumber?: Maybe; + /** Distinct count of id across the matching connection */ + id?: Maybe; + /** Distinct count of timestamp across the matching connection */ + timestamp?: Maybe; + /** Distinct count of type across the matching connection */ + type?: Maybe; +}; + +/** A filter to be used against `AccountReward` object types. All fields are combined with a logical ‘and.’ */ +export type AccountRewardFilter = { + /** Filter by the object’s `accumulatedAmount` field. */ + accumulatedAmount?: InputMaybe; + /** Filter by the object’s `address` field. */ + address?: InputMaybe; + /** Filter by the object’s `amount` field. */ + amount?: InputMaybe; + /** Checks for all expressions in this list. */ + and?: InputMaybe>; + /** Filter by the object’s `blockNumber` field. */ + blockNumber?: InputMaybe; + /** Filter by the object’s `id` field. */ + id?: InputMaybe; + /** Negates the expression. */ + not?: InputMaybe; + /** Checks for any expressions in this list. */ + or?: InputMaybe>; + /** Filter by the object’s `timestamp` field. */ + timestamp?: InputMaybe; + /** Filter by the object’s `type` field. 
*/ + type?: InputMaybe; +}; + +export type AccountRewardMaxAggregates = { + __typename?: 'AccountRewardMaxAggregates'; + /** Maximum of accumulatedAmount across the matching connection */ + accumulatedAmount?: Maybe; + /** Maximum of amount across the matching connection */ + amount?: Maybe; + /** Maximum of blockNumber across the matching connection */ + blockNumber?: Maybe; + /** Maximum of timestamp across the matching connection */ + timestamp?: Maybe; +}; + +export type AccountRewardMinAggregates = { + __typename?: 'AccountRewardMinAggregates'; + /** Minimum of accumulatedAmount across the matching connection */ + accumulatedAmount?: Maybe; + /** Minimum of amount across the matching connection */ + amount?: Maybe; + /** Minimum of blockNumber across the matching connection */ + blockNumber?: Maybe; + /** Minimum of timestamp across the matching connection */ + timestamp?: Maybe; +}; + +export type AccountRewardStddevPopulationAggregates = { + __typename?: 'AccountRewardStddevPopulationAggregates'; + /** Population standard deviation of accumulatedAmount across the matching connection */ + accumulatedAmount?: Maybe; + /** Population standard deviation of amount across the matching connection */ + amount?: Maybe; + /** Population standard deviation of blockNumber across the matching connection */ + blockNumber?: Maybe; + /** Population standard deviation of timestamp across the matching connection */ + timestamp?: Maybe; +}; + +export type AccountRewardStddevSampleAggregates = { + __typename?: 'AccountRewardStddevSampleAggregates'; + /** Sample standard deviation of accumulatedAmount across the matching connection */ + accumulatedAmount?: Maybe; + /** Sample standard deviation of amount across the matching connection */ + amount?: Maybe; + /** Sample standard deviation of blockNumber across the matching connection */ + blockNumber?: Maybe; + /** Sample standard deviation of timestamp across the matching connection */ + timestamp?: Maybe; +}; + +export type 
AccountRewardSumAggregates = { + __typename?: 'AccountRewardSumAggregates'; + /** Sum of accumulatedAmount across the matching connection */ + accumulatedAmount: Scalars['BigFloat']['output']; + /** Sum of amount across the matching connection */ + amount: Scalars['BigFloat']['output']; + /** Sum of blockNumber across the matching connection */ + blockNumber: Scalars['BigInt']['output']; + /** Sum of timestamp across the matching connection */ + timestamp: Scalars['BigFloat']['output']; +}; + +export type AccountRewardVariancePopulationAggregates = { + __typename?: 'AccountRewardVariancePopulationAggregates'; + /** Population variance of accumulatedAmount across the matching connection */ + accumulatedAmount?: Maybe; + /** Population variance of amount across the matching connection */ + amount?: Maybe; + /** Population variance of blockNumber across the matching connection */ + blockNumber?: Maybe; + /** Population variance of timestamp across the matching connection */ + timestamp?: Maybe; +}; + +export type AccountRewardVarianceSampleAggregates = { + __typename?: 'AccountRewardVarianceSampleAggregates'; + /** Sample variance of accumulatedAmount across the matching connection */ + accumulatedAmount?: Maybe; + /** Sample variance of amount across the matching connection */ + amount?: Maybe; + /** Sample variance of blockNumber across the matching connection */ + blockNumber?: Maybe; + /** Sample variance of timestamp across the matching connection */ + timestamp?: Maybe; +}; + +/** A connection to a list of `AccountReward` values. */ +export type AccountRewardsConnection = { + __typename?: 'AccountRewardsConnection'; + /** Aggregates across the matching connection (ignoring before/after/first/last/offset) */ + aggregates?: Maybe; + /** A list of edges which contains the `AccountReward` and cursor to aid in pagination. 
*/ + edges: Array; + /** Grouped aggregates across the matching connection (ignoring before/after/first/last/offset) */ + groupedAggregates?: Maybe>; + /** A list of `AccountReward` objects. */ + nodes: Array>; + /** Information to aid in pagination. */ + pageInfo: PageInfo; + /** The count of *all* `AccountReward` you could get from the connection. */ + totalCount: Scalars['Int']['output']; +}; + + +/** A connection to a list of `AccountReward` values. */ +export type AccountRewardsConnectionGroupedAggregatesArgs = { + groupBy: Array; + having?: InputMaybe; +}; + +/** A `AccountReward` edge in the connection. */ +export type AccountRewardsEdge = { + __typename?: 'AccountRewardsEdge'; + /** A cursor for use in pagination. */ + cursor?: Maybe; + /** The `AccountReward` at the end of the edge. */ + node?: Maybe; +}; + +/** Grouping methods for `AccountReward` for usage during aggregation. */ +export enum AccountRewardsGroupBy { + AccumulatedAmount = 'ACCUMULATED_AMOUNT', + Address = 'ADDRESS', + Amount = 'AMOUNT', + BlockNumber = 'BLOCK_NUMBER', + Timestamp = 'TIMESTAMP', + Type = 'TYPE' +} + +export type AccountRewardsHavingAverageInput = { + accumulatedAmount?: InputMaybe; + amount?: InputMaybe; + blockNumber?: InputMaybe; + timestamp?: InputMaybe; +}; + +export type AccountRewardsHavingDistinctCountInput = { + accumulatedAmount?: InputMaybe; + amount?: InputMaybe; + blockNumber?: InputMaybe; + timestamp?: InputMaybe; +}; + +/** Conditions for `AccountReward` aggregates. 
*/ +export type AccountRewardsHavingInput = { + AND?: InputMaybe>; + OR?: InputMaybe>; + average?: InputMaybe; + distinctCount?: InputMaybe; + max?: InputMaybe; + min?: InputMaybe; + stddevPopulation?: InputMaybe; + stddevSample?: InputMaybe; + sum?: InputMaybe; + variancePopulation?: InputMaybe; + varianceSample?: InputMaybe; +}; + +export type AccountRewardsHavingMaxInput = { + accumulatedAmount?: InputMaybe; + amount?: InputMaybe; + blockNumber?: InputMaybe; + timestamp?: InputMaybe; +}; + +export type AccountRewardsHavingMinInput = { + accumulatedAmount?: InputMaybe; + amount?: InputMaybe; + blockNumber?: InputMaybe; + timestamp?: InputMaybe; +}; + +export type AccountRewardsHavingStddevPopulationInput = { + accumulatedAmount?: InputMaybe; + amount?: InputMaybe; + blockNumber?: InputMaybe; + timestamp?: InputMaybe; +}; + +export type AccountRewardsHavingStddevSampleInput = { + accumulatedAmount?: InputMaybe; + amount?: InputMaybe; + blockNumber?: InputMaybe; + timestamp?: InputMaybe; +}; + +export type AccountRewardsHavingSumInput = { + accumulatedAmount?: InputMaybe; + amount?: InputMaybe; + blockNumber?: InputMaybe; + timestamp?: InputMaybe; +}; + +export type AccountRewardsHavingVariancePopulationInput = { + accumulatedAmount?: InputMaybe; + amount?: InputMaybe; + blockNumber?: InputMaybe; + timestamp?: InputMaybe; +}; + +export type AccountRewardsHavingVarianceSampleInput = { + accumulatedAmount?: InputMaybe; + amount?: InputMaybe; + blockNumber?: InputMaybe; + timestamp?: InputMaybe; +}; + +/** Methods to use when ordering `AccountReward`. 
*/ +export enum AccountRewardsOrderBy { + AccumulatedAmountAsc = 'ACCUMULATED_AMOUNT_ASC', + AccumulatedAmountDesc = 'ACCUMULATED_AMOUNT_DESC', + AddressAsc = 'ADDRESS_ASC', + AddressDesc = 'ADDRESS_DESC', + AmountAsc = 'AMOUNT_ASC', + AmountDesc = 'AMOUNT_DESC', + BlockNumberAsc = 'BLOCK_NUMBER_ASC', + BlockNumberDesc = 'BLOCK_NUMBER_DESC', + IdAsc = 'ID_ASC', + IdDesc = 'ID_DESC', + Natural = 'NATURAL', + PrimaryKeyAsc = 'PRIMARY_KEY_ASC', + PrimaryKeyDesc = 'PRIMARY_KEY_DESC', + TimestampAsc = 'TIMESTAMP_ASC', + TimestampDesc = 'TIMESTAMP_DESC', + TypeAsc = 'TYPE_ASC', + TypeDesc = 'TYPE_DESC' +} + +export type AccumulatedPoolReward = Node & { + __typename?: 'AccumulatedPoolReward'; + amount: Scalars['BigFloat']['output']; + id: Scalars['String']['output']; + /** A globally unique identifier. Can be used in various places throughout the system to identify this single value. */ + nodeId: Scalars['ID']['output']; +}; + +export type AccumulatedPoolRewardAggregates = { + __typename?: 'AccumulatedPoolRewardAggregates'; + /** Mean average aggregates across the matching connection (ignoring before/after/first/last/offset) */ + average?: Maybe; + /** Distinct count aggregates across the matching connection (ignoring before/after/first/last/offset) */ + distinctCount?: Maybe; + keys?: Maybe>; + /** Maximum aggregates across the matching connection (ignoring before/after/first/last/offset) */ + max?: Maybe; + /** Minimum aggregates across the matching connection (ignoring before/after/first/last/offset) */ + min?: Maybe; + /** Population standard deviation aggregates across the matching connection (ignoring before/after/first/last/offset) */ + stddevPopulation?: Maybe; + /** Sample standard deviation aggregates across the matching connection (ignoring before/after/first/last/offset) */ + stddevSample?: Maybe; + /** Sum aggregates across the matching connection (ignoring before/after/first/last/offset) */ + sum?: Maybe; + /** Population variance aggregates across the 
matching connection (ignoring before/after/first/last/offset) */ + variancePopulation?: Maybe; + /** Sample variance aggregates across the matching connection (ignoring before/after/first/last/offset) */ + varianceSample?: Maybe; +}; + +export type AccumulatedPoolRewardAverageAggregates = { + __typename?: 'AccumulatedPoolRewardAverageAggregates'; + /** Mean average of amount across the matching connection */ + amount?: Maybe; +}; + +export type AccumulatedPoolRewardDistinctCountAggregates = { + __typename?: 'AccumulatedPoolRewardDistinctCountAggregates'; + /** Distinct count of amount across the matching connection */ + amount?: Maybe; + /** Distinct count of id across the matching connection */ + id?: Maybe; +}; + +/** A filter to be used against `AccumulatedPoolReward` object types. All fields are combined with a logical ‘and.’ */ +export type AccumulatedPoolRewardFilter = { + /** Filter by the object’s `amount` field. */ + amount?: InputMaybe; + /** Checks for all expressions in this list. */ + and?: InputMaybe>; + /** Filter by the object’s `id` field. */ + id?: InputMaybe; + /** Negates the expression. */ + not?: InputMaybe; + /** Checks for any expressions in this list. 
*/ + or?: InputMaybe>; +}; + +export type AccumulatedPoolRewardMaxAggregates = { + __typename?: 'AccumulatedPoolRewardMaxAggregates'; + /** Maximum of amount across the matching connection */ + amount?: Maybe; +}; + +export type AccumulatedPoolRewardMinAggregates = { + __typename?: 'AccumulatedPoolRewardMinAggregates'; + /** Minimum of amount across the matching connection */ + amount?: Maybe; +}; + +export type AccumulatedPoolRewardStddevPopulationAggregates = { + __typename?: 'AccumulatedPoolRewardStddevPopulationAggregates'; + /** Population standard deviation of amount across the matching connection */ + amount?: Maybe; +}; + +export type AccumulatedPoolRewardStddevSampleAggregates = { + __typename?: 'AccumulatedPoolRewardStddevSampleAggregates'; + /** Sample standard deviation of amount across the matching connection */ + amount?: Maybe; +}; + +export type AccumulatedPoolRewardSumAggregates = { + __typename?: 'AccumulatedPoolRewardSumAggregates'; + /** Sum of amount across the matching connection */ + amount: Scalars['BigFloat']['output']; +}; + +export type AccumulatedPoolRewardVariancePopulationAggregates = { + __typename?: 'AccumulatedPoolRewardVariancePopulationAggregates'; + /** Population variance of amount across the matching connection */ + amount?: Maybe; +}; + +export type AccumulatedPoolRewardVarianceSampleAggregates = { + __typename?: 'AccumulatedPoolRewardVarianceSampleAggregates'; + /** Sample variance of amount across the matching connection */ + amount?: Maybe; +}; + +/** A connection to a list of `AccumulatedPoolReward` values. */ +export type AccumulatedPoolRewardsConnection = { + __typename?: 'AccumulatedPoolRewardsConnection'; + /** Aggregates across the matching connection (ignoring before/after/first/last/offset) */ + aggregates?: Maybe; + /** A list of edges which contains the `AccumulatedPoolReward` and cursor to aid in pagination. 
*/ + edges: Array; + /** Grouped aggregates across the matching connection (ignoring before/after/first/last/offset) */ + groupedAggregates?: Maybe>; + /** A list of `AccumulatedPoolReward` objects. */ + nodes: Array>; + /** Information to aid in pagination. */ + pageInfo: PageInfo; + /** The count of *all* `AccumulatedPoolReward` you could get from the connection. */ + totalCount: Scalars['Int']['output']; +}; + + +/** A connection to a list of `AccumulatedPoolReward` values. */ +export type AccumulatedPoolRewardsConnectionGroupedAggregatesArgs = { + groupBy: Array; + having?: InputMaybe; +}; + +/** A `AccumulatedPoolReward` edge in the connection. */ +export type AccumulatedPoolRewardsEdge = { + __typename?: 'AccumulatedPoolRewardsEdge'; + /** A cursor for use in pagination. */ + cursor?: Maybe; + /** The `AccumulatedPoolReward` at the end of the edge. */ + node?: Maybe; +}; + +/** Grouping methods for `AccumulatedPoolReward` for usage during aggregation. */ +export enum AccumulatedPoolRewardsGroupBy { + Amount = 'AMOUNT' +} + +export type AccumulatedPoolRewardsHavingAverageInput = { + amount?: InputMaybe; +}; + +export type AccumulatedPoolRewardsHavingDistinctCountInput = { + amount?: InputMaybe; +}; + +/** Conditions for `AccumulatedPoolReward` aggregates. 
*/ +export type AccumulatedPoolRewardsHavingInput = { + AND?: InputMaybe>; + OR?: InputMaybe>; + average?: InputMaybe; + distinctCount?: InputMaybe; + max?: InputMaybe; + min?: InputMaybe; + stddevPopulation?: InputMaybe; + stddevSample?: InputMaybe; + sum?: InputMaybe; + variancePopulation?: InputMaybe; + varianceSample?: InputMaybe; +}; + +export type AccumulatedPoolRewardsHavingMaxInput = { + amount?: InputMaybe; +}; + +export type AccumulatedPoolRewardsHavingMinInput = { + amount?: InputMaybe; +}; + +export type AccumulatedPoolRewardsHavingStddevPopulationInput = { + amount?: InputMaybe; +}; + +export type AccumulatedPoolRewardsHavingStddevSampleInput = { + amount?: InputMaybe; +}; + +export type AccumulatedPoolRewardsHavingSumInput = { + amount?: InputMaybe; +}; + +export type AccumulatedPoolRewardsHavingVariancePopulationInput = { + amount?: InputMaybe; +}; + +export type AccumulatedPoolRewardsHavingVarianceSampleInput = { + amount?: InputMaybe; +}; + +/** Methods to use when ordering `AccumulatedPoolReward`. */ +export enum AccumulatedPoolRewardsOrderBy { + AmountAsc = 'AMOUNT_ASC', + AmountDesc = 'AMOUNT_DESC', + IdAsc = 'ID_ASC', + IdDesc = 'ID_DESC', + Natural = 'NATURAL', + PrimaryKeyAsc = 'PRIMARY_KEY_ASC', + PrimaryKeyDesc = 'PRIMARY_KEY_DESC' +} + +export type AccumulatedReward = Node & { + __typename?: 'AccumulatedReward'; + amount: Scalars['BigFloat']['output']; + id: Scalars['String']['output']; + /** A globally unique identifier. Can be used in various places throughout the system to identify this single value. 
*/ + nodeId: Scalars['ID']['output']; +}; + +export type AccumulatedRewardAggregates = { + __typename?: 'AccumulatedRewardAggregates'; + /** Mean average aggregates across the matching connection (ignoring before/after/first/last/offset) */ + average?: Maybe; + /** Distinct count aggregates across the matching connection (ignoring before/after/first/last/offset) */ + distinctCount?: Maybe; + keys?: Maybe>; + /** Maximum aggregates across the matching connection (ignoring before/after/first/last/offset) */ + max?: Maybe; + /** Minimum aggregates across the matching connection (ignoring before/after/first/last/offset) */ + min?: Maybe; + /** Population standard deviation aggregates across the matching connection (ignoring before/after/first/last/offset) */ + stddevPopulation?: Maybe; + /** Sample standard deviation aggregates across the matching connection (ignoring before/after/first/last/offset) */ + stddevSample?: Maybe; + /** Sum aggregates across the matching connection (ignoring before/after/first/last/offset) */ + sum?: Maybe; + /** Population variance aggregates across the matching connection (ignoring before/after/first/last/offset) */ + variancePopulation?: Maybe; + /** Sample variance aggregates across the matching connection (ignoring before/after/first/last/offset) */ + varianceSample?: Maybe; +}; + +export type AccumulatedRewardAverageAggregates = { + __typename?: 'AccumulatedRewardAverageAggregates'; + /** Mean average of amount across the matching connection */ + amount?: Maybe; +}; + +export type AccumulatedRewardDistinctCountAggregates = { + __typename?: 'AccumulatedRewardDistinctCountAggregates'; + /** Distinct count of amount across the matching connection */ + amount?: Maybe; + /** Distinct count of id across the matching connection */ + id?: Maybe; +}; + +/** A filter to be used against `AccumulatedReward` object types. 
All fields are combined with a logical ‘and.’ */ +export type AccumulatedRewardFilter = { + /** Filter by the object’s `amount` field. */ + amount?: InputMaybe; + /** Checks for all expressions in this list. */ + and?: InputMaybe>; + /** Filter by the object’s `id` field. */ + id?: InputMaybe; + /** Negates the expression. */ + not?: InputMaybe; + /** Checks for any expressions in this list. */ + or?: InputMaybe>; +}; + +export type AccumulatedRewardMaxAggregates = { + __typename?: 'AccumulatedRewardMaxAggregates'; + /** Maximum of amount across the matching connection */ + amount?: Maybe; +}; + +export type AccumulatedRewardMinAggregates = { + __typename?: 'AccumulatedRewardMinAggregates'; + /** Minimum of amount across the matching connection */ + amount?: Maybe; +}; + +export type AccumulatedRewardStddevPopulationAggregates = { + __typename?: 'AccumulatedRewardStddevPopulationAggregates'; + /** Population standard deviation of amount across the matching connection */ + amount?: Maybe; +}; + +export type AccumulatedRewardStddevSampleAggregates = { + __typename?: 'AccumulatedRewardStddevSampleAggregates'; + /** Sample standard deviation of amount across the matching connection */ + amount?: Maybe; +}; + +export type AccumulatedRewardSumAggregates = { + __typename?: 'AccumulatedRewardSumAggregates'; + /** Sum of amount across the matching connection */ + amount: Scalars['BigFloat']['output']; +}; + +export type AccumulatedRewardVariancePopulationAggregates = { + __typename?: 'AccumulatedRewardVariancePopulationAggregates'; + /** Population variance of amount across the matching connection */ + amount?: Maybe; +}; + +export type AccumulatedRewardVarianceSampleAggregates = { + __typename?: 'AccumulatedRewardVarianceSampleAggregates'; + /** Sample variance of amount across the matching connection */ + amount?: Maybe; +}; + +/** A connection to a list of `AccumulatedReward` values. 
*/ +export type AccumulatedRewardsConnection = { + __typename?: 'AccumulatedRewardsConnection'; + /** Aggregates across the matching connection (ignoring before/after/first/last/offset) */ + aggregates?: Maybe; + /** A list of edges which contains the `AccumulatedReward` and cursor to aid in pagination. */ + edges: Array; + /** Grouped aggregates across the matching connection (ignoring before/after/first/last/offset) */ + groupedAggregates?: Maybe>; + /** A list of `AccumulatedReward` objects. */ + nodes: Array>; + /** Information to aid in pagination. */ + pageInfo: PageInfo; + /** The count of *all* `AccumulatedReward` you could get from the connection. */ + totalCount: Scalars['Int']['output']; +}; + + +/** A connection to a list of `AccumulatedReward` values. */ +export type AccumulatedRewardsConnectionGroupedAggregatesArgs = { + groupBy: Array; + having?: InputMaybe; +}; + +/** A `AccumulatedReward` edge in the connection. */ +export type AccumulatedRewardsEdge = { + __typename?: 'AccumulatedRewardsEdge'; + /** A cursor for use in pagination. */ + cursor?: Maybe; + /** The `AccumulatedReward` at the end of the edge. */ + node?: Maybe; +}; + +/** Grouping methods for `AccumulatedReward` for usage during aggregation. */ +export enum AccumulatedRewardsGroupBy { + Amount = 'AMOUNT' +} + +export type AccumulatedRewardsHavingAverageInput = { + amount?: InputMaybe; +}; + +export type AccumulatedRewardsHavingDistinctCountInput = { + amount?: InputMaybe; +}; + +/** Conditions for `AccumulatedReward` aggregates. 
*/ +export type AccumulatedRewardsHavingInput = { + AND?: InputMaybe>; + OR?: InputMaybe>; + average?: InputMaybe; + distinctCount?: InputMaybe; + max?: InputMaybe; + min?: InputMaybe; + stddevPopulation?: InputMaybe; + stddevSample?: InputMaybe; + sum?: InputMaybe; + variancePopulation?: InputMaybe; + varianceSample?: InputMaybe; +}; + +export type AccumulatedRewardsHavingMaxInput = { + amount?: InputMaybe; +}; + +export type AccumulatedRewardsHavingMinInput = { + amount?: InputMaybe; +}; + +export type AccumulatedRewardsHavingStddevPopulationInput = { + amount?: InputMaybe; +}; + +export type AccumulatedRewardsHavingStddevSampleInput = { + amount?: InputMaybe; +}; + +export type AccumulatedRewardsHavingSumInput = { + amount?: InputMaybe; +}; + +export type AccumulatedRewardsHavingVariancePopulationInput = { + amount?: InputMaybe; +}; + +export type AccumulatedRewardsHavingVarianceSampleInput = { + amount?: InputMaybe; +}; + +/** Methods to use when ordering `AccumulatedReward`. */ +export enum AccumulatedRewardsOrderBy { + AmountAsc = 'AMOUNT_ASC', + AmountDesc = 'AMOUNT_DESC', + IdAsc = 'ID_ASC', + IdDesc = 'ID_DESC', + Natural = 'NATURAL', + PrimaryKeyAsc = 'PRIMARY_KEY_ASC', + PrimaryKeyDesc = 'PRIMARY_KEY_DESC' +} + +/** A filter to be used against BigFloat fields. All fields are combined with a logical ‘and.’ */ +export type BigFloatFilter = { + /** Not equal to the specified value, treating null like an ordinary value. */ + distinctFrom?: InputMaybe; + /** Equal to the specified value. */ + equalTo?: InputMaybe; + /** Greater than the specified value. */ + greaterThan?: InputMaybe; + /** Greater than or equal to the specified value. */ + greaterThanOrEqualTo?: InputMaybe; + /** Included in the specified list. */ + in?: InputMaybe>; + /** Is null (if `true` is specified) or is not null (if `false` is specified). */ + isNull?: InputMaybe; + /** Less than the specified value. */ + lessThan?: InputMaybe; + /** Less than or equal to the specified value. 
*/ + lessThanOrEqualTo?: InputMaybe; + /** Equal to the specified value, treating null like an ordinary value. */ + notDistinctFrom?: InputMaybe; + /** Not equal to the specified value. */ + notEqualTo?: InputMaybe; + /** Not included in the specified list. */ + notIn?: InputMaybe>; +}; + +/** A filter to be used against Datetime fields. All fields are combined with a logical ‘and.’ */ +export type DatetimeFilter = { + /** Not equal to the specified value, treating null like an ordinary value. */ + distinctFrom?: InputMaybe; + /** Equal to the specified value. */ + equalTo?: InputMaybe; + /** Greater than the specified value. */ + greaterThan?: InputMaybe; + /** Greater than or equal to the specified value. */ + greaterThanOrEqualTo?: InputMaybe; + /** Included in the specified list. */ + in?: InputMaybe>; + /** Is null (if `true` is specified) or is not null (if `false` is specified). */ + isNull?: InputMaybe; + /** Less than the specified value. */ + lessThan?: InputMaybe; + /** Less than or equal to the specified value. */ + lessThanOrEqualTo?: InputMaybe; + /** Equal to the specified value, treating null like an ordinary value. */ + notDistinctFrom?: InputMaybe; + /** Not equal to the specified value. */ + notEqualTo?: InputMaybe; + /** Not included in the specified list. */ + notIn?: InputMaybe>; +}; + +export type EraValidatorInfo = Node & { + __typename?: 'EraValidatorInfo'; + address: Scalars['String']['output']; + era: Scalars['Int']['output']; + id: Scalars['String']['output']; + /** A globally unique identifier. Can be used in various places throughout the system to identify this single value. 
*/ + nodeId: Scalars['ID']['output']; + others: Scalars['JSON']['output']; + own: Scalars['BigFloat']['output']; + total: Scalars['BigFloat']['output']; +}; + +export type EraValidatorInfoAggregates = { + __typename?: 'EraValidatorInfoAggregates'; + /** Mean average aggregates across the matching connection (ignoring before/after/first/last/offset) */ + average?: Maybe; + /** Distinct count aggregates across the matching connection (ignoring before/after/first/last/offset) */ + distinctCount?: Maybe; + keys?: Maybe>; + /** Maximum aggregates across the matching connection (ignoring before/after/first/last/offset) */ + max?: Maybe; + /** Minimum aggregates across the matching connection (ignoring before/after/first/last/offset) */ + min?: Maybe; + /** Population standard deviation aggregates across the matching connection (ignoring before/after/first/last/offset) */ + stddevPopulation?: Maybe; + /** Sample standard deviation aggregates across the matching connection (ignoring before/after/first/last/offset) */ + stddevSample?: Maybe; + /** Sum aggregates across the matching connection (ignoring before/after/first/last/offset) */ + sum?: Maybe; + /** Population variance aggregates across the matching connection (ignoring before/after/first/last/offset) */ + variancePopulation?: Maybe; + /** Sample variance aggregates across the matching connection (ignoring before/after/first/last/offset) */ + varianceSample?: Maybe; +}; + +export type EraValidatorInfoAverageAggregates = { + __typename?: 'EraValidatorInfoAverageAggregates'; + /** Mean average of era across the matching connection */ + era?: Maybe; + /** Mean average of own across the matching connection */ + own?: Maybe; + /** Mean average of total across the matching connection */ + total?: Maybe; +}; + +export type EraValidatorInfoDistinctCountAggregates = { + __typename?: 'EraValidatorInfoDistinctCountAggregates'; + /** Distinct count of address across the matching connection */ + address?: Maybe; + /** Distinct 
count of era across the matching connection */ + era?: Maybe; + /** Distinct count of id across the matching connection */ + id?: Maybe; + /** Distinct count of others across the matching connection */ + others?: Maybe; + /** Distinct count of own across the matching connection */ + own?: Maybe; + /** Distinct count of total across the matching connection */ + total?: Maybe; +}; + +/** A filter to be used against `EraValidatorInfo` object types. All fields are combined with a logical ‘and.’ */ +export type EraValidatorInfoFilter = { + /** Filter by the object’s `address` field. */ + address?: InputMaybe; + /** Checks for all expressions in this list. */ + and?: InputMaybe>; + /** Filter by the object’s `era` field. */ + era?: InputMaybe; + /** Filter by the object’s `id` field. */ + id?: InputMaybe; + /** Negates the expression. */ + not?: InputMaybe; + /** Checks for any expressions in this list. */ + or?: InputMaybe>; + /** Filter by the object’s `others` field. */ + others?: InputMaybe; + /** Filter by the object’s `own` field. */ + own?: InputMaybe; + /** Filter by the object’s `total` field. 
*/ + total?: InputMaybe; +}; + +export type EraValidatorInfoMaxAggregates = { + __typename?: 'EraValidatorInfoMaxAggregates'; + /** Maximum of era across the matching connection */ + era?: Maybe; + /** Maximum of own across the matching connection */ + own?: Maybe; + /** Maximum of total across the matching connection */ + total?: Maybe; +}; + +export type EraValidatorInfoMinAggregates = { + __typename?: 'EraValidatorInfoMinAggregates'; + /** Minimum of era across the matching connection */ + era?: Maybe; + /** Minimum of own across the matching connection */ + own?: Maybe; + /** Minimum of total across the matching connection */ + total?: Maybe; +}; + +export type EraValidatorInfoStddevPopulationAggregates = { + __typename?: 'EraValidatorInfoStddevPopulationAggregates'; + /** Population standard deviation of era across the matching connection */ + era?: Maybe; + /** Population standard deviation of own across the matching connection */ + own?: Maybe; + /** Population standard deviation of total across the matching connection */ + total?: Maybe; +}; + +export type EraValidatorInfoStddevSampleAggregates = { + __typename?: 'EraValidatorInfoStddevSampleAggregates'; + /** Sample standard deviation of era across the matching connection */ + era?: Maybe; + /** Sample standard deviation of own across the matching connection */ + own?: Maybe; + /** Sample standard deviation of total across the matching connection */ + total?: Maybe; +}; + +export type EraValidatorInfoSumAggregates = { + __typename?: 'EraValidatorInfoSumAggregates'; + /** Sum of era across the matching connection */ + era: Scalars['BigInt']['output']; + /** Sum of own across the matching connection */ + own: Scalars['BigFloat']['output']; + /** Sum of total across the matching connection */ + total: Scalars['BigFloat']['output']; +}; + +export type EraValidatorInfoVariancePopulationAggregates = { + __typename?: 'EraValidatorInfoVariancePopulationAggregates'; + /** Population variance of era across the 
matching connection */ + era?: Maybe; + /** Population variance of own across the matching connection */ + own?: Maybe; + /** Population variance of total across the matching connection */ + total?: Maybe; +}; + +export type EraValidatorInfoVarianceSampleAggregates = { + __typename?: 'EraValidatorInfoVarianceSampleAggregates'; + /** Sample variance of era across the matching connection */ + era?: Maybe; + /** Sample variance of own across the matching connection */ + own?: Maybe; + /** Sample variance of total across the matching connection */ + total?: Maybe; +}; + +/** A connection to a list of `EraValidatorInfo` values. */ +export type EraValidatorInfosConnection = { + __typename?: 'EraValidatorInfosConnection'; + /** Aggregates across the matching connection (ignoring before/after/first/last/offset) */ + aggregates?: Maybe; + /** A list of edges which contains the `EraValidatorInfo` and cursor to aid in pagination. */ + edges: Array; + /** Grouped aggregates across the matching connection (ignoring before/after/first/last/offset) */ + groupedAggregates?: Maybe>; + /** A list of `EraValidatorInfo` objects. */ + nodes: Array>; + /** Information to aid in pagination. */ + pageInfo: PageInfo; + /** The count of *all* `EraValidatorInfo` you could get from the connection. */ + totalCount: Scalars['Int']['output']; +}; + + +/** A connection to a list of `EraValidatorInfo` values. */ +export type EraValidatorInfosConnectionGroupedAggregatesArgs = { + groupBy: Array; + having?: InputMaybe; +}; + +/** A `EraValidatorInfo` edge in the connection. */ +export type EraValidatorInfosEdge = { + __typename?: 'EraValidatorInfosEdge'; + /** A cursor for use in pagination. */ + cursor?: Maybe; + /** The `EraValidatorInfo` at the end of the edge. */ + node?: Maybe; +}; + +/** Grouping methods for `EraValidatorInfo` for usage during aggregation. 
*/ +export enum EraValidatorInfosGroupBy { + Address = 'ADDRESS', + Era = 'ERA', + Others = 'OTHERS', + Own = 'OWN', + Total = 'TOTAL' +} + +export type EraValidatorInfosHavingAverageInput = { + era?: InputMaybe; + own?: InputMaybe; + total?: InputMaybe; +}; + +export type EraValidatorInfosHavingDistinctCountInput = { + era?: InputMaybe; + own?: InputMaybe; + total?: InputMaybe; +}; + +/** Conditions for `EraValidatorInfo` aggregates. */ +export type EraValidatorInfosHavingInput = { + AND?: InputMaybe>; + OR?: InputMaybe>; + average?: InputMaybe; + distinctCount?: InputMaybe; + max?: InputMaybe; + min?: InputMaybe; + stddevPopulation?: InputMaybe; + stddevSample?: InputMaybe; + sum?: InputMaybe; + variancePopulation?: InputMaybe; + varianceSample?: InputMaybe; +}; + +export type EraValidatorInfosHavingMaxInput = { + era?: InputMaybe; + own?: InputMaybe; + total?: InputMaybe; +}; + +export type EraValidatorInfosHavingMinInput = { + era?: InputMaybe; + own?: InputMaybe; + total?: InputMaybe; +}; + +export type EraValidatorInfosHavingStddevPopulationInput = { + era?: InputMaybe; + own?: InputMaybe; + total?: InputMaybe; +}; + +export type EraValidatorInfosHavingStddevSampleInput = { + era?: InputMaybe; + own?: InputMaybe; + total?: InputMaybe; +}; + +export type EraValidatorInfosHavingSumInput = { + era?: InputMaybe; + own?: InputMaybe; + total?: InputMaybe; +}; + +export type EraValidatorInfosHavingVariancePopulationInput = { + era?: InputMaybe; + own?: InputMaybe; + total?: InputMaybe; +}; + +export type EraValidatorInfosHavingVarianceSampleInput = { + era?: InputMaybe; + own?: InputMaybe; + total?: InputMaybe; +}; + +/** Methods to use when ordering `EraValidatorInfo`. 
*/ +export enum EraValidatorInfosOrderBy { + AddressAsc = 'ADDRESS_ASC', + AddressDesc = 'ADDRESS_DESC', + EraAsc = 'ERA_ASC', + EraDesc = 'ERA_DESC', + IdAsc = 'ID_ASC', + IdDesc = 'ID_DESC', + Natural = 'NATURAL', + OthersAsc = 'OTHERS_ASC', + OthersDesc = 'OTHERS_DESC', + OwnAsc = 'OWN_ASC', + OwnDesc = 'OWN_DESC', + PrimaryKeyAsc = 'PRIMARY_KEY_ASC', + PrimaryKeyDesc = 'PRIMARY_KEY_DESC', + TotalAsc = 'TOTAL_ASC', + TotalDesc = 'TOTAL_DESC' +} + +export type ErrorEvent = Node & { + __typename?: 'ErrorEvent'; + description: Scalars['String']['output']; + id: Scalars['String']['output']; + /** A globally unique identifier. Can be used in various places throughout the system to identify this single value. */ + nodeId: Scalars['ID']['output']; +}; + +export type ErrorEventAggregates = { + __typename?: 'ErrorEventAggregates'; + /** Distinct count aggregates across the matching connection (ignoring before/after/first/last/offset) */ + distinctCount?: Maybe; + keys?: Maybe>; +}; + +export type ErrorEventDistinctCountAggregates = { + __typename?: 'ErrorEventDistinctCountAggregates'; + /** Distinct count of description across the matching connection */ + description?: Maybe; + /** Distinct count of id across the matching connection */ + id?: Maybe; +}; + +/** A filter to be used against `ErrorEvent` object types. All fields are combined with a logical ‘and.’ */ +export type ErrorEventFilter = { + /** Checks for all expressions in this list. */ + and?: InputMaybe>; + /** Filter by the object’s `description` field. */ + description?: InputMaybe; + /** Filter by the object’s `id` field. */ + id?: InputMaybe; + /** Negates the expression. */ + not?: InputMaybe; + /** Checks for any expressions in this list. */ + or?: InputMaybe>; +}; + +/** A connection to a list of `ErrorEvent` values. 
*/ +export type ErrorEventsConnection = { + __typename?: 'ErrorEventsConnection'; + /** Aggregates across the matching connection (ignoring before/after/first/last/offset) */ + aggregates?: Maybe; + /** A list of edges which contains the `ErrorEvent` and cursor to aid in pagination. */ + edges: Array; + /** Grouped aggregates across the matching connection (ignoring before/after/first/last/offset) */ + groupedAggregates?: Maybe>; + /** A list of `ErrorEvent` objects. */ + nodes: Array>; + /** Information to aid in pagination. */ + pageInfo: PageInfo; + /** The count of *all* `ErrorEvent` you could get from the connection. */ + totalCount: Scalars['Int']['output']; +}; + + +/** A connection to a list of `ErrorEvent` values. */ +export type ErrorEventsConnectionGroupedAggregatesArgs = { + groupBy: Array; + having?: InputMaybe; +}; + +/** A `ErrorEvent` edge in the connection. */ +export type ErrorEventsEdge = { + __typename?: 'ErrorEventsEdge'; + /** A cursor for use in pagination. */ + cursor?: Maybe; + /** The `ErrorEvent` at the end of the edge. */ + node?: Maybe; +}; + +/** Grouping methods for `ErrorEvent` for usage during aggregation. */ +export enum ErrorEventsGroupBy { + Description = 'DESCRIPTION' +} + +/** Conditions for `ErrorEvent` aggregates. */ +export type ErrorEventsHavingInput = { + AND?: InputMaybe>; + OR?: InputMaybe>; +}; + +/** Methods to use when ordering `ErrorEvent`. 
*/ +export enum ErrorEventsOrderBy { + DescriptionAsc = 'DESCRIPTION_ASC', + DescriptionDesc = 'DESCRIPTION_DESC', + IdAsc = 'ID_ASC', + IdDesc = 'ID_DESC', + Natural = 'NATURAL', + PrimaryKeyAsc = 'PRIMARY_KEY_ASC', + PrimaryKeyDesc = 'PRIMARY_KEY_DESC' +} + +export type HavingBigfloatFilter = { + equalTo?: InputMaybe; + greaterThan?: InputMaybe; + greaterThanOrEqualTo?: InputMaybe; + lessThan?: InputMaybe; + lessThanOrEqualTo?: InputMaybe; + notEqualTo?: InputMaybe; +}; + +export type HavingDatetimeFilter = { + equalTo?: InputMaybe; + greaterThan?: InputMaybe; + greaterThanOrEqualTo?: InputMaybe; + lessThan?: InputMaybe; + lessThanOrEqualTo?: InputMaybe; + notEqualTo?: InputMaybe; +}; + +export type HavingIntFilter = { + equalTo?: InputMaybe; + greaterThan?: InputMaybe; + greaterThanOrEqualTo?: InputMaybe; + lessThan?: InputMaybe; + lessThanOrEqualTo?: InputMaybe; + notEqualTo?: InputMaybe; +}; + +export type HistoryElement = Node & { + __typename?: 'HistoryElement'; + address: Scalars['String']['output']; + assetTransfer?: Maybe; + blockNumber: Scalars['Int']['output']; + extrinsic?: Maybe; + extrinsicHash?: Maybe; + extrinsicIdx?: Maybe; + id: Scalars['String']['output']; + /** A globally unique identifier. Can be used in various places throughout the system to identify this single value. 
*/ + nodeId: Scalars['ID']['output']; + poolReward?: Maybe; + reward?: Maybe; + swap?: Maybe; + timestamp: Scalars['BigFloat']['output']; + transfer?: Maybe; +}; + +export type HistoryElementAggregates = { + __typename?: 'HistoryElementAggregates'; + /** Mean average aggregates across the matching connection (ignoring before/after/first/last/offset) */ + average?: Maybe; + /** Distinct count aggregates across the matching connection (ignoring before/after/first/last/offset) */ + distinctCount?: Maybe; + keys?: Maybe>; + /** Maximum aggregates across the matching connection (ignoring before/after/first/last/offset) */ + max?: Maybe; + /** Minimum aggregates across the matching connection (ignoring before/after/first/last/offset) */ + min?: Maybe; + /** Population standard deviation aggregates across the matching connection (ignoring before/after/first/last/offset) */ + stddevPopulation?: Maybe; + /** Sample standard deviation aggregates across the matching connection (ignoring before/after/first/last/offset) */ + stddevSample?: Maybe; + /** Sum aggregates across the matching connection (ignoring before/after/first/last/offset) */ + sum?: Maybe; + /** Population variance aggregates across the matching connection (ignoring before/after/first/last/offset) */ + variancePopulation?: Maybe; + /** Sample variance aggregates across the matching connection (ignoring before/after/first/last/offset) */ + varianceSample?: Maybe; +}; + +export type HistoryElementAverageAggregates = { + __typename?: 'HistoryElementAverageAggregates'; + /** Mean average of blockNumber across the matching connection */ + blockNumber?: Maybe; + /** Mean average of extrinsicIdx across the matching connection */ + extrinsicIdx?: Maybe; + /** Mean average of timestamp across the matching connection */ + timestamp?: Maybe; +}; + +export type HistoryElementDistinctCountAggregates = { + __typename?: 'HistoryElementDistinctCountAggregates'; + /** Distinct count of address across the matching connection */ 
+ address?: Maybe; + /** Distinct count of assetTransfer across the matching connection */ + assetTransfer?: Maybe; + /** Distinct count of blockNumber across the matching connection */ + blockNumber?: Maybe; + /** Distinct count of extrinsic across the matching connection */ + extrinsic?: Maybe; + /** Distinct count of extrinsicHash across the matching connection */ + extrinsicHash?: Maybe; + /** Distinct count of extrinsicIdx across the matching connection */ + extrinsicIdx?: Maybe; + /** Distinct count of id across the matching connection */ + id?: Maybe; + /** Distinct count of poolReward across the matching connection */ + poolReward?: Maybe; + /** Distinct count of reward across the matching connection */ + reward?: Maybe; + /** Distinct count of swap across the matching connection */ + swap?: Maybe; + /** Distinct count of timestamp across the matching connection */ + timestamp?: Maybe; + /** Distinct count of transfer across the matching connection */ + transfer?: Maybe; +}; + +/** A filter to be used against `HistoryElement` object types. All fields are combined with a logical ‘and.’ */ +export type HistoryElementFilter = { + /** Filter by the object’s `address` field. */ + address?: InputMaybe; + /** Checks for all expressions in this list. */ + and?: InputMaybe>; + /** Filter by the object’s `assetTransfer` field. */ + assetTransfer?: InputMaybe; + /** Filter by the object’s `blockNumber` field. */ + blockNumber?: InputMaybe; + /** Filter by the object’s `extrinsic` field. */ + extrinsic?: InputMaybe; + /** Filter by the object’s `extrinsicHash` field. */ + extrinsicHash?: InputMaybe; + /** Filter by the object’s `extrinsicIdx` field. */ + extrinsicIdx?: InputMaybe; + /** Filter by the object’s `id` field. */ + id?: InputMaybe; + /** Negates the expression. */ + not?: InputMaybe; + /** Checks for any expressions in this list. */ + or?: InputMaybe>; + /** Filter by the object’s `poolReward` field. 
*/ + poolReward?: InputMaybe; + /** Filter by the object’s `reward` field. */ + reward?: InputMaybe; + /** Filter by the object’s `swap` field. */ + swap?: InputMaybe; + /** Filter by the object’s `timestamp` field. */ + timestamp?: InputMaybe; + /** Filter by the object’s `transfer` field. */ + transfer?: InputMaybe; +}; + +export type HistoryElementMaxAggregates = { + __typename?: 'HistoryElementMaxAggregates'; + /** Maximum of blockNumber across the matching connection */ + blockNumber?: Maybe; + /** Maximum of extrinsicIdx across the matching connection */ + extrinsicIdx?: Maybe; + /** Maximum of timestamp across the matching connection */ + timestamp?: Maybe; +}; + +export type HistoryElementMinAggregates = { + __typename?: 'HistoryElementMinAggregates'; + /** Minimum of blockNumber across the matching connection */ + blockNumber?: Maybe; + /** Minimum of extrinsicIdx across the matching connection */ + extrinsicIdx?: Maybe; + /** Minimum of timestamp across the matching connection */ + timestamp?: Maybe; +}; + +export type HistoryElementStddevPopulationAggregates = { + __typename?: 'HistoryElementStddevPopulationAggregates'; + /** Population standard deviation of blockNumber across the matching connection */ + blockNumber?: Maybe; + /** Population standard deviation of extrinsicIdx across the matching connection */ + extrinsicIdx?: Maybe; + /** Population standard deviation of timestamp across the matching connection */ + timestamp?: Maybe; +}; + +export type HistoryElementStddevSampleAggregates = { + __typename?: 'HistoryElementStddevSampleAggregates'; + /** Sample standard deviation of blockNumber across the matching connection */ + blockNumber?: Maybe; + /** Sample standard deviation of extrinsicIdx across the matching connection */ + extrinsicIdx?: Maybe; + /** Sample standard deviation of timestamp across the matching connection */ + timestamp?: Maybe; +}; + +export type HistoryElementSumAggregates = { + __typename?: 'HistoryElementSumAggregates'; + /** 
Sum of blockNumber across the matching connection */ + blockNumber: Scalars['BigInt']['output']; + /** Sum of extrinsicIdx across the matching connection */ + extrinsicIdx: Scalars['BigInt']['output']; + /** Sum of timestamp across the matching connection */ + timestamp: Scalars['BigFloat']['output']; +}; + +export type HistoryElementVariancePopulationAggregates = { + __typename?: 'HistoryElementVariancePopulationAggregates'; + /** Population variance of blockNumber across the matching connection */ + blockNumber?: Maybe; + /** Population variance of extrinsicIdx across the matching connection */ + extrinsicIdx?: Maybe; + /** Population variance of timestamp across the matching connection */ + timestamp?: Maybe; +}; + +export type HistoryElementVarianceSampleAggregates = { + __typename?: 'HistoryElementVarianceSampleAggregates'; + /** Sample variance of blockNumber across the matching connection */ + blockNumber?: Maybe; + /** Sample variance of extrinsicIdx across the matching connection */ + extrinsicIdx?: Maybe; + /** Sample variance of timestamp across the matching connection */ + timestamp?: Maybe; +}; + +/** A connection to a list of `HistoryElement` values. */ +export type HistoryElementsConnection = { + __typename?: 'HistoryElementsConnection'; + /** Aggregates across the matching connection (ignoring before/after/first/last/offset) */ + aggregates?: Maybe; + /** A list of edges which contains the `HistoryElement` and cursor to aid in pagination. */ + edges: Array; + /** Grouped aggregates across the matching connection (ignoring before/after/first/last/offset) */ + groupedAggregates?: Maybe>; + /** A list of `HistoryElement` objects. */ + nodes: Array>; + /** Information to aid in pagination. */ + pageInfo: PageInfo; + /** The count of *all* `HistoryElement` you could get from the connection. */ + totalCount: Scalars['Int']['output']; +}; + + +/** A connection to a list of `HistoryElement` values. 
*/ +export type HistoryElementsConnectionGroupedAggregatesArgs = { + groupBy: Array; + having?: InputMaybe; +}; + +/** A `HistoryElement` edge in the connection. */ +export type HistoryElementsEdge = { + __typename?: 'HistoryElementsEdge'; + /** A cursor for use in pagination. */ + cursor?: Maybe; + /** The `HistoryElement` at the end of the edge. */ + node?: Maybe; +}; + +/** Grouping methods for `HistoryElement` for usage during aggregation. */ +export enum HistoryElementsGroupBy { + Address = 'ADDRESS', + AssetTransfer = 'ASSET_TRANSFER', + BlockNumber = 'BLOCK_NUMBER', + Extrinsic = 'EXTRINSIC', + ExtrinsicHash = 'EXTRINSIC_HASH', + ExtrinsicIdx = 'EXTRINSIC_IDX', + PoolReward = 'POOL_REWARD', + Reward = 'REWARD', + Swap = 'SWAP', + Timestamp = 'TIMESTAMP', + Transfer = 'TRANSFER' +} + +export type HistoryElementsHavingAverageInput = { + blockNumber?: InputMaybe; + extrinsicIdx?: InputMaybe; + timestamp?: InputMaybe; +}; + +export type HistoryElementsHavingDistinctCountInput = { + blockNumber?: InputMaybe; + extrinsicIdx?: InputMaybe; + timestamp?: InputMaybe; +}; + +/** Conditions for `HistoryElement` aggregates. 
*/ +export type HistoryElementsHavingInput = { + AND?: InputMaybe>; + OR?: InputMaybe>; + average?: InputMaybe; + distinctCount?: InputMaybe; + max?: InputMaybe; + min?: InputMaybe; + stddevPopulation?: InputMaybe; + stddevSample?: InputMaybe; + sum?: InputMaybe; + variancePopulation?: InputMaybe; + varianceSample?: InputMaybe; +}; + +export type HistoryElementsHavingMaxInput = { + blockNumber?: InputMaybe; + extrinsicIdx?: InputMaybe; + timestamp?: InputMaybe; +}; + +export type HistoryElementsHavingMinInput = { + blockNumber?: InputMaybe; + extrinsicIdx?: InputMaybe; + timestamp?: InputMaybe; +}; + +export type HistoryElementsHavingStddevPopulationInput = { + blockNumber?: InputMaybe; + extrinsicIdx?: InputMaybe; + timestamp?: InputMaybe; +}; + +export type HistoryElementsHavingStddevSampleInput = { + blockNumber?: InputMaybe; + extrinsicIdx?: InputMaybe; + timestamp?: InputMaybe; +}; + +export type HistoryElementsHavingSumInput = { + blockNumber?: InputMaybe; + extrinsicIdx?: InputMaybe; + timestamp?: InputMaybe; +}; + +export type HistoryElementsHavingVariancePopulationInput = { + blockNumber?: InputMaybe; + extrinsicIdx?: InputMaybe; + timestamp?: InputMaybe; +}; + +export type HistoryElementsHavingVarianceSampleInput = { + blockNumber?: InputMaybe; + extrinsicIdx?: InputMaybe; + timestamp?: InputMaybe; +}; + +/** Methods to use when ordering `HistoryElement`. 
*/ +export enum HistoryElementsOrderBy { + AddressAsc = 'ADDRESS_ASC', + AddressDesc = 'ADDRESS_DESC', + AssetTransferAsc = 'ASSET_TRANSFER_ASC', + AssetTransferDesc = 'ASSET_TRANSFER_DESC', + BlockNumberAsc = 'BLOCK_NUMBER_ASC', + BlockNumberDesc = 'BLOCK_NUMBER_DESC', + ExtrinsicAsc = 'EXTRINSIC_ASC', + ExtrinsicDesc = 'EXTRINSIC_DESC', + ExtrinsicHashAsc = 'EXTRINSIC_HASH_ASC', + ExtrinsicHashDesc = 'EXTRINSIC_HASH_DESC', + ExtrinsicIdxAsc = 'EXTRINSIC_IDX_ASC', + ExtrinsicIdxDesc = 'EXTRINSIC_IDX_DESC', + IdAsc = 'ID_ASC', + IdDesc = 'ID_DESC', + Natural = 'NATURAL', + PoolRewardAsc = 'POOL_REWARD_ASC', + PoolRewardDesc = 'POOL_REWARD_DESC', + PrimaryKeyAsc = 'PRIMARY_KEY_ASC', + PrimaryKeyDesc = 'PRIMARY_KEY_DESC', + RewardAsc = 'REWARD_ASC', + RewardDesc = 'REWARD_DESC', + SwapAsc = 'SWAP_ASC', + SwapDesc = 'SWAP_DESC', + TimestampAsc = 'TIMESTAMP_ASC', + TimestampDesc = 'TIMESTAMP_DESC', + TransferAsc = 'TRANSFER_ASC', + TransferDesc = 'TRANSFER_DESC' +} + +/** A filter to be used against Int fields. All fields are combined with a logical ‘and.’ */ +export type IntFilter = { + /** Not equal to the specified value, treating null like an ordinary value. */ + distinctFrom?: InputMaybe; + /** Equal to the specified value. */ + equalTo?: InputMaybe; + /** Greater than the specified value. */ + greaterThan?: InputMaybe; + /** Greater than or equal to the specified value. */ + greaterThanOrEqualTo?: InputMaybe; + /** Included in the specified list. */ + in?: InputMaybe>; + /** Is null (if `true` is specified) or is not null (if `false` is specified). */ + isNull?: InputMaybe; + /** Less than the specified value. */ + lessThan?: InputMaybe; + /** Less than or equal to the specified value. */ + lessThanOrEqualTo?: InputMaybe; + /** Equal to the specified value, treating null like an ordinary value. */ + notDistinctFrom?: InputMaybe; + /** Not equal to the specified value. */ + notEqualTo?: InputMaybe; + /** Not included in the specified list. 
*/ + notIn?: InputMaybe>; +}; + +/** A filter to be used against JSON fields. All fields are combined with a logical ‘and.’ */ +export type JsonFilter = { + /** Contained by the specified JSON. */ + containedBy?: InputMaybe; + /** Contains the specified JSON. */ + contains?: InputMaybe; + /** Contains all of the specified keys. */ + containsAllKeys?: InputMaybe>; + /** Contains any of the specified keys. */ + containsAnyKeys?: InputMaybe>; + /** Contains the specified key. */ + containsKey?: InputMaybe; + /** Not equal to the specified value, treating null like an ordinary value. */ + distinctFrom?: InputMaybe; + /** Equal to the specified value. */ + equalTo?: InputMaybe; + /** Greater than the specified value. */ + greaterThan?: InputMaybe; + /** Greater than or equal to the specified value. */ + greaterThanOrEqualTo?: InputMaybe; + /** Included in the specified list. */ + in?: InputMaybe>; + /** Is null (if `true` is specified) or is not null (if `false` is specified). */ + isNull?: InputMaybe; + /** Less than the specified value. */ + lessThan?: InputMaybe; + /** Less than or equal to the specified value. */ + lessThanOrEqualTo?: InputMaybe; + /** Equal to the specified value, treating null like an ordinary value. */ + notDistinctFrom?: InputMaybe; + /** Not equal to the specified value. */ + notEqualTo?: InputMaybe; + /** Not included in the specified list. */ + notIn?: InputMaybe>; +}; + +/** An object with a globally unique `ID`. */ +export type Node = { + /** A globally unique identifier. Can be used in various places throughout the system to identify this single value. */ + nodeId: Scalars['ID']['output']; +}; + +/** Information about pagination in a connection. */ +export type PageInfo = { + __typename?: 'PageInfo'; + /** When paginating forwards, the cursor to continue. */ + endCursor?: Maybe; + /** When paginating forwards, are there more items? */ + hasNextPage: Scalars['Boolean']['output']; + /** When paginating backwards, are there more items? 
*/ + hasPreviousPage: Scalars['Boolean']['output']; + /** When paginating backwards, the cursor to continue. */ + startCursor?: Maybe; +}; + +/** The root query type which gives access points into the data universe. */ +export type Query = Node & { + __typename?: 'Query'; + _metadata?: Maybe<_Metadata>; + _metadatas?: Maybe<_Metadatas>; + _poi?: Maybe<_Poi>; + _poiByChainBlockHash?: Maybe<_Poi>; + _poiByHash?: Maybe<_Poi>; + /** Reads a single `_Poi` using its globally unique `ID`. */ + _poiByNodeId?: Maybe<_Poi>; + _poiByParentHash?: Maybe<_Poi>; + /** Reads and enables pagination through a set of `_Poi`. */ + _pois?: Maybe<_PoisConnection>; + accountPoolReward?: Maybe; + /** Reads a single `AccountPoolReward` using its globally unique `ID`. */ + accountPoolRewardByNodeId?: Maybe; + /** Reads and enables pagination through a set of `AccountPoolReward`. */ + accountPoolRewards?: Maybe; + accountReward?: Maybe; + /** Reads a single `AccountReward` using its globally unique `ID`. */ + accountRewardByNodeId?: Maybe; + /** Reads and enables pagination through a set of `AccountReward`. */ + accountRewards?: Maybe; + accumulatedPoolReward?: Maybe; + /** Reads a single `AccumulatedPoolReward` using its globally unique `ID`. */ + accumulatedPoolRewardByNodeId?: Maybe; + /** Reads and enables pagination through a set of `AccumulatedPoolReward`. */ + accumulatedPoolRewards?: Maybe; + accumulatedReward?: Maybe; + /** Reads a single `AccumulatedReward` using its globally unique `ID`. */ + accumulatedRewardByNodeId?: Maybe; + /** Reads and enables pagination through a set of `AccumulatedReward`. */ + accumulatedRewards?: Maybe; + eraValidatorInfo?: Maybe; + /** Reads a single `EraValidatorInfo` using its globally unique `ID`. */ + eraValidatorInfoByNodeId?: Maybe; + /** Reads and enables pagination through a set of `EraValidatorInfo`. */ + eraValidatorInfos?: Maybe; + errorEvent?: Maybe; + /** Reads a single `ErrorEvent` using its globally unique `ID`. 
*/ + errorEventByNodeId?: Maybe; + /** Reads and enables pagination through a set of `ErrorEvent`. */ + errorEvents?: Maybe; + historyElement?: Maybe; + /** Reads a single `HistoryElement` using its globally unique `ID`. */ + historyElementByNodeId?: Maybe; + /** Reads and enables pagination through a set of `HistoryElement`. */ + historyElements?: Maybe; + /** Fetches an object given its globally unique `ID`. */ + node?: Maybe; + /** The root query type must be a `Node` to work well with Relay 1 mutations. This just resolves to `query`. */ + nodeId: Scalars['ID']['output']; + /** + * Exposes the root query type nested one level down. This is helpful for Relay 1 + * which can only query top level fields if they are in a particular form. + */ + query: Query; +}; + + +/** The root query type which gives access points into the data universe. */ +export type Query_MetadataArgs = { + chainId?: InputMaybe; +}; + + +/** The root query type which gives access points into the data universe. */ +export type Query_MetadatasArgs = { + after?: InputMaybe; + before?: InputMaybe; +}; + + +/** The root query type which gives access points into the data universe. */ +export type Query_PoiArgs = { + id: Scalars['Int']['input']; +}; + + +/** The root query type which gives access points into the data universe. */ +export type Query_PoiByChainBlockHashArgs = { + chainBlockHash: Scalars['String']['input']; +}; + + +/** The root query type which gives access points into the data universe. */ +export type Query_PoiByHashArgs = { + hash: Scalars['String']['input']; +}; + + +/** The root query type which gives access points into the data universe. */ +export type Query_PoiByNodeIdArgs = { + distinct?: InputMaybe>>; + nodeId: Scalars['ID']['input']; +}; + + +/** The root query type which gives access points into the data universe. 
*/ +export type Query_PoiByParentHashArgs = { + parentHash: Scalars['String']['input']; +}; + + +/** The root query type which gives access points into the data universe. */ +export type Query_PoisArgs = { + after?: InputMaybe; + before?: InputMaybe; + distinct?: InputMaybe>>; + filter?: InputMaybe<_PoiFilter>; + first?: InputMaybe; + last?: InputMaybe; + offset?: InputMaybe; + orderBy?: InputMaybe>; +}; + + +/** The root query type which gives access points into the data universe. */ +export type QueryAccountPoolRewardArgs = { + id: Scalars['String']['input']; +}; + + +/** The root query type which gives access points into the data universe. */ +export type QueryAccountPoolRewardByNodeIdArgs = { + distinct?: InputMaybe>>; + nodeId: Scalars['ID']['input']; +}; + + +/** The root query type which gives access points into the data universe. */ +export type QueryAccountPoolRewardsArgs = { + after?: InputMaybe; + before?: InputMaybe; + distinct?: InputMaybe>>; + filter?: InputMaybe; + first?: InputMaybe; + last?: InputMaybe; + offset?: InputMaybe; + orderBy?: InputMaybe>; +}; + + +/** The root query type which gives access points into the data universe. */ +export type QueryAccountRewardArgs = { + id: Scalars['String']['input']; +}; + + +/** The root query type which gives access points into the data universe. */ +export type QueryAccountRewardByNodeIdArgs = { + distinct?: InputMaybe>>; + nodeId: Scalars['ID']['input']; +}; + + +/** The root query type which gives access points into the data universe. */ +export type QueryAccountRewardsArgs = { + after?: InputMaybe; + before?: InputMaybe; + distinct?: InputMaybe>>; + filter?: InputMaybe; + first?: InputMaybe; + last?: InputMaybe; + offset?: InputMaybe; + orderBy?: InputMaybe>; +}; + + +/** The root query type which gives access points into the data universe. 
*/ +export type QueryAccumulatedPoolRewardArgs = { + id: Scalars['String']['input']; +}; + + +/** The root query type which gives access points into the data universe. */ +export type QueryAccumulatedPoolRewardByNodeIdArgs = { + distinct?: InputMaybe>>; + nodeId: Scalars['ID']['input']; +}; + + +/** The root query type which gives access points into the data universe. */ +export type QueryAccumulatedPoolRewardsArgs = { + after?: InputMaybe; + before?: InputMaybe; + distinct?: InputMaybe>>; + filter?: InputMaybe; + first?: InputMaybe; + last?: InputMaybe; + offset?: InputMaybe; + orderBy?: InputMaybe>; +}; + + +/** The root query type which gives access points into the data universe. */ +export type QueryAccumulatedRewardArgs = { + id: Scalars['String']['input']; +}; + + +/** The root query type which gives access points into the data universe. */ +export type QueryAccumulatedRewardByNodeIdArgs = { + distinct?: InputMaybe>>; + nodeId: Scalars['ID']['input']; +}; + + +/** The root query type which gives access points into the data universe. */ +export type QueryAccumulatedRewardsArgs = { + after?: InputMaybe; + before?: InputMaybe; + distinct?: InputMaybe>>; + filter?: InputMaybe; + first?: InputMaybe; + last?: InputMaybe; + offset?: InputMaybe; + orderBy?: InputMaybe>; +}; + + +/** The root query type which gives access points into the data universe. */ +export type QueryEraValidatorInfoArgs = { + id: Scalars['String']['input']; +}; + + +/** The root query type which gives access points into the data universe. */ +export type QueryEraValidatorInfoByNodeIdArgs = { + distinct?: InputMaybe>>; + nodeId: Scalars['ID']['input']; +}; + + +/** The root query type which gives access points into the data universe. 
*/ +export type QueryEraValidatorInfosArgs = { + after?: InputMaybe; + before?: InputMaybe; + distinct?: InputMaybe>>; + filter?: InputMaybe; + first?: InputMaybe; + last?: InputMaybe; + offset?: InputMaybe; + orderBy?: InputMaybe>; +}; + + +/** The root query type which gives access points into the data universe. */ +export type QueryErrorEventArgs = { + id: Scalars['String']['input']; +}; + + +/** The root query type which gives access points into the data universe. */ +export type QueryErrorEventByNodeIdArgs = { + distinct?: InputMaybe>>; + nodeId: Scalars['ID']['input']; +}; + + +/** The root query type which gives access points into the data universe. */ +export type QueryErrorEventsArgs = { + after?: InputMaybe; + before?: InputMaybe; + distinct?: InputMaybe>>; + filter?: InputMaybe; + first?: InputMaybe; + last?: InputMaybe; + offset?: InputMaybe; + orderBy?: InputMaybe>; +}; + + +/** The root query type which gives access points into the data universe. */ +export type QueryHistoryElementArgs = { + id: Scalars['String']['input']; +}; + + +/** The root query type which gives access points into the data universe. */ +export type QueryHistoryElementByNodeIdArgs = { + distinct?: InputMaybe>>; + nodeId: Scalars['ID']['input']; +}; + + +/** The root query type which gives access points into the data universe. */ +export type QueryHistoryElementsArgs = { + after?: InputMaybe; + before?: InputMaybe; + distinct?: InputMaybe>>; + filter?: InputMaybe; + first?: InputMaybe; + last?: InputMaybe; + offset?: InputMaybe; + orderBy?: InputMaybe>; +}; + + +/** The root query type which gives access points into the data universe. */ +export type QueryNodeArgs = { + nodeId: Scalars['ID']['input']; +}; + +export enum RewardType { + Reward = 'reward', + Slash = 'slash' +} + +/** A filter to be used against RewardType fields. 
All fields are combined with a logical ‘and.’ */ +export type RewardTypeFilter = { + /** Not equal to the specified value, treating null like an ordinary value. */ + distinctFrom?: InputMaybe; + /** Equal to the specified value. */ + equalTo?: InputMaybe; + /** Greater than the specified value. */ + greaterThan?: InputMaybe; + /** Greater than or equal to the specified value. */ + greaterThanOrEqualTo?: InputMaybe; + /** Included in the specified list. */ + in?: InputMaybe>; + /** Is null (if `true` is specified) or is not null (if `false` is specified). */ + isNull?: InputMaybe; + /** Less than the specified value. */ + lessThan?: InputMaybe; + /** Less than or equal to the specified value. */ + lessThanOrEqualTo?: InputMaybe; + /** Equal to the specified value, treating null like an ordinary value. */ + notDistinctFrom?: InputMaybe; + /** Not equal to the specified value. */ + notEqualTo?: InputMaybe; + /** Not included in the specified list. */ + notIn?: InputMaybe>; +}; + +/** A filter to be used against String fields. All fields are combined with a logical ‘and.’ */ +export type StringFilter = { + /** Not equal to the specified value, treating null like an ordinary value. */ + distinctFrom?: InputMaybe; + /** Not equal to the specified value, treating null like an ordinary value (case-insensitive). */ + distinctFromInsensitive?: InputMaybe; + /** Ends with the specified string (case-sensitive). */ + endsWith?: InputMaybe; + /** Ends with the specified string (case-insensitive). */ + endsWithInsensitive?: InputMaybe; + /** Equal to the specified value. */ + equalTo?: InputMaybe; + /** Equal to the specified value (case-insensitive). */ + equalToInsensitive?: InputMaybe; + /** Greater than the specified value. */ + greaterThan?: InputMaybe; + /** Greater than the specified value (case-insensitive). */ + greaterThanInsensitive?: InputMaybe; + /** Greater than or equal to the specified value. 
*/ + greaterThanOrEqualTo?: InputMaybe; + /** Greater than or equal to the specified value (case-insensitive). */ + greaterThanOrEqualToInsensitive?: InputMaybe; + /** Included in the specified list. */ + in?: InputMaybe>; + /** Included in the specified list (case-insensitive). */ + inInsensitive?: InputMaybe>; + /** Contains the specified string (case-sensitive). */ + includes?: InputMaybe; + /** Contains the specified string (case-insensitive). */ + includesInsensitive?: InputMaybe; + /** Is null (if `true` is specified) or is not null (if `false` is specified). */ + isNull?: InputMaybe; + /** Less than the specified value. */ + lessThan?: InputMaybe; + /** Less than the specified value (case-insensitive). */ + lessThanInsensitive?: InputMaybe; + /** Less than or equal to the specified value. */ + lessThanOrEqualTo?: InputMaybe; + /** Less than or equal to the specified value (case-insensitive). */ + lessThanOrEqualToInsensitive?: InputMaybe; + /** Matches the specified pattern (case-sensitive). An underscore (_) matches any single character; a percent sign (%) matches any sequence of zero or more characters. */ + like?: InputMaybe; + /** Matches the specified pattern (case-insensitive). An underscore (_) matches any single character; a percent sign (%) matches any sequence of zero or more characters. */ + likeInsensitive?: InputMaybe; + /** Equal to the specified value, treating null like an ordinary value. */ + notDistinctFrom?: InputMaybe; + /** Equal to the specified value, treating null like an ordinary value (case-insensitive). */ + notDistinctFromInsensitive?: InputMaybe; + /** Does not end with the specified string (case-sensitive). */ + notEndsWith?: InputMaybe; + /** Does not end with the specified string (case-insensitive). */ + notEndsWithInsensitive?: InputMaybe; + /** Not equal to the specified value. */ + notEqualTo?: InputMaybe; + /** Not equal to the specified value (case-insensitive). 
*/ + notEqualToInsensitive?: InputMaybe; + /** Not included in the specified list. */ + notIn?: InputMaybe>; + /** Not included in the specified list (case-insensitive). */ + notInInsensitive?: InputMaybe>; + /** Does not contain the specified string (case-sensitive). */ + notIncludes?: InputMaybe; + /** Does not contain the specified string (case-insensitive). */ + notIncludesInsensitive?: InputMaybe; + /** Does not match the specified pattern (case-sensitive). An underscore (_) matches any single character; a percent sign (%) matches any sequence of zero or more characters. */ + notLike?: InputMaybe; + /** Does not match the specified pattern (case-insensitive). An underscore (_) matches any single character; a percent sign (%) matches any sequence of zero or more characters. */ + notLikeInsensitive?: InputMaybe; + /** Does not start with the specified string (case-sensitive). */ + notStartsWith?: InputMaybe; + /** Does not start with the specified string (case-insensitive). */ + notStartsWithInsensitive?: InputMaybe; + /** Starts with the specified string (case-sensitive). */ + startsWith?: InputMaybe; + /** Starts with the specified string (case-insensitive). 
*/ + startsWithInsensitive?: InputMaybe; +}; + +export type TableEstimate = { + __typename?: 'TableEstimate'; + estimate?: Maybe; + table?: Maybe; +}; + +export type _Metadata = { + __typename?: '_Metadata'; + chain?: Maybe; + deployments?: Maybe; + dynamicDatasources?: Maybe; + evmChainId?: Maybe; + genesisHash?: Maybe; + indexerHealthy?: Maybe; + indexerNodeVersion?: Maybe; + lastCreatedPoiHeight?: Maybe; + lastFinalizedVerifiedHeight?: Maybe; + lastProcessedHeight?: Maybe; + lastProcessedTimestamp?: Maybe; + latestSyncedPoiHeight?: Maybe; + queryNodeVersion?: Maybe; + rowCountEstimate?: Maybe>>; + specName?: Maybe; + startHeight?: Maybe; + targetHeight?: Maybe; + unfinalizedBlocks?: Maybe; +}; + +export type _Metadatas = { + __typename?: '_Metadatas'; + nodes: Array>; + totalCount: Scalars['Int']['output']; +}; + +export type _Poi = Node & { + __typename?: '_Poi'; + chainBlockHash?: Maybe; + createdAt: Scalars['Datetime']['output']; + hash?: Maybe; + id: Scalars['Int']['output']; + /** A globally unique identifier. Can be used in various places throughout the system to identify this single value. 
*/ + nodeId: Scalars['ID']['output']; + operationHashRoot?: Maybe; + parentHash?: Maybe; + updatedAt: Scalars['Datetime']['output']; +}; + +export type _PoiAggregates = { + __typename?: '_PoiAggregates'; + /** Mean average aggregates across the matching connection (ignoring before/after/first/last/offset) */ + average?: Maybe<_PoiAverageAggregates>; + /** Distinct count aggregates across the matching connection (ignoring before/after/first/last/offset) */ + distinctCount?: Maybe<_PoiDistinctCountAggregates>; + keys?: Maybe>; + /** Maximum aggregates across the matching connection (ignoring before/after/first/last/offset) */ + max?: Maybe<_PoiMaxAggregates>; + /** Minimum aggregates across the matching connection (ignoring before/after/first/last/offset) */ + min?: Maybe<_PoiMinAggregates>; + /** Population standard deviation aggregates across the matching connection (ignoring before/after/first/last/offset) */ + stddevPopulation?: Maybe<_PoiStddevPopulationAggregates>; + /** Sample standard deviation aggregates across the matching connection (ignoring before/after/first/last/offset) */ + stddevSample?: Maybe<_PoiStddevSampleAggregates>; + /** Sum aggregates across the matching connection (ignoring before/after/first/last/offset) */ + sum?: Maybe<_PoiSumAggregates>; + /** Population variance aggregates across the matching connection (ignoring before/after/first/last/offset) */ + variancePopulation?: Maybe<_PoiVariancePopulationAggregates>; + /** Sample variance aggregates across the matching connection (ignoring before/after/first/last/offset) */ + varianceSample?: Maybe<_PoiVarianceSampleAggregates>; +}; + +export type _PoiAverageAggregates = { + __typename?: '_PoiAverageAggregates'; + /** Mean average of id across the matching connection */ + id?: Maybe; +}; + +export type _PoiDistinctCountAggregates = { + __typename?: '_PoiDistinctCountAggregates'; + /** Distinct count of chainBlockHash across the matching connection */ + chainBlockHash?: Maybe; + /** Distinct 
count of createdAt across the matching connection */ + createdAt?: Maybe; + /** Distinct count of hash across the matching connection */ + hash?: Maybe; + /** Distinct count of id across the matching connection */ + id?: Maybe; + /** Distinct count of operationHashRoot across the matching connection */ + operationHashRoot?: Maybe; + /** Distinct count of parentHash across the matching connection */ + parentHash?: Maybe; + /** Distinct count of updatedAt across the matching connection */ + updatedAt?: Maybe; +}; + +/** A filter to be used against `_Poi` object types. All fields are combined with a logical ‘and.’ */ +export type _PoiFilter = { + /** Checks for all expressions in this list. */ + and?: InputMaybe>; + /** Filter by the object’s `createdAt` field. */ + createdAt?: InputMaybe; + /** Filter by the object’s `id` field. */ + id?: InputMaybe; + /** Negates the expression. */ + not?: InputMaybe<_PoiFilter>; + /** Checks for any expressions in this list. */ + or?: InputMaybe>; + /** Filter by the object’s `updatedAt` field. */ + updatedAt?: InputMaybe; +}; + +/** Grouping methods for `_Poi` for usage during aggregation. */ +export enum _PoiGroupBy { + CreatedAt = 'CREATED_AT', + CreatedAtTruncatedToDay = 'CREATED_AT_TRUNCATED_TO_DAY', + CreatedAtTruncatedToHour = 'CREATED_AT_TRUNCATED_TO_HOUR', + OperationHashRoot = 'OPERATION_HASH_ROOT', + UpdatedAt = 'UPDATED_AT', + UpdatedAtTruncatedToDay = 'UPDATED_AT_TRUNCATED_TO_DAY', + UpdatedAtTruncatedToHour = 'UPDATED_AT_TRUNCATED_TO_HOUR' +} + +export type _PoiHavingAverageInput = { + createdAt?: InputMaybe; + id?: InputMaybe; + updatedAt?: InputMaybe; +}; + +export type _PoiHavingDistinctCountInput = { + createdAt?: InputMaybe; + id?: InputMaybe; + updatedAt?: InputMaybe; +}; + +/** Conditions for `_Poi` aggregates. 
*/ +export type _PoiHavingInput = { + AND?: InputMaybe>; + OR?: InputMaybe>; + average?: InputMaybe<_PoiHavingAverageInput>; + distinctCount?: InputMaybe<_PoiHavingDistinctCountInput>; + max?: InputMaybe<_PoiHavingMaxInput>; + min?: InputMaybe<_PoiHavingMinInput>; + stddevPopulation?: InputMaybe<_PoiHavingStddevPopulationInput>; + stddevSample?: InputMaybe<_PoiHavingStddevSampleInput>; + sum?: InputMaybe<_PoiHavingSumInput>; + variancePopulation?: InputMaybe<_PoiHavingVariancePopulationInput>; + varianceSample?: InputMaybe<_PoiHavingVarianceSampleInput>; +}; + +export type _PoiHavingMaxInput = { + createdAt?: InputMaybe; + id?: InputMaybe; + updatedAt?: InputMaybe; +}; + +export type _PoiHavingMinInput = { + createdAt?: InputMaybe; + id?: InputMaybe; + updatedAt?: InputMaybe; +}; + +export type _PoiHavingStddevPopulationInput = { + createdAt?: InputMaybe; + id?: InputMaybe; + updatedAt?: InputMaybe; +}; + +export type _PoiHavingStddevSampleInput = { + createdAt?: InputMaybe; + id?: InputMaybe; + updatedAt?: InputMaybe; +}; + +export type _PoiHavingSumInput = { + createdAt?: InputMaybe; + id?: InputMaybe; + updatedAt?: InputMaybe; +}; + +export type _PoiHavingVariancePopulationInput = { + createdAt?: InputMaybe; + id?: InputMaybe; + updatedAt?: InputMaybe; +}; + +export type _PoiHavingVarianceSampleInput = { + createdAt?: InputMaybe; + id?: InputMaybe; + updatedAt?: InputMaybe; +}; + +export type _PoiMaxAggregates = { + __typename?: '_PoiMaxAggregates'; + /** Maximum of id across the matching connection */ + id?: Maybe; +}; + +export type _PoiMinAggregates = { + __typename?: '_PoiMinAggregates'; + /** Minimum of id across the matching connection */ + id?: Maybe; +}; + +export type _PoiStddevPopulationAggregates = { + __typename?: '_PoiStddevPopulationAggregates'; + /** Population standard deviation of id across the matching connection */ + id?: Maybe; +}; + +export type _PoiStddevSampleAggregates = { + __typename?: '_PoiStddevSampleAggregates'; + /** Sample standard 
deviation of id across the matching connection */ + id?: Maybe; +}; + +export type _PoiSumAggregates = { + __typename?: '_PoiSumAggregates'; + /** Sum of id across the matching connection */ + id: Scalars['BigInt']['output']; +}; + +export type _PoiVariancePopulationAggregates = { + __typename?: '_PoiVariancePopulationAggregates'; + /** Population variance of id across the matching connection */ + id?: Maybe; +}; + +export type _PoiVarianceSampleAggregates = { + __typename?: '_PoiVarianceSampleAggregates'; + /** Sample variance of id across the matching connection */ + id?: Maybe; +}; + +/** A connection to a list of `_Poi` values. */ +export type _PoisConnection = { + __typename?: '_PoisConnection'; + /** Aggregates across the matching connection (ignoring before/after/first/last/offset) */ + aggregates?: Maybe<_PoiAggregates>; + /** A list of edges which contains the `_Poi` and cursor to aid in pagination. */ + edges: Array<_PoisEdge>; + /** Grouped aggregates across the matching connection (ignoring before/after/first/last/offset) */ + groupedAggregates?: Maybe>; + /** A list of `_Poi` objects. */ + nodes: Array>; + /** Information to aid in pagination. */ + pageInfo: PageInfo; + /** The count of *all* `_Poi` you could get from the connection. */ + totalCount: Scalars['Int']['output']; +}; + + +/** A connection to a list of `_Poi` values. */ +export type _PoisConnectionGroupedAggregatesArgs = { + groupBy: Array<_PoiGroupBy>; + having?: InputMaybe<_PoiHavingInput>; +}; + +/** A `_Poi` edge in the connection. */ +export type _PoisEdge = { + __typename?: '_PoisEdge'; + /** A cursor for use in pagination. */ + cursor?: Maybe; + /** The `_Poi` at the end of the edge. */ + node?: Maybe<_Poi>; +}; + +/** Methods to use when ordering `_Poi`. 
*/ +export enum _PoisOrderBy { + ChainBlockHashAsc = 'CHAIN_BLOCK_HASH_ASC', + ChainBlockHashDesc = 'CHAIN_BLOCK_HASH_DESC', + CreatedAtAsc = 'CREATED_AT_ASC', + CreatedAtDesc = 'CREATED_AT_DESC', + HashAsc = 'HASH_ASC', + HashDesc = 'HASH_DESC', + IdAsc = 'ID_ASC', + IdDesc = 'ID_DESC', + Natural = 'NATURAL', + OperationHashRootAsc = 'OPERATION_HASH_ROOT_ASC', + OperationHashRootDesc = 'OPERATION_HASH_ROOT_DESC', + ParentHashAsc = 'PARENT_HASH_ASC', + ParentHashDesc = 'PARENT_HASH_DESC', + PrimaryKeyAsc = 'PRIMARY_KEY_ASC', + PrimaryKeyDesc = 'PRIMARY_KEY_DESC', + UpdatedAtAsc = 'UPDATED_AT_ASC', + UpdatedAtDesc = 'UPDATED_AT_DESC' +} + +export enum _Poi_Distinct_Enum { + Chainblockhash = 'CHAINBLOCKHASH', + Createdat = 'CREATEDAT', + Hash = 'HASH', + Id = 'ID', + Operationhashroot = 'OPERATIONHASHROOT', + Parenthash = 'PARENTHASH', + Updatedat = 'UPDATEDAT' +} + +export enum Account_Pool_Rewards_Distinct_Enum { + AccumulatedAmount = 'ACCUMULATED_AMOUNT', + Address = 'ADDRESS', + Amount = 'AMOUNT', + BlockNumber = 'BLOCK_NUMBER', + Id = 'ID', + PoolId = 'POOL_ID', + Timestamp = 'TIMESTAMP', + Type = 'TYPE' +} + +export enum Account_Rewards_Distinct_Enum { + AccumulatedAmount = 'ACCUMULATED_AMOUNT', + Address = 'ADDRESS', + Amount = 'AMOUNT', + BlockNumber = 'BLOCK_NUMBER', + Id = 'ID', + Timestamp = 'TIMESTAMP', + Type = 'TYPE' +} + +export enum Accumulated_Pool_Rewards_Distinct_Enum { + Amount = 'AMOUNT', + Id = 'ID' +} + +export enum Accumulated_Rewards_Distinct_Enum { + Amount = 'AMOUNT', + Id = 'ID' +} + +export enum Era_Validator_Infos_Distinct_Enum { + Address = 'ADDRESS', + Era = 'ERA', + Id = 'ID', + Others = 'OTHERS', + Own = 'OWN', + Total = 'TOTAL' +} + +export enum Error_Events_Distinct_Enum { + Description = 'DESCRIPTION', + Id = 'ID' +} + +export enum History_Elements_Distinct_Enum { + Address = 'ADDRESS', + AssetTransfer = 'ASSET_TRANSFER', + BlockNumber = 'BLOCK_NUMBER', + Extrinsic = 'EXTRINSIC', + ExtrinsicHash = 'EXTRINSIC_HASH', + ExtrinsicIdx 
= 'EXTRINSIC_IDX', + Id = 'ID', + PoolReward = 'POOL_REWARD', + Reward = 'REWARD', + Swap = 'SWAP', + Timestamp = 'TIMESTAMP', + Transfer = 'TRANSFER' +} + +export type GetTransfersByAccountSubQueryQueryVariables = Exact<{ + filter: HistoryElementFilter; + offset?: InputMaybe; + first?: InputMaybe; +}>; + + +export type GetTransfersByAccountSubQueryQuery = { __typename?: 'Query', historyElements?: { __typename?: 'HistoryElementsConnection', nodes: Array<{ __typename?: 'HistoryElement', transfer?: any | null, extrinsicHash?: string | null, extrinsicIdx?: number | null, blockNumber: number, address: string, timestamp: any } | null> } | null }; + +export type GetIndexerLastProcessedHeightSubQueryQueryVariables = Exact<{ [key: string]: never; }>; + + +export type GetIndexerLastProcessedHeightSubQueryQuery = { __typename?: 'Query', _metadata?: { __typename?: '_Metadata', lastProcessedHeight?: number | null } | null }; + + +export const GetTransfersByAccountSubQuery = gql` + query GetTransfersByAccountSubQuery($filter: HistoryElementFilter!, $offset: Int, $first: Int) { + historyElements(filter: $filter, offset: $offset, first: $first) { + nodes { + transfer + extrinsicHash + extrinsicIdx + blockNumber + address + timestamp + } + } +} + `; +export const GetIndexerLastProcessedHeightSubQuery = gql` + query GetIndexerLastProcessedHeightSubQuery { + _metadata { + lastProcessedHeight + } +} + `; \ No newline at end of file diff --git a/src/utils/graphQl/subQueryNova/transfer.dto.ts b/src/utils/graphQl/subQueryNova/transfer.dto.ts new file mode 100644 index 0000000..4416794 --- /dev/null +++ b/src/utils/graphQl/subQueryNova/transfer.dto.ts @@ -0,0 +1,8 @@ +export type TransferDto = { + amount: string; + eventIdx: number; + fee: string; + from: string; + success: boolean; + to: string; +}; diff --git a/test/checkAccountCanRpc.spec.ts b/test/checkAccountCanRpc.spec.ts index 85def47..5b203db 100644 --- a/test/checkAccountCanRpc.spec.ts +++ b/test/checkAccountCanRpc.spec.ts @@ 
-4,16 +4,23 @@ import { Test, TestingModule } from '@nestjs/testing'; import { transformAndValidateSync } from 'class-transformer-validator'; import { CryptoUtils } from '../src/utils/cryptoUtils'; import * as ethers from 'ethers'; +import { DataSourceUtilsSubQuery } from '../src/utils/dataSources/dataSourceUtils.subQuery'; +import { DataSourceUtils } from '../src/utils/dataSources/dataSourceUtils'; +import { BlockchainTag } from '../src/constants/blockchain'; dotenv.config({ path: `${__dirname}/../.env.local` }); -describe('Check CryptoUtils service', () => { +describe('Aggregator', () => { let cryptoUtils: CryptoUtils = null; + let appConfig: AppConfig = null; + let dataSourceUtils: DataSourceUtils = null; beforeAll(async () => { const module: TestingModule = await Test.createTestingModule({ providers: [ + AppConfig, CryptoUtils, + DataSourceUtils, { provide: AppConfig, useFactory: () => { @@ -23,39 +30,27 @@ describe('Check CryptoUtils service', () => { ], }).compile(); + appConfig = module.get(AppConfig); cryptoUtils = module.get(CryptoUtils); + dataSourceUtils = module.get(DataSourceUtils); }); - test('addressToHex should convert', async () => { - const evmToHexResult = cryptoUtils.addressToHex( - '0xe93685f3bBA03016F02bD1828BaDD6195988D950', - ); - const ss58ToHexResult = cryptoUtils.addressToHex( - '5DksjtJER6oLDWkWKCWcL3f1swPWeNNFsS9zHxa2rPa7LsH9', - ); - const hexToHexResult = cryptoUtils.addressToHex( - '0x4adf51a47b72795366d52285e329229c836ea7bbfe139dbe8fa0700c4f86fc56', - ); - const isValidSubstrateAddress = cryptoUtils.isValidAddress( - '5DksjtJER6oLDWkWKCWcL3f1swPWeNNFsS9zHxa2rPa7LsH9', - ); - const isValidEvmAddress = cryptoUtils.isValidAddress( - '0xe93685f3bBA03016F02bD1828BaDD6195988D950', - ); - const isValidRandomAddress = cryptoUtils.isValidAddress('aaabbb'); + test('fetch transfers history by address', async () => { + const resp = await dataSourceUtils.getTransfersByAccount({ + blockchainTag: BlockchainTag.POLKADOT, + limit: 100, + offset: 0, 
+ // blockNumber_gt: 8556806, + blockNumber_gt: 0, + blockNumber_lt: null, + // address: '15kZjoFoTv1EtrrjB7g9Looc4HRWNnaswqEZDSPtw8QmLB1Q', + publicKey: + '0xd22ab116cec40b31955995c0d03dcdd6b931aed6d91e1907304c671b0779e107', + queryUrl: appConfig.DATA_SOURCE__SUBQUERY__POLKADOT__TRANSFER, + }); - expect(evmToHexResult).toEqual( - '0xe93685f3bba03016f02bd1828badd6195988d950', - ); - expect(ss58ToHexResult).toEqual( - '0x4adf51a47b72795366d52285e329229c836ea7bbfe139dbe8fa0700c4f86fc56', - ); - expect(hexToHexResult).toEqual( - '0x4adf51a47b72795366d52285e329229c836ea7bbfe139dbe8fa0700c4f86fc56', - ); + console.dir(resp, { depth: null }); - expect(isValidSubstrateAddress).toEqual(true); - expect(isValidEvmAddress).toEqual(true); - expect(isValidRandomAddress).toEqual(false); + // expect(isValidRandomAddress).toEqual(false); }); });