From 58862ef1fc0895658eae30545a95c80a34381036 Mon Sep 17 00:00:00 2001 From: Pedro Binotto Date: Tue, 7 Apr 2026 12:53:39 -0300 Subject: [PATCH 01/17] feat(hyperindex-integration): implementation first try --- apps/indexer/abis/ens-governor.json | 787 ++++++++++++++++ apps/indexer/abis/ens-token.json | 840 ++++++++++++++++++ apps/indexer/config.yaml | 37 + apps/indexer/package.json | 2 + apps/indexer/schema.graphql | 202 +++++ apps/indexer/src/eventHandlers/ENSGovernor.ts | 65 ++ apps/indexer/src/eventHandlers/ENSToken.ts | 259 ++++++ apps/indexer/src/eventHandlers/delegation.ts | 238 +++-- apps/indexer/src/eventHandlers/index.ts | 6 +- .../src/eventHandlers/metrics/circulating.ts | 37 +- .../src/eventHandlers/metrics/delegated.ts | 30 +- .../src/eventHandlers/metrics/index.ts | 8 +- .../src/eventHandlers/metrics/supply.ts | 38 +- .../src/eventHandlers/metrics/total.ts | 37 +- apps/indexer/src/eventHandlers/shared.ts | 147 +-- apps/indexer/src/eventHandlers/transfer.ts | 188 ++-- apps/indexer/src/eventHandlers/voting.ts | 220 ++--- apps/indexer/src/lib/constants.ts | 55 +- apps/indexer/src/lib/date-helpers.ts | 2 +- apps/indexer/src/lib/enums.ts | 36 +- apps/indexer/src/lib/query-helpers.ts | 2 +- apps/indexer/src/lib/time-series.ts | 4 +- apps/indexer/tsconfig.json | 13 +- eslint.config.mjs | 18 + generated@0.1.0 | 0 pnpm-lock.yaml | 669 +++++++++++++- ts-node | 0 27 files changed, 3318 insertions(+), 622 deletions(-) create mode 100644 apps/indexer/abis/ens-governor.json create mode 100644 apps/indexer/abis/ens-token.json create mode 100644 apps/indexer/config.yaml create mode 100644 apps/indexer/schema.graphql create mode 100644 apps/indexer/src/eventHandlers/ENSGovernor.ts create mode 100644 apps/indexer/src/eventHandlers/ENSToken.ts create mode 100644 generated@0.1.0 create mode 100644 ts-node diff --git a/apps/indexer/abis/ens-governor.json b/apps/indexer/abis/ens-governor.json new file mode 100644 index 000000000..ad8f4d77a --- /dev/null +++ 
b/apps/indexer/abis/ens-governor.json @@ -0,0 +1,787 @@ +[ + { + "inputs": [ + { + "internalType": "contract ERC20Votes", + "name": "_token", + "type": "address" + }, + { + "internalType": "contract TimelockController", + "name": "_timelock", + "type": "address" + } + ], + "stateMutability": "nonpayable", + "type": "constructor" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": false, + "internalType": "uint256", + "name": "proposalId", + "type": "uint256" + } + ], + "name": "ProposalCanceled", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": false, + "internalType": "uint256", + "name": "proposalId", + "type": "uint256" + }, + { + "indexed": false, + "internalType": "address", + "name": "proposer", + "type": "address" + }, + { + "indexed": false, + "internalType": "address[]", + "name": "targets", + "type": "address[]" + }, + { + "indexed": false, + "internalType": "uint256[]", + "name": "values", + "type": "uint256[]" + }, + { + "indexed": false, + "internalType": "string[]", + "name": "signatures", + "type": "string[]" + }, + { + "indexed": false, + "internalType": "bytes[]", + "name": "calldatas", + "type": "bytes[]" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "startBlock", + "type": "uint256" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "endBlock", + "type": "uint256" + }, + { + "indexed": false, + "internalType": "string", + "name": "description", + "type": "string" + } + ], + "name": "ProposalCreated", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": false, + "internalType": "uint256", + "name": "proposalId", + "type": "uint256" + } + ], + "name": "ProposalExecuted", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": false, + "internalType": "uint256", + "name": "proposalId", + "type": "uint256" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "eta", + "type": "uint256" + } + ], + "name": 
"ProposalQueued", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": false, + "internalType": "uint256", + "name": "oldQuorumNumerator", + "type": "uint256" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "newQuorumNumerator", + "type": "uint256" + } + ], + "name": "QuorumNumeratorUpdated", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": false, + "internalType": "address", + "name": "oldTimelock", + "type": "address" + }, + { + "indexed": false, + "internalType": "address", + "name": "newTimelock", + "type": "address" + } + ], + "name": "TimelockChange", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "address", + "name": "voter", + "type": "address" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "proposalId", + "type": "uint256" + }, + { + "indexed": false, + "internalType": "uint8", + "name": "support", + "type": "uint8" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "weight", + "type": "uint256" + }, + { + "indexed": false, + "internalType": "string", + "name": "reason", + "type": "string" + } + ], + "name": "VoteCast", + "type": "event" + }, + { + "inputs": [], + "name": "BALLOT_TYPEHASH", + "outputs": [ + { + "internalType": "bytes32", + "name": "", + "type": "bytes32" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "COUNTING_MODE", + "outputs": [ + { + "internalType": "string", + "name": "", + "type": "string" + } + ], + "stateMutability": "pure", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "proposalId", + "type": "uint256" + }, + { + "internalType": "uint8", + "name": "support", + "type": "uint8" + } + ], + "name": "castVote", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + 
{ + "internalType": "uint256", + "name": "proposalId", + "type": "uint256" + }, + { + "internalType": "uint8", + "name": "support", + "type": "uint8" + }, + { + "internalType": "uint8", + "name": "v", + "type": "uint8" + }, + { + "internalType": "bytes32", + "name": "r", + "type": "bytes32" + }, + { + "internalType": "bytes32", + "name": "s", + "type": "bytes32" + } + ], + "name": "castVoteBySig", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "proposalId", + "type": "uint256" + }, + { + "internalType": "uint8", + "name": "support", + "type": "uint8" + }, + { + "internalType": "string", + "name": "reason", + "type": "string" + } + ], + "name": "castVoteWithReason", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address[]", + "name": "targets", + "type": "address[]" + }, + { + "internalType": "uint256[]", + "name": "values", + "type": "uint256[]" + }, + { + "internalType": "bytes[]", + "name": "calldatas", + "type": "bytes[]" + }, + { + "internalType": "bytes32", + "name": "descriptionHash", + "type": "bytes32" + } + ], + "name": "execute", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "payable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "account", + "type": "address" + }, + { + "internalType": "uint256", + "name": "blockNumber", + "type": "uint256" + } + ], + "name": "getVotes", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "proposalId", + "type": "uint256" + }, + { + "internalType": 
"address", + "name": "account", + "type": "address" + } + ], + "name": "hasVoted", + "outputs": [ + { + "internalType": "bool", + "name": "", + "type": "bool" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address[]", + "name": "targets", + "type": "address[]" + }, + { + "internalType": "uint256[]", + "name": "values", + "type": "uint256[]" + }, + { + "internalType": "bytes[]", + "name": "calldatas", + "type": "bytes[]" + }, + { + "internalType": "bytes32", + "name": "descriptionHash", + "type": "bytes32" + } + ], + "name": "hashProposal", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "pure", + "type": "function" + }, + { + "inputs": [], + "name": "name", + "outputs": [ + { + "internalType": "string", + "name": "", + "type": "string" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "proposalId", + "type": "uint256" + } + ], + "name": "proposalDeadline", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "proposalId", + "type": "uint256" + } + ], + "name": "proposalEta", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "proposalId", + "type": "uint256" + } + ], + "name": "proposalSnapshot", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "proposalThreshold", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "pure", + "type": "function" + }, + { + "inputs": [ + { + 
"internalType": "uint256", + "name": "proposalId", + "type": "uint256" + } + ], + "name": "proposalVotes", + "outputs": [ + { + "internalType": "uint256", + "name": "againstVotes", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "forVotes", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "abstainVotes", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address[]", + "name": "targets", + "type": "address[]" + }, + { + "internalType": "uint256[]", + "name": "values", + "type": "uint256[]" + }, + { + "internalType": "bytes[]", + "name": "calldatas", + "type": "bytes[]" + }, + { + "internalType": "string", + "name": "description", + "type": "string" + } + ], + "name": "propose", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address[]", + "name": "targets", + "type": "address[]" + }, + { + "internalType": "uint256[]", + "name": "values", + "type": "uint256[]" + }, + { + "internalType": "bytes[]", + "name": "calldatas", + "type": "bytes[]" + }, + { + "internalType": "bytes32", + "name": "descriptionHash", + "type": "bytes32" + } + ], + "name": "queue", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "blockNumber", + "type": "uint256" + } + ], + "name": "quorum", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "quorumDenominator", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "pure", + "type": "function" + }, + { + "inputs": [], + "name": "quorumNumerator", + 
"outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "proposalId", + "type": "uint256" + } + ], + "name": "state", + "outputs": [ + { + "internalType": "enum IGovernor.ProposalState", + "name": "", + "type": "uint8" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "bytes4", + "name": "interfaceId", + "type": "bytes4" + } + ], + "name": "supportsInterface", + "outputs": [ + { + "internalType": "bool", + "name": "", + "type": "bool" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "timelock", + "outputs": [ + { + "internalType": "address", + "name": "", + "type": "address" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "token", + "outputs": [ + { + "internalType": "contract ERC20Votes", + "name": "", + "type": "address" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "newQuorumNumerator", + "type": "uint256" + } + ], + "name": "updateQuorumNumerator", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "contract TimelockController", + "name": "newTimelock", + "type": "address" + } + ], + "name": "updateTimelock", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [], + "name": "version", + "outputs": [ + { + "internalType": "string", + "name": "", + "type": "string" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "votingDelay", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "pure", + "type": "function" + }, + { + "inputs": [], + "name": "votingPeriod", + "outputs": [ + { + 
"internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "pure", + "type": "function" + } +] \ No newline at end of file diff --git a/apps/indexer/abis/ens-token.json b/apps/indexer/abis/ens-token.json new file mode 100644 index 000000000..9d2ea9ae6 --- /dev/null +++ b/apps/indexer/abis/ens-token.json @@ -0,0 +1,840 @@ +[ + { + "inputs": [ + { + "internalType": "uint256", + "name": "freeSupply", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "airdropSupply", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "_claimPeriodEnds", + "type": "uint256" + } + ], + "stateMutability": "nonpayable", + "type": "constructor" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "address", + "name": "owner", + "type": "address" + }, + { + "indexed": true, + "internalType": "address", + "name": "spender", + "type": "address" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "value", + "type": "uint256" + } + ], + "name": "Approval", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "address", + "name": "claimant", + "type": "address" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "amount", + "type": "uint256" + } + ], + "name": "Claim", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "address", + "name": "delegator", + "type": "address" + }, + { + "indexed": true, + "internalType": "address", + "name": "fromDelegate", + "type": "address" + }, + { + "indexed": true, + "internalType": "address", + "name": "toDelegate", + "type": "address" + } + ], + "name": "DelegateChanged", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "address", + "name": "delegate", + "type": "address" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "previousBalance", + "type": 
"uint256" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "newBalance", + "type": "uint256" + } + ], + "name": "DelegateVotesChanged", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": false, + "internalType": "bytes32", + "name": "merkleRoot", + "type": "bytes32" + } + ], + "name": "MerkleRootChanged", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "address", + "name": "previousOwner", + "type": "address" + }, + { + "indexed": true, + "internalType": "address", + "name": "newOwner", + "type": "address" + } + ], + "name": "OwnershipTransferred", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "address", + "name": "from", + "type": "address" + }, + { + "indexed": true, + "internalType": "address", + "name": "to", + "type": "address" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "value", + "type": "uint256" + } + ], + "name": "Transfer", + "type": "event" + }, + { + "inputs": [], + "name": "DOMAIN_SEPARATOR", + "outputs": [ + { + "internalType": "bytes32", + "name": "", + "type": "bytes32" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "owner", + "type": "address" + }, + { + "internalType": "address", + "name": "spender", + "type": "address" + } + ], + "name": "allowance", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "spender", + "type": "address" + }, + { + "internalType": "uint256", + "name": "amount", + "type": "uint256" + } + ], + "name": "approve", + "outputs": [ + { + "internalType": "bool", + "name": "", + "type": "bool" + } + ], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + 
"name": "account", + "type": "address" + } + ], + "name": "balanceOf", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "account", + "type": "address" + }, + { + "internalType": "uint32", + "name": "pos", + "type": "uint32" + } + ], + "name": "checkpoints", + "outputs": [ + { + "components": [ + { + "internalType": "uint32", + "name": "fromBlock", + "type": "uint32" + }, + { + "internalType": "uint224", + "name": "votes", + "type": "uint224" + } + ], + "internalType": "struct ERC20Votes.Checkpoint", + "name": "", + "type": "tuple" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "claimPeriodEnds", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "amount", + "type": "uint256" + }, + { + "internalType": "address", + "name": "delegate", + "type": "address" + }, + { + "internalType": "bytes32[]", + "name": "merkleProof", + "type": "bytes32[]" + } + ], + "name": "claimTokens", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [], + "name": "decimals", + "outputs": [ + { + "internalType": "uint8", + "name": "", + "type": "uint8" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "spender", + "type": "address" + }, + { + "internalType": "uint256", + "name": "subtractedValue", + "type": "uint256" + } + ], + "name": "decreaseAllowance", + "outputs": [ + { + "internalType": "bool", + "name": "", + "type": "bool" + } + ], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "delegatee", + "type": "address" + } + ], + "name": "delegate", + 
"outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "delegatee", + "type": "address" + }, + { + "internalType": "uint256", + "name": "nonce", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "expiry", + "type": "uint256" + }, + { + "internalType": "uint8", + "name": "v", + "type": "uint8" + }, + { + "internalType": "bytes32", + "name": "r", + "type": "bytes32" + }, + { + "internalType": "bytes32", + "name": "s", + "type": "bytes32" + } + ], + "name": "delegateBySig", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "account", + "type": "address" + } + ], + "name": "delegates", + "outputs": [ + { + "internalType": "address", + "name": "", + "type": "address" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "blockNumber", + "type": "uint256" + } + ], + "name": "getPastTotalSupply", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "account", + "type": "address" + }, + { + "internalType": "uint256", + "name": "blockNumber", + "type": "uint256" + } + ], + "name": "getPastVotes", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "account", + "type": "address" + } + ], + "name": "getVotes", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "spender", + "type": "address" + }, + { + "internalType": "uint256", + "name": "addedValue", + "type": 
"uint256" + } + ], + "name": "increaseAllowance", + "outputs": [ + { + "internalType": "bool", + "name": "", + "type": "bool" + } + ], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "index", + "type": "uint256" + } + ], + "name": "isClaimed", + "outputs": [ + { + "internalType": "bool", + "name": "", + "type": "bool" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "merkleRoot", + "outputs": [ + { + "internalType": "bytes32", + "name": "", + "type": "bytes32" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "minimumMintInterval", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "dest", + "type": "address" + }, + { + "internalType": "uint256", + "name": "amount", + "type": "uint256" + } + ], + "name": "mint", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [], + "name": "mintCap", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "name", + "outputs": [ + { + "internalType": "string", + "name": "", + "type": "string" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "nextMint", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "owner", + "type": "address" + } + ], + "name": "nonces", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + 
"name": "account", + "type": "address" + } + ], + "name": "numCheckpoints", + "outputs": [ + { + "internalType": "uint32", + "name": "", + "type": "uint32" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "owner", + "outputs": [ + { + "internalType": "address", + "name": "", + "type": "address" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "owner", + "type": "address" + }, + { + "internalType": "address", + "name": "spender", + "type": "address" + }, + { + "internalType": "uint256", + "name": "value", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "deadline", + "type": "uint256" + }, + { + "internalType": "uint8", + "name": "v", + "type": "uint8" + }, + { + "internalType": "bytes32", + "name": "r", + "type": "bytes32" + }, + { + "internalType": "bytes32", + "name": "s", + "type": "bytes32" + } + ], + "name": "permit", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [], + "name": "renounceOwnership", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "bytes32", + "name": "_merkleRoot", + "type": "bytes32" + } + ], + "name": "setMerkleRoot", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "dest", + "type": "address" + } + ], + "name": "sweep", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [], + "name": "symbol", + "outputs": [ + { + "internalType": "string", + "name": "", + "type": "string" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "totalSupply", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + 
"internalType": "address", + "name": "recipient", + "type": "address" + }, + { + "internalType": "uint256", + "name": "amount", + "type": "uint256" + } + ], + "name": "transfer", + "outputs": [ + { + "internalType": "bool", + "name": "", + "type": "bool" + } + ], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "sender", + "type": "address" + }, + { + "internalType": "address", + "name": "recipient", + "type": "address" + }, + { + "internalType": "uint256", + "name": "amount", + "type": "uint256" + } + ], + "name": "transferFrom", + "outputs": [ + { + "internalType": "bool", + "name": "", + "type": "bool" + } + ], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "newOwner", + "type": "address" + } + ], + "name": "transferOwnership", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + } +] \ No newline at end of file diff --git a/apps/indexer/config.yaml b/apps/indexer/config.yaml new file mode 100644 index 000000000..a007ff9d4 --- /dev/null +++ b/apps/indexer/config.yaml @@ -0,0 +1,37 @@ +name: anticapture-indexer +description: Anticapture DAO governance indexer +field_selection: + transaction_fields: + - hash + - to + - from + +contracts: + - name: ENSToken + abi_file_path: abis/ens-token.json + handler: src/eventHandlers/ENSToken.ts + events: + - event: "Transfer(address indexed from, address indexed to, uint256 value)" + - event: "DelegateChanged(address indexed delegator, address indexed fromDelegate, address indexed toDelegate)" + - event: "DelegateVotesChanged(address indexed delegate, uint256 previousBalance, uint256 newBalance)" + - name: ENSGovernor + abi_file_path: abis/ens-governor.json + handler: src/eventHandlers/ENSGovernor.ts + events: + - event: "ProposalCreated(uint256 proposalId, address proposer, address[] targets, uint256[] values, string[] signatures, bytes[] calldatas, uint256 
startBlock, uint256 endBlock, string description)" + - event: "VoteCast(address indexed voter, uint256 proposalId, uint8 support, uint256 weight, string reason)" + - event: "ProposalCanceled(uint256 proposalId)" + - event: "ProposalExecuted(uint256 proposalId)" + - event: "ProposalQueued(uint256 proposalId, uint256 eta)" + +networks: + - id: 1 + hypersync_config: + url: https://eth.hypersync.xyz + start_block: 9380410 + contracts: + - name: ENSToken + address: "0xC18360217D8F7Ab5e7c516566761Ea12Ce7F9D72" + - name: ENSGovernor + address: "0x323a76393544d5ecca80cd6ef2a560c6a395b7e3" + start_block: 13533772 diff --git a/apps/indexer/package.json b/apps/indexer/package.json index b8542865e..bf3dd7892 100644 --- a/apps/indexer/package.json +++ b/apps/indexer/package.json @@ -4,6 +4,7 @@ "private": true, "scripts": { "dev": "ponder dev", + "envio": "envio dev", "start": "ponder start --views-schema=anticapture --schema=$RAILWAY_DEPLOYMENT_ID", "db:list": "ponder db list", "db:prune": "ponder db prune", @@ -24,6 +25,7 @@ "@types/node": "^20.16.5", "@types/pg": "^8.15.6", "dotenv": "^16.5.0", + "envio": "^2.32.12", "eslint": "^9", "eslint-config-prettier": "^9.1.0", "eslint-plugin-prettier": "^5.2.1", diff --git a/apps/indexer/schema.graphql b/apps/indexer/schema.graphql new file mode 100644 index 000000000..d7c02d714 --- /dev/null +++ b/apps/indexer/schema.graphql @@ -0,0 +1,202 @@ +enum MetricType { + total + delegated + cex + dex + lending + circulating + treasury + non_circulating +} + +enum EventType { + VOTE + PROPOSAL + PROPOSAL_EXTENDED + DELEGATION + DELEGATION_VOTES_CHANGED + TRANSFER +} + +type Token { + id: ID! + name: String + decimals: Int! + totalSupply: BigInt! + delegatedSupply: BigInt! + cexSupply: BigInt! + dexSupply: BigInt! + lendingSupply: BigInt! + circulatingSupply: BigInt! + treasury: BigInt! + nonCirculatingSupply: BigInt! +} + +type Account { + id: ID! +} + +# Composite PK: "{accountId}-{tokenId}" +type AccountBalance { + id: ID! 
+ accountId: String! @index + tokenId: String! @index + balance: BigInt! + delegate: String! +} + +# Composite PK: "{accountId}" +type AccountPower { + id: ID! + accountId: String! @index + daoId: String! + votingPower: BigInt! + votesCount: Int! + proposalsCount: Int! + delegationsCount: Int! + lastVoteTimestamp: BigInt! +} + +# Composite PK: "{txHash}-{accountId}-{logIndex}" +type VotingPowerHistory { + id: ID! + transactionHash: String! @index + daoId: String! + accountId: String! @index + votingPower: BigInt! + delta: BigInt! + deltaMod: BigInt! + timestamp: BigInt! + logIndex: Int! +} + +# Composite PK: "{txHash}-{accountId}-{logIndex}" +type BalanceHistory { + id: ID! + transactionHash: String! @index + daoId: String! + accountId: String! @index + balance: BigInt! + delta: BigInt! + deltaMod: BigInt! + timestamp: BigInt! + logIndex: Int! +} + +# Composite PK: "{txHash}-{delegatorId}-{delegateId}" +type Delegation { + id: ID! + transactionHash: String! @index + daoId: String! + delegateAccountId: String! @index + delegatorAccountId: String! @index + delegatedValue: BigInt! + previousDelegate: String + timestamp: BigInt! @index + logIndex: Int! + isCex: Boolean! + isDex: Boolean! + isLending: Boolean! + isTotal: Boolean! + delegationType: Int +} + +# Composite PK: "{txHash}-{fromId}-{toId}" +type Transfer { + id: ID! + transactionHash: String! @index + daoId: String! + tokenId: String! @index + amount: BigInt! @index + fromAccountId: String! @index + toAccountId: String! @index + timestamp: BigInt! @index + logIndex: Int! + isCex: Boolean! + isDex: Boolean! + isLending: Boolean! + isTotal: Boolean! +} + +# Composite PK: "{voterId}-{proposalId}" +type VoteOnchain { + id: ID! + txHash: String! + daoId: String! + voterAccountId: String! @index + proposalId: String! @index + support: String! + votingPower: BigInt! + reason: String + timestamp: BigInt! +} + +type ProposalOnchain { + id: ID! + txHash: String! + daoId: String! + proposerAccountId: String! 
@index + targets: Json! + values: Json! + signatures: Json! + calldatas: Json! + startBlock: Int! + endBlock: Int! + title: String! + description: String! + timestamp: BigInt! + logIndex: Int! + endTimestamp: BigInt! + status: String! + forVotes: BigInt! + againstVotes: BigInt! + abstainVotes: BigInt! + proposalType: Int +} + +# Composite PK: "{date}-{tokenId}-{metricType}" +type DaoMetricsDayBucket { + id: ID! + date: BigInt! + daoId: String! + tokenId: String! @index + metricType: MetricType! + openValue: BigInt! + closeValue: BigInt! + low: BigInt! + high: BigInt! + average: BigInt! + volume: BigInt! + count: Int! + lastUpdate: BigInt! +} + +type Transaction { + id: ID! + transactionHash: String! + fromAddress: String + toAddress: String + isCex: Boolean! + isDex: Boolean! + isLending: Boolean! + isTotal: Boolean! + timestamp: BigInt! +} + + +type TokenPrice { + id: ID! + price: BigInt! + timestamp: BigInt! +} + +# Composite PK: "{txHash}-{logIndex}" +type FeedEvent { + id: ID! + txHash: String! @index + logIndex: Int! + eventType: EventType! + value: BigInt! @index + timestamp: BigInt! 
@index + metadata: Json +} diff --git a/apps/indexer/src/eventHandlers/ENSGovernor.ts b/apps/indexer/src/eventHandlers/ENSGovernor.ts new file mode 100644 index 000000000..3ecdec77f --- /dev/null +++ b/apps/indexer/src/eventHandlers/ENSGovernor.ts @@ -0,0 +1,65 @@ +import { ENSGovernor } from "../../generated/index.js"; +import type { Hex, Address } from "viem"; + +import { ProposalStatus, CONTRACT_ADDRESSES } from "../lib/constants.ts"; +import { DaoIdEnum } from "../lib/enums.ts"; + +import { proposalCreated, updateProposalStatus, voteCast } from "./voting.ts"; + +const DAO_ID = DaoIdEnum.ENS; +const BLOCK_TIME = CONTRACT_ADDRESSES[DAO_ID].blockTime; + +ENSGovernor.VoteCast.handler(async ({ event, context }) => { + await voteCast(context, DAO_ID, { + proposalId: event.params.proposalId.toString(), + voter: event.params.voter as Address, + reason: event.params.reason, + support: Number(event.params.support), + timestamp: BigInt(event.block.timestamp), + txHash: event.transaction.hash as Hex, + votingPower: event.params.weight, + logIndex: event.logIndex, + }); +}); + +ENSGovernor.ProposalCreated.handler(async ({ event, context }) => { + await proposalCreated(context, DAO_ID, BLOCK_TIME, { + proposalId: event.params.proposalId.toString(), + txHash: event.transaction.hash as Hex, + proposer: event.params.proposer as Address, + targets: [...event.params.targets] as Address[], + values: [...event.params.values], + signatures: [...event.params.signatures], + calldatas: [...event.params.calldatas] as Hex[], + startBlock: event.params.startBlock.toString(), + endBlock: event.params.endBlock.toString(), + description: event.params.description, + timestamp: BigInt(event.block.timestamp), + blockNumber: BigInt(event.block.number as number), + logIndex: event.logIndex, + }); +}); + +ENSGovernor.ProposalCanceled.handler(async ({ event, context }) => { + await updateProposalStatus( + context, + event.params.proposalId.toString(), + ProposalStatus.CANCELED, + ); +}); + 
+ENSGovernor.ProposalExecuted.handler(async ({ event, context }) => { + await updateProposalStatus( + context, + event.params.proposalId.toString(), + ProposalStatus.EXECUTED, + ); +}); + +ENSGovernor.ProposalQueued.handler(async ({ event, context }) => { + await updateProposalStatus( + context, + event.params.proposalId.toString(), + ProposalStatus.QUEUED, + ); +}); diff --git a/apps/indexer/src/eventHandlers/ENSToken.ts b/apps/indexer/src/eventHandlers/ENSToken.ts new file mode 100644 index 000000000..674fbebc8 --- /dev/null +++ b/apps/indexer/src/eventHandlers/ENSToken.ts @@ -0,0 +1,259 @@ +import { ENSToken } from "../../generated/index.js"; +import type { handlerContext } from "../../generated/index.js"; +import type { Address, Hex } from "viem"; +import { getAddress } from "viem"; + +import { + CONTRACT_ADDRESSES, + MetricTypesEnum, + BurningAddresses, + CEXAddresses, + DEXAddresses, + LendingAddresses, + TreasuryAddresses, + NonCirculatingAddresses, +} from "../lib/constants.ts"; +import { DaoIdEnum } from "../lib/enums.ts"; + +import { delegateChanged, delegatedVotesChanged } from "./delegation.ts"; +import { tokenTransfer } from "./transfer.ts"; +import { createAddressSet, handleTransaction } from "./shared.ts"; +import { + updateDelegatedSupply, + updateCirculatingSupply, + updateSupplyMetric, + updateTotalSupply, +} from "./metrics/index.ts"; + +const DAO_ID = DaoIdEnum.ENS; +const ENS_CONTRACTS = CONTRACT_ADDRESSES[DAO_ID]; +const TOKEN_ADDRESS = getAddress(ENS_CONTRACTS.token.address); +const TOKEN_DECIMALS = ENS_CONTRACTS.token.decimals; + +const cexAddressSet = createAddressSet(Object.values(CEXAddresses[DAO_ID])); +const dexAddressSet = createAddressSet(Object.values(DEXAddresses[DAO_ID])); +const lendingAddressSet = createAddressSet( + Object.values(LendingAddresses[DAO_ID]), +); +const burningAddressSet = createAddressSet( + Object.values(BurningAddresses[DAO_ID]), +); +const treasuryAddressSet = createAddressSet( + 
Object.values(TreasuryAddresses[DAO_ID]), +); +const nonCirculatingAddressSet = createAddressSet( + Object.values(NonCirculatingAddresses[DAO_ID]), +); +const delegationAddressSets = { + cex: cexAddressSet, + dex: dexAddressSet, + lending: lendingAddressSet, + burning: burningAddressSet, +}; + +// Lazy token initialization — replaces Ponder's setup event +const ensureTokenExists = async (context: handlerContext) => { + await context.Token.getOrCreate({ + id: TOKEN_ADDRESS, + name: DAO_ID, + decimals: TOKEN_DECIMALS, + totalSupply: 0n, + delegatedSupply: 0n, + cexSupply: 0n, + dexSupply: 0n, + lendingSupply: 0n, + circulatingSupply: 0n, + treasury: 0n, + nonCirculatingSupply: 0n, + }); +}; + +ENSToken.Transfer.handler(async ({ event, context }) => { + const from = event.params.from as Address; + const to = event.params.to as Address; + const { value } = event.params; + const timestamp = BigInt(event.block.timestamp); + + await ensureTokenExists(context); + + await tokenTransfer( + context, + DAO_ID, + { + from, + to, + value, + token: TOKEN_ADDRESS, + transactionHash: event.transaction.hash as Hex, + timestamp, + logIndex: event.logIndex, + }, + { + cex: cexAddressSet, + dex: dexAddressSet, + lending: lendingAddressSet, + burning: burningAddressSet, + }, + ); + + await updateSupplyMetric( + context, + "lendingSupply", + lendingAddressSet, + MetricTypesEnum.LENDING_SUPPLY, + from, + to, + value, + DAO_ID, + TOKEN_ADDRESS, + timestamp, + ); + await updateSupplyMetric( + context, + "cexSupply", + cexAddressSet, + MetricTypesEnum.CEX_SUPPLY, + from, + to, + value, + DAO_ID, + TOKEN_ADDRESS, + timestamp, + ); + await updateSupplyMetric( + context, + "dexSupply", + dexAddressSet, + MetricTypesEnum.DEX_SUPPLY, + from, + to, + value, + DAO_ID, + TOKEN_ADDRESS, + timestamp, + ); + await updateSupplyMetric( + context, + "treasury", + treasuryAddressSet, + MetricTypesEnum.TREASURY, + from, + to, + value, + DAO_ID, + TOKEN_ADDRESS, + timestamp, + ); + await updateSupplyMetric( 
+ context, + "nonCirculatingSupply", + nonCirculatingAddressSet, + MetricTypesEnum.NON_CIRCULATING_SUPPLY, + from, + to, + value, + DAO_ID, + TOKEN_ADDRESS, + timestamp, + ); + await updateTotalSupply( + context, + burningAddressSet, + MetricTypesEnum.TOTAL_SUPPLY, + from, + to, + value, + DAO_ID, + TOKEN_ADDRESS, + timestamp, + ); + await updateCirculatingSupply(context, DAO_ID, TOKEN_ADDRESS, timestamp); + + if (!event.transaction.to) return; + + await handleTransaction( + context, + event.transaction.hash as Hex, + from, + event.transaction.to as Address, + timestamp, + [from, to], + { + cex: cexAddressSet, + dex: dexAddressSet, + lending: lendingAddressSet, + burning: burningAddressSet, + }, + ); +}); + +ENSToken.DelegateChanged.handler(async ({ event, context }) => { + const delegator = event.params.delegator as Address; + const fromDelegate = event.params.fromDelegate as Address; + const toDelegate = event.params.toDelegate as Address; + const timestamp = BigInt(event.block.timestamp); + + await ensureTokenExists(context); + + await delegateChanged( + context, + DAO_ID, + { + delegator, + delegate: toDelegate, + tokenId: TOKEN_ADDRESS, + previousDelegate: fromDelegate, + txHash: event.transaction.hash as Hex, + timestamp, + logIndex: event.logIndex, + }, + delegationAddressSets, + ); + + if (!event.transaction.to) return; + + await handleTransaction( + context, + event.transaction.hash as Hex, + delegator, + event.transaction.to as Address, + timestamp, + [delegator, toDelegate], + ); +}); + +ENSToken.DelegateVotesChanged.handler(async ({ event, context }) => { + const delegate = event.params.delegate as Address; + const { previousBalance, newBalance } = event.params; + const timestamp = BigInt(event.block.timestamp); + + await ensureTokenExists(context); + + await delegatedVotesChanged(context, DAO_ID, { + delegate, + txHash: event.transaction.hash as Hex, + newBalance, + oldBalance: previousBalance, + timestamp, + logIndex: event.logIndex, + }); + + await 
updateDelegatedSupply( + context, + DAO_ID, + TOKEN_ADDRESS, + newBalance - previousBalance, + timestamp, + ); + + if (!event.transaction.to) return; + + await handleTransaction( + context, + event.transaction.hash as Hex, + delegate, + event.transaction.to as Address, + timestamp, + [delegate], + ); +}); diff --git a/apps/indexer/src/eventHandlers/delegation.ts b/apps/indexer/src/eventHandlers/delegation.ts index 586d7d963..142c1b215 100644 --- a/apps/indexer/src/eventHandlers/delegation.ts +++ b/apps/indexer/src/eventHandlers/delegation.ts @@ -1,26 +1,21 @@ -import { Context } from "ponder:registry"; -import { - accountBalance, - accountPower, - delegation, - feedEvent, - votingPowerHistory, -} from "ponder:schema"; -import { Address, getAddress, Hex, zeroAddress } from "viem"; +import type { handlerContext } from "../../generated/index.js"; +import type { EventType_t } from "../../generated/src/db/Enums.gen.ts"; +import type { Address, Hex } from "viem"; +import { getAddress, zeroAddress } from "viem"; import { BurningAddresses, CEXAddresses, DEXAddresses, LendingAddresses, -} from "@/lib/constants"; -import { DaoIdEnum } from "@/lib/enums"; +} from "../lib/constants.ts"; +import { DaoIdEnum } from "../lib/enums.ts"; import { createAddressSet, ensureAccountExists, ensureAccountsExist, -} from "./shared"; +} from "./shared.ts"; type DelegationAddressSets = { cex: ReadonlySet
; @@ -29,22 +24,8 @@ type DelegationAddressSets = { burning: ReadonlySet
; }; -/** - * ### Creates: - * - New `Account` records (for delegator and delegate if they don't exist) - * - New `Delegation` record with calculated delegated value and flags - * - New `AccountBalance` record (if delegator doesn't have one for this token) - * - New `AccountPower` record (if delegate doesn't have one for this DAO) - * - New `Transaction` record (if this transaction hasn't been processed) - * - * ### Updates: - * - `Delegation`: Adds to existing delegated value if record already exists - * - `AccountBalance`: Changes the delegate assignment for the delegator - * - `AccountPower`: Increments the delegate's delegation count - * - `Transaction`: Updates transaction flags if record already exists - */ export const delegateChanged = async ( - context: Context, + context: handlerContext, daoId: DaoIdEnum, args: { delegator: Address; @@ -72,17 +53,14 @@ export const delegateChanged = async ( const normalizedDelegator = getAddress(delegator); const normalizedDelegate = getAddress(delegate); - // Ensure all required accounts exist in parallel await ensureAccountsExist(context, [delegator, delegate]); - const delegatorBalance = _delegatorBalance + const delegatorBalanceId = `${normalizedDelegator}-${getAddress(tokenId)}`; + const storedBalance = _delegatorBalance ? { balance: _delegatorBalance } - : await context.db.find(accountBalance, { - accountId: normalizedDelegator, - tokenId: getAddress(tokenId), - }); + : await context.AccountBalance.get(delegatorBalanceId); + const delegatedValue = storedBalance?.balance ?? 0n; - // Pre-compute address lists for flag determination (normalized to checksum) const { cex, dex, lending, burning } = addressSets ?? 
{ cex: createAddressSet(Object.values(CEXAddresses[daoId] || {})), dex: createAddressSet(Object.values(DEXAddresses[daoId] || {})), @@ -90,100 +68,90 @@ export const delegateChanged = async ( burning: createAddressSet(Object.values(BurningAddresses[daoId] || {})), }; - // Determine flags for the delegation const isCex = cex.has(normalizedDelegator) || cex.has(normalizedDelegate); const isDex = dex.has(normalizedDelegator) || dex.has(normalizedDelegate); const isLending = lending.has(normalizedDelegator) || lending.has(normalizedDelegate); - const isBurning = + const isTotal = burning.has(normalizedDelegator) || burning.has(normalizedDelegate); - const isTotal = isBurning; - await context.db - .insert(delegation) - .values({ - transactionHash: txHash, - daoId, - delegateAccountId: normalizedDelegate, - delegatorAccountId: normalizedDelegator, - delegatedValue: delegatorBalance?.balance ?? 0n, - previousDelegate: getAddress(previousDelegate), - timestamp, - logIndex, - isCex, - isDex, - isLending, - isTotal, - }) - .onConflictDoUpdate((current) => ({ - delegatedValue: - current.delegatedValue + (delegatorBalance?.balance ?? 0n), - })); - - await context.db - .insert(accountBalance) - .values({ - accountId: normalizedDelegator, - tokenId: getAddress(tokenId), - delegate: normalizedDelegate, - balance: BigInt(0), - }) - .onConflictDoUpdate({ - delegate: normalizedDelegate, - }); + const delegationId = `${txHash}-${normalizedDelegator}-${normalizedDelegate}`; + const existingDelegation = await context.Delegation.get(delegationId); + context.Delegation.set({ + id: delegationId, + transactionHash: txHash, + daoId, + delegateAccountId: normalizedDelegate, + delegatorAccountId: normalizedDelegator, + delegatedValue: (existingDelegation?.delegatedValue ?? 
0n) + delegatedValue, + previousDelegate: getAddress(previousDelegate), + timestamp, + logIndex, + isCex, + isDex, + isLending, + isTotal, + delegationType: undefined, + }); + + // Update delegator's balance record to point to new delegate + const existingBalance = await context.AccountBalance.get(delegatorBalanceId); + context.AccountBalance.set({ + id: delegatorBalanceId, + accountId: normalizedDelegator, + tokenId: getAddress(tokenId), + balance: existingBalance?.balance ?? 0n, + delegate: normalizedDelegate, + }); + // Decrement previous delegate's count if (previousDelegate !== zeroAddress) { - await context.db - .insert(accountPower) - .values({ - accountId: getAddress(previousDelegate), - daoId, - }) - .onConflictDoUpdate((current) => ({ - delegationsCount: Math.max(0, current.delegationsCount - 1), - })); + const prevPowerId = getAddress(previousDelegate); + const prevPower = await context.AccountPower.get(prevPowerId); + context.AccountPower.set({ + id: prevPowerId, + accountId: prevPowerId, + daoId, + votingPower: prevPower?.votingPower ?? 0n, + votesCount: prevPower?.votesCount ?? 0, + proposalsCount: prevPower?.proposalsCount ?? 0, + delegationsCount: Math.max(0, (prevPower?.delegationsCount ?? 0) - 1), + lastVoteTimestamp: prevPower?.lastVoteTimestamp ?? 0n, + }); } - await context.db - .insert(accountPower) - .values({ - accountId: normalizedDelegate, - daoId, - delegationsCount: 1, - }) - .onConflictDoUpdate((current) => ({ - delegationsCount: current.delegationsCount + 1, - })); + // Increment new delegate's count + const delegatePowerId = normalizedDelegate; + const delegatePower = await context.AccountPower.get(delegatePowerId); + context.AccountPower.set({ + id: delegatePowerId, + accountId: normalizedDelegate, + daoId, + votingPower: delegatePower?.votingPower ?? 0n, + votesCount: delegatePower?.votesCount ?? 0, + proposalsCount: delegatePower?.proposalsCount ?? 0, + delegationsCount: (delegatePower?.delegationsCount ?? 
0) + 1, + lastVoteTimestamp: delegatePower?.lastVoteTimestamp ?? 0n, + }); - await context.db.insert(feedEvent).values({ + context.FeedEvent.set({ + id: `${txHash}-${logIndex}`, txHash, logIndex, - type: "DELEGATION", - value: delegatorBalance?.balance ?? 0n, + eventType: "DELEGATION" as EventType_t, + value: delegatedValue, timestamp, metadata: { delegator: normalizedDelegator, delegate: normalizedDelegate, previousDelegate: getAddress(previousDelegate), - amount: delegatorBalance?.balance ?? 0n, + amount: delegatedValue.toString(), }, }); }; -/** - * ### Creates: - * - New `Account` record (for delegate if it doesn't exist) - * - New `VotingPowerHistory` record with voting power change details - * - New `AccountPower` record (if delegate doesn't have one for this DAO) - * - New daily metric records (via `storeDailyBucket`) - * - * ### Updates: - * - `AccountPower`: Sets the delegate's current voting power to new balance - * - `Token`: Adjusts delegated supply by the balance delta - * - Daily bucket metrics for delegated supply tracking - */ export const delegatedVotesChanged = async ( - context: Context, + context: handlerContext, daoId: DaoIdEnum, args: { delegate: Address; @@ -201,43 +169,43 @@ export const delegatedVotesChanged = async ( await ensureAccountExists(context, delegate); - const delta = newBalance - oldBalance; - const deltaMod = delta > 0n ? delta : -delta; + const diff = newBalance - oldBalance; + const deltaMod = diff > 0n ? 
diff : -diff; + + context.VotingPowerHistory.set({ + id: `${txHash}-${normalizedDelegate}-${logIndex}`, + daoId, + transactionHash: txHash, + accountId: normalizedDelegate, + votingPower: newBalance, + delta: diff, + deltaMod, + timestamp, + logIndex, + }); - await context.db - .insert(votingPowerHistory) - .values({ - daoId, - transactionHash: txHash, - accountId: normalizedDelegate, - votingPower: newBalance, - delta, - deltaMod, - timestamp, - logIndex, - }) - .onConflictDoNothing(); - - await context.db - .insert(accountPower) - .values({ - accountId: normalizedDelegate, - daoId, - votingPower: newBalance, - }) - .onConflictDoUpdate(() => ({ - votingPower: newBalance, - })); + const existingPower = await context.AccountPower.get(normalizedDelegate); + context.AccountPower.set({ + id: normalizedDelegate, + accountId: normalizedDelegate, + daoId, + votingPower: newBalance, + votesCount: existingPower?.votesCount ?? 0, + proposalsCount: existingPower?.proposalsCount ?? 0, + delegationsCount: existingPower?.delegationsCount ?? 0, + lastVoteTimestamp: existingPower?.lastVoteTimestamp ?? 
0n, + }); - await context.db.insert(feedEvent).values({ + context.FeedEvent.set({ + id: `${txHash}-${logIndex}`, txHash, logIndex, - type: "DELEGATION_VOTES_CHANGED", + eventType: "DELEGATION_VOTES_CHANGED" as EventType_t, value: deltaMod, timestamp, metadata: { - delta, - deltaMod, + delta: diff.toString(), + deltaMod: deltaMod.toString(), delegate: normalizedDelegate, }, }); diff --git a/apps/indexer/src/eventHandlers/index.ts b/apps/indexer/src/eventHandlers/index.ts index 93374519b..a5e8d7c14 100644 --- a/apps/indexer/src/eventHandlers/index.ts +++ b/apps/indexer/src/eventHandlers/index.ts @@ -1,3 +1,3 @@ -export * from "./transfer"; -export * from "./delegation"; -export * from "./voting"; +export * from "./transfer.ts"; +export * from "./delegation.ts"; +export * from "./voting.ts"; diff --git a/apps/indexer/src/eventHandlers/metrics/circulating.ts b/apps/indexer/src/eventHandlers/metrics/circulating.ts index cce3b94d8..24c7b2479 100644 --- a/apps/indexer/src/eventHandlers/metrics/circulating.ts +++ b/apps/indexer/src/eventHandlers/metrics/circulating.ts @@ -1,32 +1,27 @@ -import { Address, getAddress } from "viem"; -import { token } from "ponder:schema"; -import { Context } from "ponder:registry"; +import type { Address } from "viem"; +import { getAddress } from "viem"; +import type { handlerContext } from "../../../generated/index.js"; -import { storeDailyBucket } from "../shared"; -import { MetricTypesEnum } from "@/lib/constants"; +import { storeDailyBucket } from "../shared.ts"; +import { MetricTypesEnum } from "../../lib/constants.ts"; export const updateCirculatingSupply = async ( - context: Context, + context: handlerContext, daoId: string, tokenAddress: Address, timestamp: bigint, ) => { - let currentCirculatingSupply = 0n; - let newCirculatingSupply = 0n; - await context.db - .update(token, { id: getAddress(tokenAddress) }) - .set((current) => { - currentCirculatingSupply = current.circulatingSupply; - newCirculatingSupply = - current.totalSupply - 
current.treasury - current.nonCirculatingSupply; - return { - circulatingSupply: newCirculatingSupply, - }; - }); + const tokenId = getAddress(tokenAddress); + const token = await context.Token.get(tokenId); + if (!token) return false; - if (currentCirculatingSupply === newCirculatingSupply) { - return false; - } + const currentCirculatingSupply = token.circulatingSupply; + const newCirculatingSupply = + token.totalSupply - token.treasury - token.nonCirculatingSupply; + + if (currentCirculatingSupply === newCirculatingSupply) return false; + + context.Token.set({ ...token, circulatingSupply: newCirculatingSupply }); await storeDailyBucket( context, diff --git a/apps/indexer/src/eventHandlers/metrics/delegated.ts b/apps/indexer/src/eventHandlers/metrics/delegated.ts index d41c64f67..675d39025 100644 --- a/apps/indexer/src/eventHandlers/metrics/delegated.ts +++ b/apps/indexer/src/eventHandlers/metrics/delegated.ts @@ -1,28 +1,26 @@ -import { Address, getAddress } from "viem"; -import { token } from "ponder:schema"; -import { Context } from "ponder:registry"; +import type { Address } from "viem"; +import { getAddress } from "viem"; +import type { handlerContext } from "../../../generated/index.js"; -import { DaoIdEnum } from "@/lib/enums"; -import { MetricTypesEnum } from "@/lib/constants"; -import { storeDailyBucket } from "@/eventHandlers/shared"; +import { DaoIdEnum } from "../../lib/enums.ts"; +import { MetricTypesEnum } from "../../lib/constants.ts"; +import { storeDailyBucket } from "../shared.ts"; export const updateDelegatedSupply = async ( - context: Context, + context: handlerContext, daoId: DaoIdEnum, tokenId: Address, amount: bigint, timestamp: bigint, ) => { - let currentDelegatedSupply = 0n; + const normalizedId = getAddress(tokenId); + const token = await context.Token.get(normalizedId); + if (!token) return; - const { delegatedSupply: newDelegatedSupply } = await context.db - .update(token, { id: getAddress(tokenId) }) - .set((current) => { - 
currentDelegatedSupply = current.delegatedSupply; - return { - delegatedSupply: current.delegatedSupply + amount, - }; - }); + const currentDelegatedSupply = token.delegatedSupply; + const newDelegatedSupply = currentDelegatedSupply + amount; + + context.Token.set({ ...token, delegatedSupply: newDelegatedSupply }); await storeDailyBucket( context, diff --git a/apps/indexer/src/eventHandlers/metrics/index.ts b/apps/indexer/src/eventHandlers/metrics/index.ts index b6dcd69be..a4126ee00 100644 --- a/apps/indexer/src/eventHandlers/metrics/index.ts +++ b/apps/indexer/src/eventHandlers/metrics/index.ts @@ -1,4 +1,4 @@ -export * from "./delegated"; -export * from "./total"; -export * from "./supply"; -export * from "./circulating"; +export * from "./delegated.ts"; +export * from "./total.ts"; +export * from "./supply.ts"; +export * from "./circulating.ts"; diff --git a/apps/indexer/src/eventHandlers/metrics/supply.ts b/apps/indexer/src/eventHandlers/metrics/supply.ts index 2067b7058..ecac65839 100644 --- a/apps/indexer/src/eventHandlers/metrics/supply.ts +++ b/apps/indexer/src/eventHandlers/metrics/supply.ts @@ -1,12 +1,16 @@ -import { Address, getAddress } from "viem"; -import { token } from "ponder:schema"; -import { Context } from "ponder:registry"; +import type { Address } from "viem"; +import { getAddress } from "viem"; +import type { handlerContext } from "../../../generated/index.js"; -import { AddressCollection, storeDailyBucket, toAddressSet } from "../shared"; -import { MetricTypesEnum } from "@/lib/constants"; +import { + AddressCollection, + storeDailyBucket, + toAddressSet, +} from "../shared.ts"; +import { MetricTypesEnum } from "../../lib/constants.ts"; export const updateSupplyMetric = async ( - context: Context, + context: handlerContext, supplyField: | "lendingSupply" | "cexSupply" @@ -27,18 +31,16 @@ export const updateSupplyMetric = async ( const isFromRelevant = normalizedAddressList.has(getAddress(from)); if ((isToRelevant || isFromRelevant) && 
!(isToRelevant && isFromRelevant)) { - let currentSupply: bigint = 0n; - - const { [supplyField]: newSupply } = await context.db - .update(token, { id: getAddress(tokenAddress) }) - .set((current) => { - currentSupply = current[supplyField]; - return { - [supplyField]: isToRelevant - ? current[supplyField] + value - : current[supplyField] - value, - }; - }); + const tokenId = getAddress(tokenAddress); + const token = await context.Token.get(tokenId); + if (!token) return false; + + const currentSupply = token[supplyField]; + const newSupply = isToRelevant + ? currentSupply + value + : currentSupply - value; + + context.Token.set({ ...token, [supplyField]: newSupply }); await storeDailyBucket( context, diff --git a/apps/indexer/src/eventHandlers/metrics/total.ts b/apps/indexer/src/eventHandlers/metrics/total.ts index 7019c9e0e..d80f01a61 100644 --- a/apps/indexer/src/eventHandlers/metrics/total.ts +++ b/apps/indexer/src/eventHandlers/metrics/total.ts @@ -1,17 +1,17 @@ -import { Address, getAddress } from "viem"; -import { token } from "ponder:schema"; -import { Context } from "ponder:registry"; +import type { Address } from "viem"; +import { getAddress } from "viem"; +import type { handlerContext } from "../../../generated/index.js"; -import { DaoIdEnum } from "@/lib/enums"; -import { MetricTypesEnum } from "@/lib/constants"; +import { DaoIdEnum } from "../../lib/enums.ts"; +import { MetricTypesEnum } from "../../lib/constants.ts"; import { AddressCollection, storeDailyBucket, toAddressSet, -} from "@/eventHandlers/shared"; +} from "../shared.ts"; export const updateTotalSupply = async ( - context: Context, + context: handlerContext, addressList: AddressCollection, metricType: MetricTypesEnum, from: Address, @@ -30,19 +30,16 @@ export const updateTotalSupply = async ( if (isTotalSupplyTransaction) { const isBurningTokens = normalizedAddressList.has(getAddress(to)); - let currentTotalSupply = 0n; - const newTotalSupply = ( - await context.db - .update(token, { id: 
getAddress(tokenAddress) }) - .set((row) => { - currentTotalSupply = row.totalSupply; - return { - totalSupply: isBurningTokens - ? row.totalSupply - value - : row.totalSupply + value, - }; - }) - ).totalSupply; + const tokenId = getAddress(tokenAddress); + const token = await context.Token.get(tokenId); + if (!token) return false; + + const currentTotalSupply = token.totalSupply; + const newTotalSupply = isBurningTokens + ? currentTotalSupply - value + : currentTotalSupply + value; + + context.Token.set({ ...token, totalSupply: newTotalSupply }); await storeDailyBucket( context, diff --git a/apps/indexer/src/eventHandlers/shared.ts b/apps/indexer/src/eventHandlers/shared.ts index 8efc79295..5771e329b 100644 --- a/apps/indexer/src/eventHandlers/shared.ts +++ b/apps/indexer/src/eventHandlers/shared.ts @@ -1,10 +1,22 @@ -import { Address, getAddress } from "viem"; -import { Context } from "ponder:registry"; -import { account, daoMetricsDayBucket, transaction } from "ponder:schema"; +import type { Address } from "viem"; +import { getAddress } from "viem"; +import type { handlerContext } from "../../generated/index.js"; +import type { MetricType_t } from "../../generated/src/db/Enums.gen.ts"; -import { MetricTypesEnum } from "@/lib/constants"; -import { delta, max, min } from "@/lib/utils"; -import { truncateTimestampToMidnight } from "@/lib/date-helpers"; +import { MetricTypesEnum } from "../lib/constants.ts"; +import { delta, max, min } from "../lib/utils.ts"; +import { truncateTimestampToMidnight } from "../lib/date-helpers.ts"; + +const METRIC_TYPE_MAP: Record = { + [MetricTypesEnum.TOTAL_SUPPLY]: "total", + [MetricTypesEnum.DELEGATED_SUPPLY]: "delegated", + [MetricTypesEnum.CEX_SUPPLY]: "cex", + [MetricTypesEnum.DEX_SUPPLY]: "dex", + [MetricTypesEnum.LENDING_SUPPLY]: "lending", + [MetricTypesEnum.CIRCULATING_SUPPLY]: "circulating", + [MetricTypesEnum.TREASURY]: "treasury", + [MetricTypesEnum.NON_CIRCULATING_SUPPLY]: "non_circulating", +}; export type 
AddressCollection = readonly Address[] | ReadonlySet
; @@ -15,7 +27,7 @@ const normalizeAddressCollection = ( return [...new Set(addresses.map((address) => getAddress(address)))]; } - return [...addresses]; + return [...(addresses as ReadonlySet
)]; }; export const createAddressSet = ( @@ -34,37 +46,28 @@ export const toAddressSet = ( }; export const ensureAccountExists = async ( - context: Context, + context: handlerContext, address: Address, ): Promise => { - await context.db - .insert(account) - .values({ - id: getAddress(address), - }) - .onConflictDoNothing(); + await context.Account.getOrCreate({ id: getAddress(address) }); }; /** - * Helper function to ensure multiple accounts exist in parallel + * Helper function to ensure multiple accounts exist */ export const ensureAccountsExist = async ( - context: Context, + context: handlerContext, addresses: Address[], ): Promise => { - const normalizedAddresses = normalizeAddressCollection(addresses); - if (normalizedAddresses.length === 0) { - return; - } - - await context.db - .insert(account) - .values(normalizedAddresses.map((id) => ({ id }))) - .onConflictDoNothing(); + const normalized = normalizeAddressCollection(addresses); + if (normalized.length === 0) return; + await Promise.all( + normalized.map((id) => context.Account.getOrCreate({ id })), + ); }; export const storeDailyBucket = async ( - context: Context, + context: handlerContext, metricType: MetricTypesEnum, currentValue: bigint, newValue: bigint, @@ -72,42 +75,51 @@ export const storeDailyBucket = async ( timestamp: bigint, tokenAddress: Address, ) => { - const volume = delta(newValue, currentValue); - await context.db - .insert(daoMetricsDayBucket) - .values({ - date: BigInt(truncateTimestampToMidnight(Number(timestamp))), - tokenId: getAddress(tokenAddress), - metricType, + const vol = delta(newValue, currentValue); + const date = BigInt(truncateTimestampToMidnight(Number(timestamp))); + const tokenId = getAddress(tokenAddress); + const id = `${date}-${tokenId}-${metricType}`; + + const existing = await context.DaoMetricsDayBucket.get(id); + if (existing) { + context.DaoMetricsDayBucket.set({ + ...existing, + average: + (existing.average * BigInt(existing.count) + newValue) / + 
BigInt(existing.count + 1), + high: max(newValue, existing.high), + low: min(newValue, existing.low), + closeValue: newValue, + volume: existing.volume + vol, + count: existing.count + 1, + lastUpdate: timestamp, + }); + } else { + context.DaoMetricsDayBucket.set({ + id, + date, + tokenId, + metricType: METRIC_TYPE_MAP[metricType], daoId, average: newValue, - open: newValue, + openValue: newValue, high: newValue, low: newValue, - close: newValue, - volume, + closeValue: newValue, + volume: vol, count: 1, lastUpdate: timestamp, - }) - .onConflictDoUpdate((row) => ({ - average: - (row.average * BigInt(row.count) + newValue) / BigInt(row.count + 1), - high: max(newValue, row.high), - low: min(newValue, row.low), - close: newValue, - volume: row.volume + volume, - count: row.count + 1, - lastUpdate: timestamp, - })); + }); + } }; export const handleTransaction = async ( - context: Context, + context: handlerContext, transactionHash: string, from: Address, to: Address, timestamp: bigint, - addresses: AddressCollection, // The addresses involved in this event + addresses: AddressCollection, { cex = [], dex = [], @@ -118,12 +130,7 @@ export const handleTransaction = async ( dex?: AddressCollection; lending?: AddressCollection; burning?: AddressCollection; - } = { - cex: [], - dex: [], - lending: [], - burning: [], - }, + } = {}, ) => { const normalizedAddresses = normalizeAddressCollection(addresses); const normalizedCex = toAddressSet(cex); @@ -144,22 +151,16 @@ export const handleTransaction = async ( return; } - await context.db - .insert(transaction) - .values({ - transactionHash, - fromAddress: getAddress(from), - toAddress: getAddress(to), - timestamp, - isCex, - isDex, - isLending, - isTotal, - }) - .onConflictDoUpdate((existing) => ({ - isCex: existing.isCex || isCex, - isDex: existing.isDex || isDex, - isLending: existing.isLending || isLending, - isTotal: existing.isTotal || isTotal, - })); + const existing = await context.Transaction.get(transactionHash); + 
context.Transaction.set({ + id: transactionHash, + transactionHash, + fromAddress: getAddress(from), + toAddress: getAddress(to), + timestamp, + isCex: (existing?.isCex ?? false) || isCex, + isDex: (existing?.isDex ?? false) || isDex, + isLending: (existing?.isLending ?? false) || isLending, + isTotal: (existing?.isTotal ?? false) || isTotal, + }); }; diff --git a/apps/indexer/src/eventHandlers/transfer.ts b/apps/indexer/src/eventHandlers/transfer.ts index d72ed1664..f25566823 100644 --- a/apps/indexer/src/eventHandlers/transfer.ts +++ b/apps/indexer/src/eventHandlers/transfer.ts @@ -1,37 +1,18 @@ -import { Context } from "ponder:registry"; -import { - accountBalance, - balanceHistory, - feedEvent, - transfer, -} from "ponder:schema"; -import { Address, getAddress, Hex, zeroAddress } from "viem"; +import type { handlerContext } from "../../generated/index.js"; +import type { EventType_t } from "../../generated/src/db/Enums.gen.ts"; +import type { Address, Hex } from "viem"; +import { getAddress, zeroAddress } from "viem"; -import { DaoIdEnum } from "@/lib/enums"; +import { DaoIdEnum } from "../lib/enums.ts"; -import { AddressCollection, ensureAccountsExist, toAddressSet } from "./shared"; +import { + AddressCollection, + ensureAccountsExist, + toAddressSet, +} from "./shared.ts"; -/** - * ### Creates: - * - New `Account` records (for sender and receiver if they don't exist) - * - New `accountBalance` record (for receiver if it doesn't exist) - * - New `accountBalance` record (for sender if it doesn't exist and not minting) - * - New `transfer` record with transaction details and classification flags - * - New daily metric records for supply tracking (via `updateSupplyMetric` calls) - * - * ### Updates: - * - `accountBalance`: Increments receiver's token balance by transfer value - * - `accountBalance`: Decrements sender's token balance by transfer value (if not minting from zero address) - * - `Token`: Adjusts lending supply based on transfers involving lending 
addresses - * - `Token`: Adjusts CEX supply based on transfers involving centralized exchange addresses - * - `Token`: Adjusts DEX supply based on transfers involving decentralized exchange addresses - * - `Token`: Adjusts treasury balance based on transfers involving treasury addresses - * - `Token`: Adjusts total supply based on transfers involving burning addresses - * - `Token`: Recalculates circulating supply after all supply changes - * - Daily bucket metrics for all supply types (lending, CEX, DEX, treasury, total, circulating) - */ export const tokenTransfer = async ( - context: Context, + context: handlerContext, daoId: DaoIdEnum, args: { from: Address; @@ -70,58 +51,59 @@ export const tokenTransfer = async ( await ensureAccountsExist(context, [from, to]); - const { balance: currentReceiverBalance } = await context.db - .insert(accountBalance) - .values({ - accountId: normalizedTo, + // Upsert receiver balance and track current balance for history + const receiverBalanceId = `${normalizedTo}-${normalizedTokenId}`; + const existingReceiverBalance = + await context.AccountBalance.get(receiverBalanceId); + const currentReceiverBalance = existingReceiverBalance + ? existingReceiverBalance.balance + value + : value; + context.AccountBalance.set({ + id: receiverBalanceId, + accountId: normalizedTo, + tokenId: normalizedTokenId, + balance: currentReceiverBalance, + delegate: existingReceiverBalance?.delegate ?? zeroAddress, + }); + + context.BalanceHistory.set({ + id: `${transactionHash}-${normalizedTo}-${logIndex}`, + daoId, + transactionHash, + accountId: normalizedTo, + balance: currentReceiverBalance, + delta: value, + deltaMod: value > 0n ? value : -value, + timestamp, + logIndex, + }); + + if (from !== zeroAddress) { + const senderBalanceId = `${normalizedFrom}-${normalizedTokenId}`; + const existingSenderBalance = + await context.AccountBalance.get(senderBalanceId); + const currentSenderBalance = existingSenderBalance + ? 
existingSenderBalance.balance - value + : -value; + context.AccountBalance.set({ + id: senderBalanceId, + accountId: normalizedFrom, tokenId: normalizedTokenId, - balance: value, - delegate: zeroAddress, - }) - .onConflictDoUpdate((current) => ({ - balance: current.balance + value, - })); + balance: currentSenderBalance, + delegate: existingSenderBalance?.delegate ?? zeroAddress, + }); - await context.db - .insert(balanceHistory) - .values({ + context.BalanceHistory.set({ + id: `${transactionHash}-${normalizedFrom}-${logIndex}`, daoId, - transactionHash: transactionHash, - accountId: normalizedTo, - balance: currentReceiverBalance, - delta: value, + transactionHash, + accountId: normalizedFrom, + balance: currentSenderBalance, + delta: -value, deltaMod: value > 0n ? value : -value, timestamp, logIndex, - }) - .onConflictDoNothing(); - - if (from !== zeroAddress) { - const { balance: currentSenderBalance } = await context.db - .insert(accountBalance) - .values({ - accountId: normalizedFrom, - tokenId: normalizedTokenId, - balance: -value, - delegate: zeroAddress, - }) - .onConflictDoUpdate((current) => ({ - balance: current.balance - value, - })); - - await context.db - .insert(balanceHistory) - .values({ - daoId, - transactionHash: transactionHash, - accountId: normalizedFrom, - balance: currentSenderBalance, - delta: -value, - deltaMod: value > 0n ? 
value : -value, - timestamp, - logIndex, - }) - .onConflictDoNothing(); + }); } const normalizedCex = toAddressSet(cex); @@ -129,43 +111,39 @@ export const tokenTransfer = async ( const normalizedLending = toAddressSet(lending); const normalizedBurning = toAddressSet(burning); - await context.db - .insert(transfer) - .values({ - transactionHash, - daoId, - tokenId: normalizedTokenId, - amount: value, - fromAccountId: normalizedFrom, - toAccountId: normalizedTo, - timestamp, - logIndex, - isCex: - normalizedCex.has(normalizedFrom) || normalizedCex.has(normalizedTo), - isDex: - normalizedDex.has(normalizedFrom) || normalizedDex.has(normalizedTo), - isLending: - normalizedLending.has(normalizedFrom) || - normalizedLending.has(normalizedTo), - isTotal: - normalizedBurning.has(normalizedFrom) || - normalizedBurning.has(normalizedTo), - }) - .onConflictDoUpdate((current) => ({ - amount: current.amount + value, - })); + const transferId = `${transactionHash}-${normalizedFrom}-${normalizedTo}`; + const existingTransfer = await context.Transfer.get(transferId); + context.Transfer.set({ + id: transferId, + transactionHash, + daoId, + tokenId: normalizedTokenId, + amount: (existingTransfer?.amount ?? 
0n) + value, + fromAccountId: normalizedFrom, + toAccountId: normalizedTo, + timestamp, + logIndex, + isCex: normalizedCex.has(normalizedFrom) || normalizedCex.has(normalizedTo), + isDex: normalizedDex.has(normalizedFrom) || normalizedDex.has(normalizedTo), + isLending: + normalizedLending.has(normalizedFrom) || + normalizedLending.has(normalizedTo), + isTotal: + normalizedBurning.has(normalizedFrom) || + normalizedBurning.has(normalizedTo), + }); - // Insert feed event for activity feed - await context.db.insert(feedEvent).values({ + context.FeedEvent.set({ + id: `${transactionHash}-${logIndex}`, txHash: transactionHash, logIndex, - type: "TRANSFER", + eventType: "TRANSFER" as EventType_t, value, timestamp, metadata: { from: normalizedFrom, to: normalizedTo, - amount: value, + amount: value.toString(), }, }); }; diff --git a/apps/indexer/src/eventHandlers/voting.ts b/apps/indexer/src/eventHandlers/voting.ts index c2ff06422..d3b82ad26 100644 --- a/apps/indexer/src/eventHandlers/voting.ts +++ b/apps/indexer/src/eventHandlers/voting.ts @@ -1,32 +1,14 @@ -import { Context } from "ponder:registry"; -import { - accountPower, - feedEvent, - proposalsOnchain, - votesOnchain, -} from "ponder:schema"; -import { Address, getAddress, Hex } from "viem"; +import type { handlerContext } from "../../generated/index.js"; +import type { EventType_t } from "../../generated/src/db/Enums.gen.ts"; +import type { Address, Hex } from "viem"; +import { getAddress } from "viem"; -import { ProposalStatus } from "@/lib/constants"; +import { ProposalStatus } from "../lib/constants.ts"; -import { ensureAccountExists } from "./shared"; +import { ensureAccountExists } from "./shared.ts"; -/** - * ### Creates: - * - New `Account` record (for voter if it doesn't exist) - * - New `AccountPower` record (if voter doesn't have one for this DAO) - * - New `votesOnchain` record with vote details (transaction hash, support, voting power, reason) - * - * ### Updates: - * - `AccountPower`: Increments 
voter's total vote count by 1 - * - `AccountPower`: Sets last vote timestamp to current vote timestamp - * - `AccountPower`: Sets first vote timestamp (only if voter has never voted before) - * - `proposalsOnchain`: Increments `againstVotes` if support is 0 (against) - * - `proposalsOnchain`: Increments `forVotes` if support is 1 (for) - * - `proposalsOnchain`: Increments `abstainVotes` if support is 2 (abstain) - */ export const voteCast = async ( - context: Context, + context: handlerContext, daoId: string, args: { proposalId: string; @@ -52,26 +34,26 @@ export const voteCast = async ( await ensureAccountExists(context, voter); - // Update account power with vote statistics - await context.db - .insert(accountPower) - .values({ - accountId: getAddress(voter), - daoId, - votesCount: 1, - lastVoteTimestamp: timestamp, - }) - .onConflictDoUpdate((current) => ({ - votesCount: current.votesCount + 1, - lastVoteTimestamp: timestamp, - })); + const normalizedVoter = getAddress(voter); + const powerId = normalizedVoter; + const existingPower = await context.AccountPower.get(powerId); + context.AccountPower.set({ + id: powerId, + accountId: normalizedVoter, + daoId, + votingPower: existingPower?.votingPower ?? 0n, + votesCount: (existingPower?.votesCount ?? 0) + 1, + proposalsCount: existingPower?.proposalsCount ?? 0, + delegationsCount: existingPower?.delegationsCount ?? 0, + lastVoteTimestamp: timestamp, + }); - // Create vote record - await context.db.insert(votesOnchain).values({ - txHash: txHash, + context.VoteOnchain.set({ + id: `${normalizedVoter}-${proposalId}`, + txHash, daoId, proposalId, - voterAccountId: getAddress(voter), + voterAccountId: normalizedVoter, support: support.toString(), votingPower, reason, @@ -79,61 +61,49 @@ export const voteCast = async ( }); // Update proposal vote totals - await context.db - .update(proposalsOnchain, { id: proposalId }) - .set((current) => ({ - againstVotes: current.againstVotes + (support === 0 ? 
votingPower : 0n), - forVotes: current.forVotes + (support === 1 ? votingPower : 0n), - abstainVotes: current.abstainVotes + (support === 2 ? votingPower : 0n), - })); - - const proposal = await context.db.find(proposalsOnchain, { id: proposalId }); + const proposal = await context.ProposalOnchain.get(proposalId); + if (proposal) { + context.ProposalOnchain.set({ + ...proposal, + againstVotes: proposal.againstVotes + (support === 0 ? votingPower : 0n), + forVotes: proposal.forVotes + (support === 1 ? votingPower : 0n), + abstainVotes: proposal.abstainVotes + (support === 2 ? votingPower : 0n), + }); + } - await context.db.insert(feedEvent).values({ + context.FeedEvent.set({ + id: `${txHash}-${logIndex}`, txHash, logIndex, - type: "VOTE", + eventType: "VOTE" as EventType_t, value: votingPower, timestamp, metadata: { - voter: getAddress(voter), + voter: normalizedVoter, reason, support, - votingPower, + votingPower: votingPower.toString(), proposalId, - title: proposal?.title ?? undefined, + title: proposal?.title ?? null, }, }); }; const MAX_TITLE_LENGTH = 200; -/** - * Extracts a proposal title from a markdown description. - * - * Strategy: - * 1. Normalize literal `\n` sequences to real newlines (some proposers - * submit descriptions with escaped newlines). - * 2. If the first non-empty line is an H1 (`# Title`), use it. - * 3. Otherwise, use the first non-empty line that is not a section header - * (H2+), truncated to MAX_TITLE_LENGTH characters. 
- */ function parseProposalTitle(description: string): string { - // Normalize literal "\n" (two chars) into real newlines const normalized = description.replace(/\\n/g, "\n"); const lines = normalized.split("\n"); - // Pass 1: look for an H1 among leading lines (before any content) for (const line of lines) { const trimmed = line.trim(); if (!trimmed) continue; if (/^# /.test(trimmed)) { return trimmed.replace(/^# +/, ""); } - break; // stop at first non-empty, non-H1 line + break; } - // Pass 2: no H1 found — use first non-empty, non-header line for (const line of lines) { const trimmed = line.trim(); if (!trimmed || /^#{1,6}\s/.test(trimmed)) continue; @@ -145,21 +115,8 @@ function parseProposalTitle(description: string): string { return ""; } -/** - * ### Creates: - * - New `Account` record (for proposer if it doesn't exist) - * - New `proposalsOnchain` record with proposal details (targets, values, signatures, calldatas, blocks, description, status) - * - New `AccountPower` record (if proposer doesn't have one for this DAO) - * - * ### Updates: - * - `AccountPower`: Increments proposer's total proposals count by 1 - * - * ### Calculates: - * - Proposal end timestamp based on block delta and average block time - * - Sets initial proposal status to PENDING - */ export const proposalCreated = async ( - context: Context, + context: handlerContext, daoId: string, blockTime: number, args: { @@ -199,13 +156,14 @@ export const proposalCreated = async ( const title = parseProposalTitle(description); const blockDelta = parseInt(endBlock) - Number(blockNumber); - await context.db.insert(proposalsOnchain).values({ + + context.ProposalOnchain.set({ id: proposalId, txHash, daoId, proposerAccountId: getAddress(proposer), targets: targets.map((a) => getAddress(a)), - values, + values: values.map((v) => v.toString()), signatures, calldatas, startBlock: parseInt(startBlock), @@ -217,56 +175,54 @@ export const proposalCreated = async ( status: ProposalStatus.PENDING, 
endTimestamp: timestamp + BigInt(blockDelta * blockTime), proposalType: args.proposalType, + forVotes: 0n, + againstVotes: 0n, + abstainVotes: 0n, }); - // Update proposer's proposal count - const { votingPower: proposerVotingPower } = await context.db - .insert(accountPower) - .values({ - accountId: getAddress(proposer), - daoId, - proposalsCount: 1, - }) - .onConflictDoUpdate((current) => ({ - proposalsCount: current.proposalsCount + 1, - })); + const powerId = getAddress(proposer); + const existingPower = await context.AccountPower.get(powerId); + const proposerVotingPower = existingPower?.votingPower ?? 0n; + context.AccountPower.set({ + id: powerId, + accountId: powerId, + daoId, + votingPower: proposerVotingPower, + votesCount: existingPower?.votesCount ?? 0, + proposalsCount: (existingPower?.proposalsCount ?? 0) + 1, + delegationsCount: existingPower?.delegationsCount ?? 0, + lastVoteTimestamp: existingPower?.lastVoteTimestamp ?? 0n, + }); - // Insert feed event for activity feed - // Proposals are always high relevance as they are significant governance actions - await context.db.insert(feedEvent).values({ + context.FeedEvent.set({ + id: `${txHash}-${logIndex}`, txHash, logIndex, - type: "PROPOSAL", + eventType: "PROPOSAL" as EventType_t, + value: 0n, timestamp, metadata: { id: proposalId, proposer: getAddress(proposer), - votingPower: proposerVotingPower, + votingPower: proposerVotingPower.toString(), title, }, }); }; -/** - * ### Updates: - * - `proposalsOnchain`: Sets the proposal status to the provided status value - */ export const updateProposalStatus = async ( - context: Context, + context: handlerContext, proposalId: string, status: string, ) => { - await context.db.update(proposalsOnchain, { id: proposalId }).set({ - status, - }); + const proposal = await context.ProposalOnchain.get(proposalId); + if (proposal) { + context.ProposalOnchain.set({ ...proposal, status }); + } }; -/** - * ### Updates: - * - `proposalsOnchain`: Sets the new deadline 
(endBlock) and endTimestamp - */ export const proposalExtended = async ( - context: Context, + context: handlerContext, proposalId: string, blockTime: number, extendedDeadline: bigint, @@ -274,32 +230,32 @@ export const proposalExtended = async ( logIndex: number, timestamp: bigint, ) => { - let endTimestamp: bigint | undefined; + const proposal = await context.ProposalOnchain.get(proposalId); + if (!proposal) return; - await context.db.update(proposalsOnchain, { id: proposalId }).set((row) => { - endTimestamp = - row.endTimestamp + - BigInt((Number(extendedDeadline) - row.endBlock) * blockTime); - return { - row, - endBlock: Number(extendedDeadline), - endTimestamp, - }; - }); + const endTimestamp = + proposal.endTimestamp + + BigInt((Number(extendedDeadline) - proposal.endBlock) * blockTime); - const proposal = await context.db.find(proposalsOnchain, { id: proposalId }); + context.ProposalOnchain.set({ + ...proposal, + endBlock: Number(extendedDeadline), + endTimestamp, + }); - await context.db.insert(feedEvent).values({ + context.FeedEvent.set({ + id: `${txHash}-${logIndex}`, txHash, logIndex, - type: "PROPOSAL_EXTENDED", + eventType: "PROPOSAL_EXTENDED" as EventType_t, + value: 0n, timestamp, metadata: { id: proposalId, - title: proposal?.title ?? 
undefined, + title: proposal.title, endBlock: Number(extendedDeadline), - endTimestamp, - proposer: getAddress(proposal!.proposerAccountId), + endTimestamp: endTimestamp.toString(), + proposer: getAddress(proposal.proposerAccountId), }, }); }; diff --git a/apps/indexer/src/lib/constants.ts b/apps/indexer/src/lib/constants.ts index 0562ff8fa..a01a4773a 100644 --- a/apps/indexer/src/lib/constants.ts +++ b/apps/indexer/src/lib/constants.ts @@ -1,6 +1,7 @@ -import { Address, zeroAddress } from "viem"; +import type { Address } from "viem"; +import { zeroAddress } from "viem"; -import { DaoIdEnum } from "./enums"; +import { DaoIdEnum } from "./enums.ts"; export const CONTRACT_ADDRESSES = { [DaoIdEnum.UNI]: { @@ -874,28 +875,34 @@ export const NonCirculatingAddresses: Record< [DaoIdEnum.SHU]: {}, }; -export enum ProposalStatus { - PENDING = "PENDING", - ACTIVE = "ACTIVE", - CANCELED = "CANCELED", - DEFEATED = "DEFEATED", - SUCCEEDED = "SUCCEEDED", - QUEUED = "QUEUED", - EXPIRED = "EXPIRED", - EXECUTED = "EXECUTED", - VETOED = "VETOED", - NO_QUORUM = "NO_QUORUM", -} +export const ProposalStatus = { + PENDING: "PENDING", + ACTIVE: "ACTIVE", + CANCELED: "CANCELED", + DEFEATED: "DEFEATED", + SUCCEEDED: "SUCCEEDED", + QUEUED: "QUEUED", + EXPIRED: "EXPIRED", + EXECUTED: "EXECUTED", + VETOED: "VETOED", + NO_QUORUM: "NO_QUORUM", +} as const; + +export type ProposalStatus = + (typeof ProposalStatus)[keyof typeof ProposalStatus]; + +export const MetricTypesEnum = { + TOTAL_SUPPLY: "TOTAL_SUPPLY", + DELEGATED_SUPPLY: "DELEGATED_SUPPLY", + CEX_SUPPLY: "CEX_SUPPLY", + DEX_SUPPLY: "DEX_SUPPLY", + LENDING_SUPPLY: "LENDING_SUPPLY", + CIRCULATING_SUPPLY: "CIRCULATING_SUPPLY", + TREASURY: "TREASURY", + NON_CIRCULATING_SUPPLY: "NON_CIRCULATING_SUPPLY", +} as const; -export enum MetricTypesEnum { - TOTAL_SUPPLY = "TOTAL_SUPPLY", - DELEGATED_SUPPLY = "DELEGATED_SUPPLY", - CEX_SUPPLY = "CEX_SUPPLY", - DEX_SUPPLY = "DEX_SUPPLY", - LENDING_SUPPLY = "LENDING_SUPPLY", - CIRCULATING_SUPPLY = 
"CIRCULATING_SUPPLY", - TREASURY = "TREASURY", - NON_CIRCULATING_SUPPLY = "NON_CIRCULATING_SUPPLY", -} +export type MetricTypesEnum = + (typeof MetricTypesEnum)[keyof typeof MetricTypesEnum]; export const metricTypeArray = Object.values(MetricTypesEnum); diff --git a/apps/indexer/src/lib/date-helpers.ts b/apps/indexer/src/lib/date-helpers.ts index f884e70a6..4ef8104d5 100644 --- a/apps/indexer/src/lib/date-helpers.ts +++ b/apps/indexer/src/lib/date-helpers.ts @@ -2,7 +2,7 @@ * Date and timestamp utilities for time-series data processing. */ -import { SECONDS_IN_DAY } from "./enums"; +import { SECONDS_IN_DAY } from "./enums.ts"; /** * Truncate timestamp (seconds) to midnight UTC diff --git a/apps/indexer/src/lib/enums.ts b/apps/indexer/src/lib/enums.ts index 3580fd6f3..84f040ae6 100644 --- a/apps/indexer/src/lib/enums.ts +++ b/apps/indexer/src/lib/enums.ts @@ -1,19 +1,21 @@ -export enum DaoIdEnum { - AAVE = "AAVE", - UNI = "UNI", - ENS = "ENS", - ARB = "ARB", - OP = "OP", - GTC = "GTC", - NOUNS = "NOUNS", - TEST = "TEST", - SCR = "SCR", - COMP = "COMP", - OBOL = "OBOL", - ZK = "ZK", - SHU = "SHU", - FLUID = "FLUID", - LIL_NOUNS = "LIL_NOUNS", -} +export const DaoIdEnum = { + AAVE: "AAVE", + UNI: "UNI", + ENS: "ENS", + ARB: "ARB", + OP: "OP", + GTC: "GTC", + NOUNS: "NOUNS", + TEST: "TEST", + SCR: "SCR", + COMP: "COMP", + OBOL: "OBOL", + ZK: "ZK", + SHU: "SHU", + FLUID: "FLUID", + LIL_NOUNS: "LIL_NOUNS", +} as const; + +export type DaoIdEnum = (typeof DaoIdEnum)[keyof typeof DaoIdEnum]; export const SECONDS_IN_DAY = 24 * 60 * 60; diff --git a/apps/indexer/src/lib/query-helpers.ts b/apps/indexer/src/lib/query-helpers.ts index 987cbbfa5..0d17b1c81 100644 --- a/apps/indexer/src/lib/query-helpers.ts +++ b/apps/indexer/src/lib/query-helpers.ts @@ -2,7 +2,7 @@ * Query helpers for pagination and data filtering in time-series APIs. 
*/ -import { truncateTimestampToMidnight } from "./date-helpers"; +import { truncateTimestampToMidnight } from "./date-helpers.ts"; /** * Filter data by cutoff date with fallback to last value before cutoff. diff --git a/apps/indexer/src/lib/time-series.ts b/apps/indexer/src/lib/time-series.ts index 5776ca935..0cb1554de 100644 --- a/apps/indexer/src/lib/time-series.ts +++ b/apps/indexer/src/lib/time-series.ts @@ -4,8 +4,8 @@ * Forward-fill: Use the last known value for any missing data points. */ -import { SECONDS_IN_DAY } from "./enums"; -import { truncateTimestampToMidnight } from "./date-helpers"; +import { SECONDS_IN_DAY } from "./enums.ts"; +import { truncateTimestampToMidnight } from "./date-helpers.ts"; /** * Forward-fill sparse data across a master timeline. diff --git a/apps/indexer/tsconfig.json b/apps/indexer/tsconfig.json index c5588ac63..a7a5c1851 100644 --- a/apps/indexer/tsconfig.json +++ b/apps/indexer/tsconfig.json @@ -23,6 +23,17 @@ "@/*": ["./src/*"] } }, - "include": ["./**/*.ts"], + "ts-node": { + "compilerOptions": { + "module": "CommonJS", + "moduleResolution": "node" + } + }, + "include": [ + "src/eventHandlers/**/*.ts", + "src/lib/**/*.ts", + "src/env.ts", + "generated/**/*.ts" + ], "exclude": ["node_modules", "test"] } diff --git a/eslint.config.mjs b/eslint.config.mjs index a0f56dca2..e303951e0 100644 --- a/eslint.config.mjs +++ b/eslint.config.mjs @@ -31,6 +31,16 @@ export default [ "apps/api-gateway/schema.graphql", "**/storybook-static/**", "**/.storybook/**", + // HyperIndex: ignore generated files and old Ponder files during migration + "apps/indexer/generated/**", + "apps/indexer/src/indexer/**", + "apps/indexer/src/index.ts", + "apps/indexer/src/metrics.ts", + "apps/indexer/src/api/**", + "apps/indexer/ponder.config.ts", + "apps/indexer/ponder.schema.ts", + "apps/indexer/ponder-env.d.ts", + "apps/indexer/config/**", ], }, @@ -126,6 +136,14 @@ export default [ }, }, + // Indexer lib — allow const + type with same name (enum-as-const 
pattern) + { + files: ["apps/indexer/src/lib/**/*.{js,ts}"], + rules: { + "@typescript-eslint/no-redeclare": "off", + }, + }, + // API mappers - allow Zod schema + type with same name pattern { files: ["apps/api/src/mappers/**/*.{js,ts}"], diff --git a/generated@0.1.0 b/generated@0.1.0 new file mode 100644 index 000000000..e69de29bb diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 7ec30b750..f6ff734ef 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -102,7 +102,7 @@ importers: version: 0.31.9 drizzle-orm: specifier: ~0.41.0 - version: 0.41.0(@electric-sql/pglite@0.3.16)(@opentelemetry/api@1.9.0)(@types/pg@8.18.0)(kysely@0.26.3)(pg@8.20.0) + version: 0.41.0(@electric-sql/pglite@0.3.16)(@opentelemetry/api@1.9.0)(@types/pg@8.18.0)(kysely@0.26.3)(pg@8.20.0)(postgres@3.4.1) hono: specifier: ^4.7.10 version: 4.12.7 @@ -163,7 +163,7 @@ importers: version: 0.31.9 drizzle-orm: specifier: ^0.45.1 - version: 0.45.1(@electric-sql/pglite@0.3.16)(@opentelemetry/api@1.9.0)(@types/pg@8.18.0)(kysely@0.26.3)(pg@8.20.0) + version: 0.45.1(@electric-sql/pglite@0.3.16)(@opentelemetry/api@1.9.0)(@types/pg@8.18.0)(kysely@0.26.3)(pg@8.20.0)(postgres@3.4.1) hono: specifier: ^4.7.10 version: 4.12.7 @@ -288,7 +288,7 @@ importers: version: link:../../packages/graphql-client "@apollo/client": specifier: ^3.13.8 - version: 3.14.0(@types/react@19.2.8)(graphql-ws@6.0.7(crossws@0.3.5)(graphql@16.13.1)(ws@7.5.10(bufferutil@4.0.9)(utf-8-validate@5.0.10)))(graphql@16.13.1)(react-dom@19.2.3(react@19.2.3))(react@19.2.3) + version: 3.14.0(@types/react@19.2.8)(graphql-ws@6.0.7(crossws@0.3.5)(graphql@16.13.1)(ws@8.18.0(bufferutil@4.0.9)(utf-8-validate@5.0.10)))(graphql@16.13.1)(react-dom@19.2.3(react@19.2.3))(react@19.2.3) "@ethersproject/providers": specifier: ^5.8.0 version: 5.8.0(bufferutil@4.0.9)(utf-8-validate@5.0.10) @@ -324,7 +324,7 @@ importers: version: 1.2.8(@types/react-dom@19.2.3(@types/react@19.2.8))(@types/react@19.2.8)(react-dom@19.2.3(react@19.2.3))(react@19.2.3) 
"@rainbow-me/rainbowkit": specifier: ^2.2.0 - version: 2.2.10(@tanstack/react-query@5.90.21(react@19.2.3))(@types/react@19.2.8)(react-dom@19.2.3(react@19.2.3))(react@19.2.3)(typescript@5.9.3)(viem@2.47.2(bufferutil@4.0.9)(typescript@5.9.3)(utf-8-validate@5.0.10)(zod@3.25.76))(wagmi@2.19.5(@tanstack/query-core@5.90.20)(@tanstack/react-query@5.90.21(react@19.2.3))(@types/react@19.2.8)(bufferutil@4.0.9)(fastestsmallesttextencoderdecoder@1.0.22)(react@19.2.3)(typescript@5.9.3)(utf-8-validate@5.0.10)(viem@2.47.2(bufferutil@4.0.9)(typescript@5.9.3)(utf-8-validate@5.0.10)(zod@3.25.76))(ws@7.5.10(bufferutil@4.0.9)(utf-8-validate@5.0.10))(zod@3.25.76)) + version: 2.2.10(@tanstack/react-query@5.90.21(react@19.2.3))(@types/react@19.2.8)(react-dom@19.2.3(react@19.2.3))(react@19.2.3)(typescript@5.9.3)(viem@2.47.2(bufferutil@4.0.9)(typescript@5.9.3)(utf-8-validate@5.0.10)(zod@3.25.76))(wagmi@2.19.5(@tanstack/query-core@5.90.20)(@tanstack/react-query@5.90.21(react@19.2.3))(@types/react@19.2.8)(bufferutil@4.0.9)(fastestsmallesttextencoderdecoder@1.0.22)(react@19.2.3)(typescript@5.9.3)(utf-8-validate@5.0.10)(viem@2.47.2(bufferutil@4.0.9)(typescript@5.9.3)(utf-8-validate@5.0.10)(zod@3.25.76))(ws@8.18.0(bufferutil@4.0.9)(utf-8-validate@5.0.10))(zod@3.25.76)) "@snapshot-labs/snapshot.js": specifier: ^0.12.62 version: 0.12.65(bufferutil@4.0.9)(utf-8-validate@5.0.10) @@ -408,7 +408,7 @@ importers: version: 2.47.2(bufferutil@4.0.9)(typescript@5.9.3)(utf-8-validate@5.0.10)(zod@3.25.76) wagmi: specifier: ^2.12.25 - version: 2.19.5(@tanstack/query-core@5.90.20)(@tanstack/react-query@5.90.21(react@19.2.3))(@types/react@19.2.8)(bufferutil@4.0.9)(fastestsmallesttextencoderdecoder@1.0.22)(react@19.2.3)(typescript@5.9.3)(utf-8-validate@5.0.10)(viem@2.47.2(bufferutil@4.0.9)(typescript@5.9.3)(utf-8-validate@5.0.10)(zod@3.25.76))(ws@7.5.10(bufferutil@4.0.9)(utf-8-validate@5.0.10))(zod@3.25.76) + version: 
2.19.5(@tanstack/query-core@5.90.20)(@tanstack/react-query@5.90.21(react@19.2.3))(@types/react@19.2.8)(bufferutil@4.0.9)(fastestsmallesttextencoderdecoder@1.0.22)(react@19.2.3)(typescript@5.9.3)(utf-8-validate@5.0.10)(viem@2.47.2(bufferutil@4.0.9)(typescript@5.9.3)(utf-8-validate@5.0.10)(zod@3.25.76))(ws@8.18.0(bufferutil@4.0.9)(utf-8-validate@5.0.10))(zod@3.25.76) zod: specifier: ^3.25.76 version: 3.25.76 @@ -560,7 +560,7 @@ importers: version: 4.12.7 ponder: specifier: ^0.16.2 - version: 0.16.3(@opentelemetry/api@1.9.0)(@types/node@20.19.37)(@types/pg@8.18.0)(bufferutil@4.0.9)(hono@4.12.7)(lightningcss@1.31.1)(terser@5.46.0)(typescript@5.9.3)(utf-8-validate@5.0.10)(viem@2.47.2(bufferutil@4.0.9)(typescript@5.9.3)(utf-8-validate@5.0.10)(zod@3.25.76))(zod@3.25.76) + version: 0.16.3(@opentelemetry/api@1.9.0)(@types/node@20.19.37)(@types/pg@8.18.0)(bufferutil@4.0.9)(hono@4.12.7)(lightningcss@1.31.1)(postgres@3.4.1)(terser@5.46.0)(typescript@5.9.3)(utf-8-validate@5.0.10)(viem@2.47.2(bufferutil@4.0.9)(typescript@5.9.3)(utf-8-validate@5.0.10)(zod@3.25.76))(zod@3.25.76) viem: specifier: ^2.37.11 version: 2.47.2(bufferutil@4.0.9)(typescript@5.9.3)(utf-8-validate@5.0.10)(zod@3.25.76) @@ -577,6 +577,9 @@ importers: dotenv: specifier: ^16.5.0 version: 16.6.1 + envio: + specifier: ^2.32.12 + version: 2.32.12(bufferutil@4.0.9)(typescript@5.9.3)(utf-8-validate@5.0.10)(zod@3.25.76) eslint: specifier: ^9 version: 9.39.4(jiti@2.6.1) @@ -609,7 +612,7 @@ importers: version: 1.13.6 drizzle-orm: specifier: ^0.45.1 - version: 0.45.1(@electric-sql/pglite@0.3.16)(@opentelemetry/api@1.9.0)(@types/pg@8.18.0)(kysely@0.26.3)(pg@8.20.0) + version: 0.45.1(@electric-sql/pglite@0.3.16)(@opentelemetry/api@1.9.0)(@types/pg@8.18.0)(kysely@0.26.3)(pg@8.20.0)(postgres@3.4.1) pg: specifier: ^8.17.2 version: 8.20.0 @@ -689,7 +692,7 @@ importers: version: 4.9.6 forge-std: specifier: github:foundry-rs/forge-std - version: 
https://codeload.github.com/foundry-rs/forge-std/tar.gz/4540e4aadda88eeb19a54d2b5ad2117c2c7632ec + version: https://codeload.github.com/foundry-rs/forge-std/tar.gz/f494b0c2c045dda3df3d761bc82209b9a015c4e7 packages/observability: dependencies: @@ -756,6 +759,12 @@ packages: integrity: sha512-VQKMkwriZbaOgVCby1UDY/LDk5fIjhQicCvVPFqfe+69fWaPWydbWJ3wRt59/YzIwda1I81loas3oCoHxnqvdA==, } + "@adraffy/ens-normalize@1.10.0": + resolution: + { + integrity: sha512-nA9XHtlAkYfJxY7bce8DcN7eKxWWCWkU+1GR9d+U6MbNpfwQp8TI7vqOsBsMcHoT4mBu2kypKoSKnghEzOOq5Q==, + } + "@adraffy/ens-normalize@1.10.1": resolution: { @@ -2199,6 +2208,20 @@ packages: peerDependencies: "@noble/ciphers": ^1.0.0 + "@elastic/ecs-helpers@1.1.0": + resolution: + { + integrity: sha512-MDLb2aFeGjg46O5mLpdCzT5yOUDnXToJSrco2ShqGIXxNJaM8uJjX+4nd+hRYV4Vex8YJyDtOFEVBldQct6ndg==, + } + engines: { node: ">=10" } + + "@elastic/ecs-pino-format@1.4.0": + resolution: + { + integrity: sha512-eCSBUTgl8KbPyxky8cecDRLCYu2C1oFV4AZ72bEsI+TxXEvaljaL2kgttfzfu7gW+M89eCz55s49uF2t+YMTWA==, + } + engines: { node: ">=10" } + "@electric-sql/pglite@0.2.13": resolution: { @@ -2289,6 +2312,119 @@ packages: } engines: { node: ">=18.0.0" } + "@envio-dev/hyperfuel-client-darwin-arm64@1.2.2": + resolution: + { + integrity: sha512-eQyd9kJCIz/4WCTjkjpQg80DA3pdneHP7qhJIVQ2ZG+Jew9o5XDG+uI0Y16AgGzZ6KGmJSJF6wyUaaAjJfbO1Q==, + } + engines: { node: ">= 10" } + cpu: [arm64] + os: [darwin] + + "@envio-dev/hyperfuel-client-darwin-x64@1.2.2": + resolution: + { + integrity: sha512-l7lRMSoyIiIvKZgQPfgqg7H1xnrQ37A8yUp4S2ys47R8f/wSCSrmMaY1u7n6CxVYCpR9fajwy0/356UgwwhVKw==, + } + engines: { node: ">= 10" } + cpu: [x64] + os: [darwin] + + "@envio-dev/hyperfuel-client-linux-arm64-gnu@1.2.2": + resolution: + { + integrity: sha512-kNiC/1fKuXnoSxp8yEsloDw4Ot/mIcNoYYGLl2CipSIpBtSuiBH5nb6eBcxnRZdKOwf5dKZtZ7MVPL9qJocNJw==, + } + engines: { node: ">= 10" } + cpu: [arm64] + os: [linux] + + "@envio-dev/hyperfuel-client-linux-x64-gnu@1.2.2": + resolution: + { + integrity: 
sha512-XDkvkBG/frS+xiZkJdY4KqOaoAwyxPdi2MysDQgF8NmZdssi32SWch0r4LTqKWLLlCBg9/R55POeXL5UAjg2wQ==, + } + engines: { node: ">= 10" } + cpu: [x64] + os: [linux] + + "@envio-dev/hyperfuel-client-linux-x64-musl@1.2.2": + resolution: + { + integrity: sha512-DKnKJJSwsYtA7YT0EFGhFB5Eqoo42X0l0vZBv4lDuxngEXiiNjeLemXoKQVDzhcbILD7eyXNa5jWUc+2hpmkEg==, + } + engines: { node: ">= 10" } + cpu: [x64] + os: [linux] + + "@envio-dev/hyperfuel-client-win32-x64-msvc@1.2.2": + resolution: + { + integrity: sha512-SwIgTAVM9QhCFPyHwL+e1yQ6o3paV6q25klESkXw+r/KW9QPhOOyA6Yr8nfnur3uqMTLJHAKHTLUnkyi/Nh7Aw==, + } + engines: { node: ">= 10" } + cpu: [x64] + os: [win32] + + "@envio-dev/hyperfuel-client@1.2.2": + resolution: + { + integrity: sha512-raKA6DshYSle0sAOHBV1OkSRFMN+Mkz8sFiMmS3k+m5nP6pP56E17CRRePBL5qmR6ZgSEvGOz/44QUiKNkK9Pg==, + } + engines: { node: ">= 10" } + + "@envio-dev/hypersync-client-darwin-arm64@1.3.0": + resolution: + { + integrity: sha512-JZwiVRbMSuJnKsVUpfjTHc3YgAMvGlyuqWQxVc7Eok4Xp/sZLUCXRQUykbCh6fOUWRmoa2JG/ykP/NotoTRCBg==, + } + engines: { node: ">= 10" } + cpu: [arm64] + os: [darwin] + + "@envio-dev/hypersync-client-darwin-x64@1.3.0": + resolution: + { + integrity: sha512-2eSzQqqqFBMK2enVucYGcny5Ep4DEKYxf3Xme7z9qp2d3c6fMcbVvM4Gt8KOzb7ySjwJ2gU+qY2h545T2NiJXQ==, + } + engines: { node: ">= 10" } + cpu: [x64] + os: [darwin] + + "@envio-dev/hypersync-client-linux-arm64-gnu@1.3.0": + resolution: + { + integrity: sha512-gsjMp3WKekwnA89HvJXvcTM3BE5wVFG/qTF4rmk3rGiXhZ+MGaZQKrYRAhnzQZblueFtF/xnnBYpO35Z3ZFThg==, + } + engines: { node: ">= 10" } + cpu: [arm64] + os: [linux] + + "@envio-dev/hypersync-client-linux-x64-gnu@1.3.0": + resolution: + { + integrity: sha512-Lkvi4lRVwCyFOXf9LYH2X91zmW2l1vbfojKhTwKgqFWv6PMN5atlYjt+/NcUCAAhk5EUavWGjoikwnvLp870cg==, + } + engines: { node: ">= 10" } + cpu: [x64] + os: [linux] + + "@envio-dev/hypersync-client-linux-x64-musl@1.3.0": + resolution: + { + integrity: 
sha512-UIjB/gUX2sl23EMXLBxqtkgMnOjNSiaHK+CSU5vXMXkzL3fOGbz24bvyaPsSv82cxCFEE0yTwlSKkCX6/L8o6Q==, + } + engines: { node: ">= 10" } + cpu: [x64] + os: [linux] + + "@envio-dev/hypersync-client@1.3.0": + resolution: + { + integrity: sha512-wUdfZzbsFPbGq6n/1mmUMsWuiAil+m+fL/GBX5LGUyMJV86TXy2SBtAqYYNyDxWLO6gvGr6PYKrP8pLVAUZDZg==, + } + engines: { node: ">= 10" } + "@esbuild-kit/core-utils@3.3.2": resolution: { @@ -5442,6 +5578,12 @@ packages: integrity: sha512-oYclrNgRaM9SsBUBVbb8M6DTV7ZHRTKugureoYEncY5c65HOmRzvSiTE3y5CYaPYJA/GVkrhXEoF0M3Ya9PMnw==, } + "@noble/curves@1.4.0": + resolution: + { + integrity: sha512-p+4cb332SFCrReJkCYe8Xzm0OWi4Jji5jVdIZRL/PmacmDkFNw6MrrV+gGpiPxLHbV+zKFRywUWbaseT+tZRXg==, + } + "@noble/curves@1.4.2": resolution: { @@ -9182,6 +9324,20 @@ packages: zod: optional: true + abitype@1.0.5: + resolution: + { + integrity: sha512-YzDhti7cjlfaBhHutMaboYB21Ha3rXR9QTkNJFzYC4kC8YclaiwPBBBJY8ejFdu2wnJeZCVZSMlQJ7fi8S6hsw==, + } + peerDependencies: + typescript: ">=5.0.4" + zod: ^3 >=3.22.0 + peerDependenciesMeta: + typescript: + optional: true + zod: + optional: true + abitype@1.0.6: resolution: { @@ -9835,6 +9991,12 @@ packages: integrity: sha512-y/ie+Faknx7sZA5MfGA2xKlu0GDv8RWrXGsmlteyJQ2lvoKv9GBK/fpRMc2qlSoBAgNxrixICFCBefIq8WCQpQ==, } + bignumber.js@9.1.2: + resolution: + { + integrity: sha512-2/mKyZH9K85bzOEfhXDBFZTGd1CTs+5IHpeFQo9luiBG7hghdC851Pj2WAhb6E3R6b9tZj/XKhbg4fum+Kepug==, + } + bintrees@1.0.2: resolution: { @@ -11767,6 +11929,45 @@ packages: } engines: { node: ^12.20.0 || ^14.13.1 || >=16.0.0 } + envio-darwin-arm64@2.32.12: + resolution: + { + integrity: sha512-TLs9jjXUHVqKcBReMHgD7C06lbfWfnMkit3uT55XmgiJYc8zS85T0XmDCnCX4BRbZN7uzMNORqnUc2J3/LR9sQ==, + } + cpu: [arm64] + os: [darwin] + + envio-darwin-x64@2.32.12: + resolution: + { + integrity: sha512-JfKU3LaqxO/aabEAIvpHGKhDGNEiVGvcmmi98cZfG1/vP4S5lO+8KDEp563CaB986N6KtGJRKnDWivvCsseZMw==, + } + cpu: [x64] + os: [darwin] + + envio-linux-arm64@2.32.12: + resolution: + { + integrity: 
sha512-3sBfuR6JLcAkrFcoEfw2WiaPU3VyXGy4kf26HB5BJE/iJUqha+wHoDbv46MfFGuaC0QyM34QvlG0yGRES0ohPw==, + } + cpu: [arm64] + os: [linux] + + envio-linux-x64@2.32.12: + resolution: + { + integrity: sha512-886q+yztKVrhgkwOfoFKARDStbjk1032YBtA6tqrCN8uWjqgzAf30ZDPurJGlq26hQqYNKRp2LhgxChpivsvFw==, + } + cpu: [x64] + os: [linux] + + envio@2.32.12: + resolution: + { + integrity: sha512-bk9y/AjU+kYxO1a9c/jg8RFDrKKKWU0wCffnwtoXo7KGKmPDKq1WyNzVw6sTeboSfGB0i82hJ97WgSAwRAnR1Q==, + } + hasBin: true + environment@1.1.0: resolution: { @@ -12282,6 +12483,20 @@ packages: } engines: { node: ">=0.8.x" } + eventsource-parser@3.0.6: + resolution: + { + integrity: sha512-Vo1ab+QXPzZ4tCa8SwIHJFaSzy4R6SHf7BY79rFBDf0idraZWAkYrDjDj8uWaSm3S2TK+hJ7/t1CEmZ7jXw+pg==, + } + engines: { node: ">=18.0.0" } + + eventsource@4.1.0: + resolution: + { + integrity: sha512-2GuF51iuHX6A9xdTccMTsNb7VO0lHZihApxhvQzJB5A03DvHDd2FQepodbMaztPBmBcE/ox7o2gqaxGhYB9LhQ==, + } + engines: { node: ">=20.0.0" } + evp_bytestokey@1.0.3: resolution: { @@ -12344,6 +12559,12 @@ packages: } engines: { node: "> 0.1.90" } + fast-copy@3.0.2: + resolution: + { + integrity: sha512-dl0O9Vhju8IrcLndv2eU4ldt1ftXMqqfgN4H1cpmGV7P6jeB9FwpN9a2c8DPGE1Ys88rNUJVYDHq73CGAGOPfQ==, + } + fast-copy@4.0.2: resolution: { @@ -12401,6 +12622,13 @@ packages: integrity: sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==, } + fast-json-stringify@2.7.13: + resolution: + { + integrity: sha512-ar+hQ4+OIurUGjSJD1anvYSDcUflywhKjfxnsW4TBTD7+u0tJufv6DKRWoQk3vI6YBOWMoz0TQtfbe7dxbQmvA==, + } + engines: { node: ">= 10.0.0" } + fast-json-stringify@5.16.1: resolution: { @@ -12660,10 +12888,10 @@ packages: } engines: { node: ">=14" } - forge-std@https://codeload.github.com/foundry-rs/forge-std/tar.gz/4540e4aadda88eeb19a54d2b5ad2117c2c7632ec: + forge-std@https://codeload.github.com/foundry-rs/forge-std/tar.gz/f494b0c2c045dda3df3d761bc82209b9a015c4e7: resolution: { - tarball: 
https://codeload.github.com/foundry-rs/forge-std/tar.gz/4540e4aadda88eeb19a54d2b5ad2117c2c7632ec, + tarball: https://codeload.github.com/foundry-rs/forge-std/tar.gz/f494b0c2c045dda3df3d761bc82209b9a015c4e7, } version: 1.15.0 @@ -12892,6 +13120,14 @@ packages: } deprecated: Old versions of glob are not supported, and contain widely publicized security vulnerabilities, which have been fixed in the current version. Please update. Support for old versions may be purchased (at exorbitant rates) by contacting i@izs.me + glob@8.1.0: + resolution: + { + integrity: sha512-r8hpEjiQEYlF2QU0df3dS+nxxSIreXQS1qRhMJM0Q5NDdR386C7jb7Hwwod8Fgiuex+k0GFjgft18yvxm5XoCQ==, + } + engines: { node: ">=12" } + deprecated: Old versions of glob are not supported, and contain widely publicized security vulnerabilities, which have been fixed in the current version. Please update. Support for old versions may be purchased (at exorbitant rates) by contacting i@izs.me + global-directory@4.0.1: resolution: { @@ -13195,6 +13431,12 @@ packages: integrity: sha512-IScLbePpkvO846sIwOtOTDjutRMWdXdJmXdMvk6gCBHxFO8d+QKOQedyZSxFTTFYRSmlgSTDtXqqq4pcenBXLQ==, } + help-me@4.2.0: + resolution: + { + integrity: sha512-TAOnTB8Tz5Dw8penUuzHVrKNKlCIbwwbHnXraNJxPwf8LRtE2HlM84RYuezMFcwOJmoYOCWVDyJ8TQGxn9PgxA==, + } + help-me@5.0.0: resolution: { @@ -13924,6 +14166,14 @@ packages: peerDependencies: ws: "*" + isows@1.0.4: + resolution: + { + integrity: sha512-hEzjY+x9u9hPmBom9IIAqdJCwNLax+xrPb51vEPpERoFlIxgmZcHzsT5jKG06nvInKOBGvReAVz80Umed5CczQ==, + } + peerDependencies: + ws: "*" + isows@1.0.6: resolution: { @@ -15225,6 +15475,13 @@ packages: integrity: sha512-VgjWUsnnT6n+NUk6eZq77zeFdpW2LWDzP6zFGrCbHXiYNul5Dzqk2HHQ5uFH2DNW5Xbp8+jVzaeNt94ssEEl4w==, } + minimatch@5.1.9: + resolution: + { + integrity: sha512-7o1wEA2RyMP7Iu7GNba9vc0RWWGACJOCZBJX2GJWip0ikV+wcOsgVuY9uE8CPiyQhkGFSlhuSkZPavN7u1c2Fw==, + } + engines: { node: ">=10" } + minimatch@9.0.9: resolution: { @@ -16223,6 +16480,12 @@ packages: integrity: 
sha512-+KAgmVeqXYbTtU2FScx1XS3kNyfZ5TrXY07V96QnUSFqo2gAqlvmaxH67Lj7SWazqsMabf+58ctdTcBgnOLUOQ==, } + pino-abstract-transport@1.1.0: + resolution: + { + integrity: sha512-lsleG3/2a/JIWUtf9Q5gUNErBqwIu1tUKTT3dUzaf5DySw9ra1wcqKjJjLX1VTY64Wk1eEOYsVGSaGfCK85ekA==, + } + pino-abstract-transport@1.2.0: resolution: { @@ -16255,6 +16518,13 @@ packages: peerDependencies: pino: ^8.21.0 || ^9.0.0 + pino-pretty@10.2.3: + resolution: + { + integrity: sha512-4jfIUc8TC1GPUfDyMSlW1STeORqkoxec71yhxIpLDQapUu8WOuoz2TTCoidrIssyz78LZC69whBMPIKCMbi3cw==, + } + hasBin: true + pino-pretty@13.1.3: resolution: { @@ -16287,6 +16557,13 @@ packages: } hasBin: true + pino@8.16.1: + resolution: + { + integrity: sha512-3bKsVhBmgPjGV9pyn4fO/8RtoVDR8ssW1ev819FsRXlRNgW8gR/9Kx+gCK4UPWd4JjrRDLWpzd/pb1AyWm3MGA==, + } + hasBin: true + pino@8.21.0: resolution: { @@ -16556,6 +16833,13 @@ packages: } engines: { node: ">=0.10.0" } + postgres@3.4.1: + resolution: + { + integrity: sha512-Wasjv6WEzrZXbwKByR2RGD7MBfj7VBqco3hYWz8ifzSAp6tb2L6MlmcKFzkmgV1jT7/vKlcSa+lxXZeTdeVMzQ==, + } + engines: { node: ">=12" } + preact@10.24.2: resolution: { @@ -16694,6 +16978,12 @@ packages: integrity: sha512-du4wfLyj4yCZq1VupnVSZmRsPJsNuxoDQFdCFHLaYiEbFBD7QE0a+I4D7hOxrVnh78QE/YipFAj9lXHiXocV+Q==, } + process-warning@2.3.2: + resolution: + { + integrity: sha512-n9wh8tvBe5sFmsqlg+XQhaQLumwpqoAUruLwjCopgTmUBjJ/fjtBsJzKleCaIGBOMXYEhp1YfKl4d7rJ5ZKJGA==, + } + process-warning@3.0.0: resolution: { @@ -16713,6 +17003,13 @@ packages: } engines: { node: ">= 0.6.0" } + prom-client@15.0.0: + resolution: + { + integrity: sha512-UocpgIrKyA2TKLVZDSfm8rGkL13C19YrQBAiG3xo3aDFWcHedxRxI3z+cIcucoxpSO0h5lff5iv/SXoxyeopeA==, + } + engines: { node: ^16 || ^18 || >=20 } + prom-client@15.1.3: resolution: { @@ -17302,6 +17599,25 @@ packages: integrity: sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ==, } + rescript-schema@9.3.0: + resolution: + { + integrity: 
sha512-NiHAjlhFKZCmNhx/Ij40YltCEJJgVNhBWTN/ZfagTg5hdWWuvCiUacxZv+Q/QQolrAhTnHnCrL7RDvZBogHl5A==, + } + peerDependencies: + rescript: 11.x + peerDependenciesMeta: + rescript: + optional: true + + rescript@11.1.3: + resolution: + { + integrity: sha512-bI+yxDcwsv7qE34zLuXeO8Qkc2+1ng5ErlSjnUIZdrAWKoGzHXpJ6ZxiiRBUoYnoMsgRwhqvrugIFyNgWasmsw==, + } + engines: { node: ">=10" } + hasBin: true + resend@6.9.3: resolution: { @@ -17599,6 +17915,12 @@ packages: integrity: sha512-MuCAyrGZcTLfQoH2XoBlQ8C6bzwN88XT/0slOGz0pn8+gIP85BOAfYa44ZXQUTOwRwPU0QvgU+V+OSajl/59Xg==, } + secure-json-parse@2.7.0: + resolution: + { + integrity: sha512-6aU+Rwsezw7VR8/nyvKTx8QpWH9FrcYiXXlqC4z5d5XQBDRqtbfsRjnwGyqbi3gddNtWHuEk9OANUotL26qKUw==, + } + secure-json-parse@4.1.0: resolution: { @@ -18057,6 +18379,13 @@ packages: } engines: { node: ">=10" } + string-similarity@4.0.4: + resolution: + { + integrity: sha512-/q/8Q4Bl4ZKAPjj8WerIBJWALKkaPRfrvhfF8k/B23i4nzrlRj2/go1m90In7nG/3XDSbOo0+pu6RvCTM9RGMQ==, + } + deprecated: Package no longer supported. Contact Support at https://www.npmjs.com/support for more info. 
+ string-width@4.2.3: resolution: { @@ -19486,6 +19815,17 @@ packages: integrity: sha512-PnpQQMuxlwYdocC8fIJqVXvkeViHYzotI+NJrCuav0ZYFoq912ZHBk3mCeuj+5/VpodOjPe1z0Fk2ihgzlXqjQ==, } + viem@2.21.0: + resolution: + { + integrity: sha512-9g3Gw2nOU6t4bNuoDI5vwVExzIxseU0J7Jjx10gA2RNQVrytIrLxggW++tWEe3w4mnnm/pS1WgZFjQ/QKf/nHw==, + } + peerDependencies: + typescript: ">=5.0.4" + peerDependenciesMeta: + typescript: + optional: true + viem@2.23.2: resolution: { @@ -19734,6 +20074,12 @@ packages: } engines: { node: ">= 8" } + webauthn-p256@0.0.5: + resolution: + { + integrity: sha512-drMGNWKdaixZNobeORVIqq7k5DsRC9FnG201K2QjeOoQLmtSDaSsVZdkg6n5jUALJKcAG++zBPJXmv6hy0nWFg==, + } + webextension-polyfill@0.10.0: resolution: { @@ -20289,6 +20635,8 @@ packages: snapshots: "@adobe/css-tools@4.4.3": {} + "@adraffy/ens-normalize@1.10.0": {} + "@adraffy/ens-normalize@1.10.1": {} "@adraffy/ens-normalize@1.11.1": {} @@ -20300,7 +20648,7 @@ snapshots: "@jridgewell/gen-mapping": 0.3.13 "@jridgewell/trace-mapping": 0.3.31 - "@apollo/client@3.14.0(@types/react@19.2.8)(graphql-ws@6.0.7(crossws@0.3.5)(graphql@16.13.1)(ws@7.5.10(bufferutil@4.0.9)(utf-8-validate@5.0.10)))(graphql@16.13.1)(react-dom@19.2.3(react@19.2.3))(react@19.2.3)": + "@apollo/client@3.14.0(@types/react@19.2.8)(graphql-ws@6.0.7(crossws@0.3.5)(graphql@16.13.1)(ws@8.18.0(bufferutil@4.0.9)(utf-8-validate@5.0.10)))(graphql@16.13.1)(react-dom@19.2.3(react@19.2.3))(react@19.2.3)": dependencies: "@graphql-typed-document-node/core": 3.2.0(graphql@16.13.1) "@wry/caches": 1.0.1 @@ -20317,7 +20665,7 @@ snapshots: tslib: 2.8.1 zen-observable-ts: 1.2.5 optionalDependencies: - graphql-ws: 6.0.7(crossws@0.3.5)(graphql@16.13.1)(ws@7.5.10(bufferutil@4.0.9)(utf-8-validate@5.0.10)) + graphql-ws: 6.0.7(crossws@0.3.5)(graphql@16.13.1)(ws@8.18.0(bufferutil@4.0.9)(utf-8-validate@5.0.10)) react: 19.2.3 react-dom: 19.2.3(react@19.2.3) transitivePeerDependencies: @@ -21443,9 +21791,9 @@ snapshots: "@babel/helper-string-parser": 7.27.1 
"@babel/helper-validator-identifier": 7.28.5 - "@base-org/account@2.4.0(@types/react@19.2.8)(bufferutil@4.0.9)(fastestsmallesttextencoderdecoder@1.0.22)(react@19.2.3)(typescript@5.9.3)(use-sync-external-store@1.4.0(react@19.2.3))(utf-8-validate@5.0.10)(ws@7.5.10(bufferutil@4.0.9)(utf-8-validate@5.0.10))(zod@3.25.76)": + "@base-org/account@2.4.0(@types/react@19.2.8)(bufferutil@4.0.9)(fastestsmallesttextencoderdecoder@1.0.22)(react@19.2.3)(typescript@5.9.3)(use-sync-external-store@1.4.0(react@19.2.3))(utf-8-validate@5.0.10)(ws@8.18.0(bufferutil@4.0.9)(utf-8-validate@5.0.10))(zod@3.25.76)": dependencies: - "@coinbase/cdp-sdk": 1.38.6(bufferutil@4.0.9)(fastestsmallesttextencoderdecoder@1.0.22)(typescript@5.9.3)(utf-8-validate@5.0.10)(ws@7.5.10(bufferutil@4.0.9)(utf-8-validate@5.0.10)) + "@coinbase/cdp-sdk": 1.38.6(bufferutil@4.0.9)(fastestsmallesttextencoderdecoder@1.0.22)(typescript@5.9.3)(utf-8-validate@5.0.10)(ws@8.18.0(bufferutil@4.0.9)(utf-8-validate@5.0.10)) "@noble/hashes": 1.4.0 clsx: 1.2.1 eventemitter3: 5.0.1 @@ -21484,11 +21832,11 @@ snapshots: - "@chromatic-com/cypress" - "@chromatic-com/playwright" - "@coinbase/cdp-sdk@1.38.6(bufferutil@4.0.9)(fastestsmallesttextencoderdecoder@1.0.22)(typescript@5.9.3)(utf-8-validate@5.0.10)(ws@7.5.10(bufferutil@4.0.9)(utf-8-validate@5.0.10))": + "@coinbase/cdp-sdk@1.38.6(bufferutil@4.0.9)(fastestsmallesttextencoderdecoder@1.0.22)(typescript@5.9.3)(utf-8-validate@5.0.10)(ws@8.18.0(bufferutil@4.0.9)(utf-8-validate@5.0.10))": dependencies: - "@solana-program/system": 0.8.1(@solana/kit@3.0.3(fastestsmallesttextencoderdecoder@1.0.22)(typescript@5.9.3)(ws@7.5.10(bufferutil@4.0.9)(utf-8-validate@5.0.10))) - "@solana-program/token": 0.6.0(@solana/kit@3.0.3(fastestsmallesttextencoderdecoder@1.0.22)(typescript@5.9.3)(ws@7.5.10(bufferutil@4.0.9)(utf-8-validate@5.0.10))) - "@solana/kit": 3.0.3(fastestsmallesttextencoderdecoder@1.0.22)(typescript@5.9.3)(ws@7.5.10(bufferutil@4.0.9)(utf-8-validate@5.0.10)) + "@solana-program/system": 
0.8.1(@solana/kit@3.0.3(fastestsmallesttextencoderdecoder@1.0.22)(typescript@5.9.3)(ws@8.18.0(bufferutil@4.0.9)(utf-8-validate@5.0.10))) + "@solana-program/token": 0.6.0(@solana/kit@3.0.3(fastestsmallesttextencoderdecoder@1.0.22)(typescript@5.9.3)(ws@8.18.0(bufferutil@4.0.9)(utf-8-validate@5.0.10))) + "@solana/kit": 3.0.3(fastestsmallesttextencoderdecoder@1.0.22)(typescript@5.9.3)(ws@8.18.0(bufferutil@4.0.9)(utf-8-validate@5.0.10)) "@solana/web3.js": 1.98.4(bufferutil@4.0.9)(typescript@5.9.3)(utf-8-validate@5.0.10) abitype: 1.0.6(typescript@5.9.3)(zod@3.25.76) axios: 1.13.6 @@ -21685,6 +22033,14 @@ snapshots: dependencies: "@noble/ciphers": 1.3.0 + "@elastic/ecs-helpers@1.1.0": + dependencies: + fast-json-stringify: 2.7.13 + + "@elastic/ecs-pino-format@1.4.0": + dependencies: + "@elastic/ecs-helpers": 1.1.0 + "@electric-sql/pglite@0.2.13": {} "@electric-sql/pglite@0.3.16": {} @@ -21748,6 +22104,56 @@ snapshots: "@whatwg-node/promise-helpers": 1.3.2 tslib: 2.8.1 + "@envio-dev/hyperfuel-client-darwin-arm64@1.2.2": + optional: true + + "@envio-dev/hyperfuel-client-darwin-x64@1.2.2": + optional: true + + "@envio-dev/hyperfuel-client-linux-arm64-gnu@1.2.2": + optional: true + + "@envio-dev/hyperfuel-client-linux-x64-gnu@1.2.2": + optional: true + + "@envio-dev/hyperfuel-client-linux-x64-musl@1.2.2": + optional: true + + "@envio-dev/hyperfuel-client-win32-x64-msvc@1.2.2": + optional: true + + "@envio-dev/hyperfuel-client@1.2.2": + optionalDependencies: + "@envio-dev/hyperfuel-client-darwin-arm64": 1.2.2 + "@envio-dev/hyperfuel-client-darwin-x64": 1.2.2 + "@envio-dev/hyperfuel-client-linux-arm64-gnu": 1.2.2 + "@envio-dev/hyperfuel-client-linux-x64-gnu": 1.2.2 + "@envio-dev/hyperfuel-client-linux-x64-musl": 1.2.2 + "@envio-dev/hyperfuel-client-win32-x64-msvc": 1.2.2 + + "@envio-dev/hypersync-client-darwin-arm64@1.3.0": + optional: true + + "@envio-dev/hypersync-client-darwin-x64@1.3.0": + optional: true + + "@envio-dev/hypersync-client-linux-arm64-gnu@1.3.0": + optional: 
true + + "@envio-dev/hypersync-client-linux-x64-gnu@1.3.0": + optional: true + + "@envio-dev/hypersync-client-linux-x64-musl@1.3.0": + optional: true + + "@envio-dev/hypersync-client@1.3.0": + optionalDependencies: + "@envio-dev/hypersync-client-darwin-arm64": 1.3.0 + "@envio-dev/hypersync-client-darwin-x64": 1.3.0 + "@envio-dev/hypersync-client-linux-arm64-gnu": 1.3.0 + "@envio-dev/hypersync-client-linux-x64-gnu": 1.3.0 + "@envio-dev/hypersync-client-linux-x64-musl": 1.3.0 + "@esbuild-kit/core-utils@3.3.2": dependencies: esbuild: 0.18.20 @@ -24552,6 +24958,10 @@ snapshots: dependencies: "@noble/hashes": 1.3.2 + "@noble/curves@1.4.0": + dependencies: + "@noble/hashes": 1.4.0 + "@noble/curves@1.4.2": dependencies: "@noble/hashes": 1.4.0 @@ -25421,7 +25831,7 @@ snapshots: node-fetch: 3.3.2 tar: 6.2.1 - "@rainbow-me/rainbowkit@2.2.10(@tanstack/react-query@5.90.21(react@19.2.3))(@types/react@19.2.8)(react-dom@19.2.3(react@19.2.3))(react@19.2.3)(typescript@5.9.3)(viem@2.47.2(bufferutil@4.0.9)(typescript@5.9.3)(utf-8-validate@5.0.10)(zod@3.25.76))(wagmi@2.19.5(@tanstack/query-core@5.90.20)(@tanstack/react-query@5.90.21(react@19.2.3))(@types/react@19.2.8)(bufferutil@4.0.9)(fastestsmallesttextencoderdecoder@1.0.22)(react@19.2.3)(typescript@5.9.3)(utf-8-validate@5.0.10)(viem@2.47.2(bufferutil@4.0.9)(typescript@5.9.3)(utf-8-validate@5.0.10)(zod@3.25.76))(ws@7.5.10(bufferutil@4.0.9)(utf-8-validate@5.0.10))(zod@3.25.76))": + 
"@rainbow-me/rainbowkit@2.2.10(@tanstack/react-query@5.90.21(react@19.2.3))(@types/react@19.2.8)(react-dom@19.2.3(react@19.2.3))(react@19.2.3)(typescript@5.9.3)(viem@2.47.2(bufferutil@4.0.9)(typescript@5.9.3)(utf-8-validate@5.0.10)(zod@3.25.76))(wagmi@2.19.5(@tanstack/query-core@5.90.20)(@tanstack/react-query@5.90.21(react@19.2.3))(@types/react@19.2.8)(bufferutil@4.0.9)(fastestsmallesttextencoderdecoder@1.0.22)(react@19.2.3)(typescript@5.9.3)(utf-8-validate@5.0.10)(viem@2.47.2(bufferutil@4.0.9)(typescript@5.9.3)(utf-8-validate@5.0.10)(zod@3.25.76))(ws@8.18.0(bufferutil@4.0.9)(utf-8-validate@5.0.10))(zod@3.25.76))": dependencies: "@tanstack/react-query": 5.90.21(react@19.2.3) "@vanilla-extract/css": 1.17.3 @@ -25434,7 +25844,7 @@ snapshots: react-remove-scroll: 2.6.2(@types/react@19.2.8)(react@19.2.3) ua-parser-js: 1.0.40 viem: 2.47.2(bufferutil@4.0.9)(typescript@5.9.3)(utf-8-validate@5.0.10)(zod@3.25.76) - wagmi: 2.19.5(@tanstack/query-core@5.90.20)(@tanstack/react-query@5.90.21(react@19.2.3))(@types/react@19.2.8)(bufferutil@4.0.9)(fastestsmallesttextencoderdecoder@1.0.22)(react@19.2.3)(typescript@5.9.3)(utf-8-validate@5.0.10)(viem@2.47.2(bufferutil@4.0.9)(typescript@5.9.3)(utf-8-validate@5.0.10)(zod@3.25.76))(ws@7.5.10(bufferutil@4.0.9)(utf-8-validate@5.0.10))(zod@3.25.76) + wagmi: 2.19.5(@tanstack/query-core@5.90.20)(@tanstack/react-query@5.90.21(react@19.2.3))(@types/react@19.2.8)(bufferutil@4.0.9)(fastestsmallesttextencoderdecoder@1.0.22)(react@19.2.3)(typescript@5.9.3)(utf-8-validate@5.0.10)(viem@2.47.2(bufferutil@4.0.9)(typescript@5.9.3)(utf-8-validate@5.0.10)(zod@3.25.76))(ws@8.18.0(bufferutil@4.0.9)(utf-8-validate@5.0.10))(zod@3.25.76) transitivePeerDependencies: - "@types/react" - babel-plugin-macros @@ -25867,13 +26277,13 @@ snapshots: "@socket.io/component-emitter@3.1.2": {} - "@solana-program/system@0.8.1(@solana/kit@3.0.3(fastestsmallesttextencoderdecoder@1.0.22)(typescript@5.9.3)(ws@7.5.10(bufferutil@4.0.9)(utf-8-validate@5.0.10)))": + 
"@solana-program/system@0.8.1(@solana/kit@3.0.3(fastestsmallesttextencoderdecoder@1.0.22)(typescript@5.9.3)(ws@8.18.0(bufferutil@4.0.9)(utf-8-validate@5.0.10)))": dependencies: - "@solana/kit": 3.0.3(fastestsmallesttextencoderdecoder@1.0.22)(typescript@5.9.3)(ws@7.5.10(bufferutil@4.0.9)(utf-8-validate@5.0.10)) + "@solana/kit": 3.0.3(fastestsmallesttextencoderdecoder@1.0.22)(typescript@5.9.3)(ws@8.18.0(bufferutil@4.0.9)(utf-8-validate@5.0.10)) - "@solana-program/token@0.6.0(@solana/kit@3.0.3(fastestsmallesttextencoderdecoder@1.0.22)(typescript@5.9.3)(ws@7.5.10(bufferutil@4.0.9)(utf-8-validate@5.0.10)))": + "@solana-program/token@0.6.0(@solana/kit@3.0.3(fastestsmallesttextencoderdecoder@1.0.22)(typescript@5.9.3)(ws@8.18.0(bufferutil@4.0.9)(utf-8-validate@5.0.10)))": dependencies: - "@solana/kit": 3.0.3(fastestsmallesttextencoderdecoder@1.0.22)(typescript@5.9.3)(ws@7.5.10(bufferutil@4.0.9)(utf-8-validate@5.0.10)) + "@solana/kit": 3.0.3(fastestsmallesttextencoderdecoder@1.0.22)(typescript@5.9.3)(ws@8.18.0(bufferutil@4.0.9)(utf-8-validate@5.0.10)) "@solana/accounts@3.0.3(fastestsmallesttextencoderdecoder@1.0.22)(typescript@5.9.3)": dependencies: @@ -26003,7 +26413,7 @@ snapshots: transitivePeerDependencies: - fastestsmallesttextencoderdecoder - "@solana/kit@3.0.3(fastestsmallesttextencoderdecoder@1.0.22)(typescript@5.9.3)(ws@7.5.10(bufferutil@4.0.9)(utf-8-validate@5.0.10))": + "@solana/kit@3.0.3(fastestsmallesttextencoderdecoder@1.0.22)(typescript@5.9.3)(ws@8.18.0(bufferutil@4.0.9)(utf-8-validate@5.0.10))": dependencies: "@solana/accounts": 3.0.3(fastestsmallesttextencoderdecoder@1.0.22)(typescript@5.9.3) "@solana/addresses": 3.0.3(fastestsmallesttextencoderdecoder@1.0.22)(typescript@5.9.3) @@ -26017,11 +26427,11 @@ snapshots: "@solana/rpc": 3.0.3(fastestsmallesttextencoderdecoder@1.0.22)(typescript@5.9.3) "@solana/rpc-parsed-types": 3.0.3(typescript@5.9.3) "@solana/rpc-spec-types": 3.0.3(typescript@5.9.3) - "@solana/rpc-subscriptions": 
3.0.3(fastestsmallesttextencoderdecoder@1.0.22)(typescript@5.9.3)(ws@7.5.10(bufferutil@4.0.9)(utf-8-validate@5.0.10)) + "@solana/rpc-subscriptions": 3.0.3(fastestsmallesttextencoderdecoder@1.0.22)(typescript@5.9.3)(ws@8.18.0(bufferutil@4.0.9)(utf-8-validate@5.0.10)) "@solana/rpc-types": 3.0.3(fastestsmallesttextencoderdecoder@1.0.22)(typescript@5.9.3) "@solana/signers": 3.0.3(fastestsmallesttextencoderdecoder@1.0.22)(typescript@5.9.3) "@solana/sysvars": 3.0.3(fastestsmallesttextencoderdecoder@1.0.22)(typescript@5.9.3) - "@solana/transaction-confirmation": 3.0.3(fastestsmallesttextencoderdecoder@1.0.22)(typescript@5.9.3)(ws@7.5.10(bufferutil@4.0.9)(utf-8-validate@5.0.10)) + "@solana/transaction-confirmation": 3.0.3(fastestsmallesttextencoderdecoder@1.0.22)(typescript@5.9.3)(ws@8.18.0(bufferutil@4.0.9)(utf-8-validate@5.0.10)) "@solana/transaction-messages": 3.0.3(fastestsmallesttextencoderdecoder@1.0.22)(typescript@5.9.3) "@solana/transactions": 3.0.3(fastestsmallesttextencoderdecoder@1.0.22)(typescript@5.9.3) typescript: 5.9.3 @@ -26100,14 +26510,14 @@ snapshots: transitivePeerDependencies: - fastestsmallesttextencoderdecoder - "@solana/rpc-subscriptions-channel-websocket@3.0.3(typescript@5.9.3)(ws@7.5.10(bufferutil@4.0.9)(utf-8-validate@5.0.10))": + "@solana/rpc-subscriptions-channel-websocket@3.0.3(typescript@5.9.3)(ws@8.18.0(bufferutil@4.0.9)(utf-8-validate@5.0.10))": dependencies: "@solana/errors": 3.0.3(typescript@5.9.3) "@solana/functional": 3.0.3(typescript@5.9.3) "@solana/rpc-subscriptions-spec": 3.0.3(typescript@5.9.3) "@solana/subscribable": 3.0.3(typescript@5.9.3) typescript: 5.9.3 - ws: 7.5.10(bufferutil@4.0.9)(utf-8-validate@5.0.10) + ws: 8.18.0(bufferutil@4.0.9)(utf-8-validate@5.0.10) "@solana/rpc-subscriptions-spec@3.0.3(typescript@5.9.3)": dependencies: @@ -26117,7 +26527,7 @@ snapshots: "@solana/subscribable": 3.0.3(typescript@5.9.3) typescript: 5.9.3 - 
"@solana/rpc-subscriptions@3.0.3(fastestsmallesttextencoderdecoder@1.0.22)(typescript@5.9.3)(ws@7.5.10(bufferutil@4.0.9)(utf-8-validate@5.0.10))": + "@solana/rpc-subscriptions@3.0.3(fastestsmallesttextencoderdecoder@1.0.22)(typescript@5.9.3)(ws@8.18.0(bufferutil@4.0.9)(utf-8-validate@5.0.10))": dependencies: "@solana/errors": 3.0.3(typescript@5.9.3) "@solana/fast-stable-stringify": 3.0.3(typescript@5.9.3) @@ -26125,7 +26535,7 @@ snapshots: "@solana/promises": 3.0.3(typescript@5.9.3) "@solana/rpc-spec-types": 3.0.3(typescript@5.9.3) "@solana/rpc-subscriptions-api": 3.0.3(fastestsmallesttextencoderdecoder@1.0.22)(typescript@5.9.3) - "@solana/rpc-subscriptions-channel-websocket": 3.0.3(typescript@5.9.3)(ws@7.5.10(bufferutil@4.0.9)(utf-8-validate@5.0.10)) + "@solana/rpc-subscriptions-channel-websocket": 3.0.3(typescript@5.9.3)(ws@8.18.0(bufferutil@4.0.9)(utf-8-validate@5.0.10)) "@solana/rpc-subscriptions-spec": 3.0.3(typescript@5.9.3) "@solana/rpc-transformers": 3.0.3(fastestsmallesttextencoderdecoder@1.0.22)(typescript@5.9.3) "@solana/rpc-types": 3.0.3(fastestsmallesttextencoderdecoder@1.0.22)(typescript@5.9.3) @@ -26210,7 +26620,7 @@ snapshots: transitivePeerDependencies: - fastestsmallesttextencoderdecoder - "@solana/transaction-confirmation@3.0.3(fastestsmallesttextencoderdecoder@1.0.22)(typescript@5.9.3)(ws@7.5.10(bufferutil@4.0.9)(utf-8-validate@5.0.10))": + "@solana/transaction-confirmation@3.0.3(fastestsmallesttextencoderdecoder@1.0.22)(typescript@5.9.3)(ws@8.18.0(bufferutil@4.0.9)(utf-8-validate@5.0.10))": dependencies: "@solana/addresses": 3.0.3(fastestsmallesttextencoderdecoder@1.0.22)(typescript@5.9.3) "@solana/codecs-strings": 3.0.3(fastestsmallesttextencoderdecoder@1.0.22)(typescript@5.9.3) @@ -26218,7 +26628,7 @@ snapshots: "@solana/keys": 3.0.3(fastestsmallesttextencoderdecoder@1.0.22)(typescript@5.9.3) "@solana/promises": 3.0.3(typescript@5.9.3) "@solana/rpc": 3.0.3(fastestsmallesttextencoderdecoder@1.0.22)(typescript@5.9.3) - 
"@solana/rpc-subscriptions": 3.0.3(fastestsmallesttextencoderdecoder@1.0.22)(typescript@5.9.3)(ws@7.5.10(bufferutil@4.0.9)(utf-8-validate@5.0.10)) + "@solana/rpc-subscriptions": 3.0.3(fastestsmallesttextencoderdecoder@1.0.22)(typescript@5.9.3)(ws@8.18.0(bufferutil@4.0.9)(utf-8-validate@5.0.10)) "@solana/rpc-types": 3.0.3(fastestsmallesttextencoderdecoder@1.0.22)(typescript@5.9.3) "@solana/transaction-messages": 3.0.3(fastestsmallesttextencoderdecoder@1.0.22)(typescript@5.9.3) "@solana/transactions": 3.0.3(fastestsmallesttextencoderdecoder@1.0.22)(typescript@5.9.3) @@ -27260,9 +27670,9 @@ snapshots: convert-source-map: 2.0.0 tinyrainbow: 3.0.3 - "@wagmi/connectors@6.2.0(@tanstack/react-query@5.90.21(react@19.2.3))(@types/react@19.2.8)(@wagmi/core@2.22.1(@tanstack/query-core@5.90.20)(@types/react@19.2.8)(react@19.2.3)(typescript@5.9.3)(use-sync-external-store@1.4.0(react@19.2.3))(viem@2.47.2(bufferutil@4.0.9)(typescript@5.9.3)(utf-8-validate@5.0.10)(zod@3.25.76)))(bufferutil@4.0.9)(fastestsmallesttextencoderdecoder@1.0.22)(react@19.2.3)(typescript@5.9.3)(use-sync-external-store@1.4.0(react@19.2.3))(utf-8-validate@5.0.10)(viem@2.47.2(bufferutil@4.0.9)(typescript@5.9.3)(utf-8-validate@5.0.10)(zod@3.25.76))(wagmi@2.19.5(@tanstack/query-core@5.90.20)(@tanstack/react-query@5.90.21(react@19.2.3))(@types/react@19.2.8)(bufferutil@4.0.9)(fastestsmallesttextencoderdecoder@1.0.22)(react@19.2.3)(typescript@5.9.3)(utf-8-validate@5.0.10)(viem@2.47.2(bufferutil@4.0.9)(typescript@5.9.3)(utf-8-validate@5.0.10)(zod@3.25.76))(ws@7.5.10(bufferutil@4.0.9)(utf-8-validate@5.0.10))(zod@3.25.76))(ws@7.5.10(bufferutil@4.0.9)(utf-8-validate@5.0.10))(zod@3.25.76)": + 
"@wagmi/connectors@6.2.0(@tanstack/react-query@5.90.21(react@19.2.3))(@types/react@19.2.8)(@wagmi/core@2.22.1(@tanstack/query-core@5.90.20)(@types/react@19.2.8)(react@19.2.3)(typescript@5.9.3)(use-sync-external-store@1.4.0(react@19.2.3))(viem@2.47.2(bufferutil@4.0.9)(typescript@5.9.3)(utf-8-validate@5.0.10)(zod@3.25.76)))(bufferutil@4.0.9)(fastestsmallesttextencoderdecoder@1.0.22)(react@19.2.3)(typescript@5.9.3)(use-sync-external-store@1.4.0(react@19.2.3))(utf-8-validate@5.0.10)(viem@2.47.2(bufferutil@4.0.9)(typescript@5.9.3)(utf-8-validate@5.0.10)(zod@3.25.76))(wagmi@2.19.5(@tanstack/query-core@5.90.20)(@tanstack/react-query@5.90.21(react@19.2.3))(@types/react@19.2.8)(bufferutil@4.0.9)(fastestsmallesttextencoderdecoder@1.0.22)(react@19.2.3)(typescript@5.9.3)(utf-8-validate@5.0.10)(viem@2.47.2(bufferutil@4.0.9)(typescript@5.9.3)(utf-8-validate@5.0.10)(zod@3.25.76))(ws@8.18.0(bufferutil@4.0.9)(utf-8-validate@5.0.10))(zod@3.25.76))(ws@8.18.0(bufferutil@4.0.9)(utf-8-validate@5.0.10))(zod@3.25.76)": dependencies: - "@base-org/account": 2.4.0(@types/react@19.2.8)(bufferutil@4.0.9)(fastestsmallesttextencoderdecoder@1.0.22)(react@19.2.3)(typescript@5.9.3)(use-sync-external-store@1.4.0(react@19.2.3))(utf-8-validate@5.0.10)(ws@7.5.10(bufferutil@4.0.9)(utf-8-validate@5.0.10))(zod@3.25.76) + "@base-org/account": 2.4.0(@types/react@19.2.8)(bufferutil@4.0.9)(fastestsmallesttextencoderdecoder@1.0.22)(react@19.2.3)(typescript@5.9.3)(use-sync-external-store@1.4.0(react@19.2.3))(utf-8-validate@5.0.10)(ws@8.18.0(bufferutil@4.0.9)(utf-8-validate@5.0.10))(zod@3.25.76) "@coinbase/wallet-sdk": 4.3.6(@types/react@19.2.8)(bufferutil@4.0.9)(react@19.2.3)(typescript@5.9.3)(use-sync-external-store@1.4.0(react@19.2.3))(utf-8-validate@5.0.10)(zod@3.25.76) "@gemini-wallet/core": 0.3.2(viem@2.47.2(bufferutil@4.0.9)(typescript@5.9.3)(utf-8-validate@5.0.10)(zod@3.25.76)) "@metamask/sdk": 0.33.1(bufferutil@4.0.9)(utf-8-validate@5.0.10) @@ -27271,7 +27681,7 @@ snapshots: "@wagmi/core": 
2.22.1(@tanstack/query-core@5.90.20)(@types/react@19.2.8)(react@19.2.3)(typescript@5.9.3)(use-sync-external-store@1.4.0(react@19.2.3))(viem@2.47.2(bufferutil@4.0.9)(typescript@5.9.3)(utf-8-validate@5.0.10)(zod@3.25.76)) "@walletconnect/ethereum-provider": 2.21.1(@types/react@19.2.8)(bufferutil@4.0.9)(react@19.2.3)(typescript@5.9.3)(utf-8-validate@5.0.10)(zod@3.25.76) cbw-sdk: "@coinbase/wallet-sdk@3.9.3" - porto: 0.2.35(@tanstack/react-query@5.90.21(react@19.2.3))(@types/react@19.2.8)(@wagmi/core@2.22.1(@tanstack/query-core@5.90.20)(@types/react@19.2.8)(react@19.2.3)(typescript@5.9.3)(use-sync-external-store@1.4.0(react@19.2.3))(viem@2.47.2(bufferutil@4.0.9)(typescript@5.9.3)(utf-8-validate@5.0.10)(zod@3.25.76)))(react@19.2.3)(typescript@5.9.3)(use-sync-external-store@1.4.0(react@19.2.3))(viem@2.47.2(bufferutil@4.0.9)(typescript@5.9.3)(utf-8-validate@5.0.10)(zod@3.25.76))(wagmi@2.19.5(@tanstack/query-core@5.90.20)(@tanstack/react-query@5.90.21(react@19.2.3))(@types/react@19.2.8)(bufferutil@4.0.9)(fastestsmallesttextencoderdecoder@1.0.22)(react@19.2.3)(typescript@5.9.3)(utf-8-validate@5.0.10)(viem@2.47.2(bufferutil@4.0.9)(typescript@5.9.3)(utf-8-validate@5.0.10)(zod@3.25.76))(ws@7.5.10(bufferutil@4.0.9)(utf-8-validate@5.0.10))(zod@3.25.76)) + porto: 
0.2.35(@tanstack/react-query@5.90.21(react@19.2.3))(@types/react@19.2.8)(@wagmi/core@2.22.1(@tanstack/query-core@5.90.20)(@types/react@19.2.8)(react@19.2.3)(typescript@5.9.3)(use-sync-external-store@1.4.0(react@19.2.3))(viem@2.47.2(bufferutil@4.0.9)(typescript@5.9.3)(utf-8-validate@5.0.10)(zod@3.25.76)))(react@19.2.3)(typescript@5.9.3)(use-sync-external-store@1.4.0(react@19.2.3))(viem@2.47.2(bufferutil@4.0.9)(typescript@5.9.3)(utf-8-validate@5.0.10)(zod@3.25.76))(wagmi@2.19.5(@tanstack/query-core@5.90.20)(@tanstack/react-query@5.90.21(react@19.2.3))(@types/react@19.2.8)(bufferutil@4.0.9)(fastestsmallesttextencoderdecoder@1.0.22)(react@19.2.3)(typescript@5.9.3)(utf-8-validate@5.0.10)(viem@2.47.2(bufferutil@4.0.9)(typescript@5.9.3)(utf-8-validate@5.0.10)(zod@3.25.76))(ws@8.18.0(bufferutil@4.0.9)(utf-8-validate@5.0.10))(zod@3.25.76)) viem: 2.47.2(bufferutil@4.0.9)(typescript@5.9.3)(utf-8-validate@5.0.10)(zod@3.25.76) optionalDependencies: typescript: 5.9.3 @@ -28024,6 +28434,11 @@ snapshots: typescript: 5.9.3 zod: 3.25.76 + abitype@1.0.5(typescript@5.9.3)(zod@3.25.76): + optionalDependencies: + typescript: 5.9.3 + zod: 3.25.76 + abitype@1.0.6(typescript@5.9.3)(zod@3.25.76): optionalDependencies: typescript: 5.9.3 @@ -28482,6 +28897,8 @@ snapshots: big.js@6.2.2: {} + bignumber.js@9.1.2: {} + bintrees@1.0.2: {} bl@4.1.0: @@ -29383,29 +29800,32 @@ snapshots: transitivePeerDependencies: - supports-color - drizzle-orm@0.41.0(@electric-sql/pglite@0.2.13)(@opentelemetry/api@1.9.0)(@types/pg@8.18.0)(kysely@0.26.3)(pg@8.20.0): + drizzle-orm@0.41.0(@electric-sql/pglite@0.2.13)(@opentelemetry/api@1.9.0)(@types/pg@8.18.0)(kysely@0.26.3)(pg@8.20.0)(postgres@3.4.1): optionalDependencies: "@electric-sql/pglite": 0.2.13 "@opentelemetry/api": 1.9.0 "@types/pg": 8.18.0 kysely: 0.26.3 pg: 8.20.0 + postgres: 3.4.1 - drizzle-orm@0.41.0(@electric-sql/pglite@0.3.16)(@opentelemetry/api@1.9.0)(@types/pg@8.18.0)(kysely@0.26.3)(pg@8.20.0): + 
drizzle-orm@0.41.0(@electric-sql/pglite@0.3.16)(@opentelemetry/api@1.9.0)(@types/pg@8.18.0)(kysely@0.26.3)(pg@8.20.0)(postgres@3.4.1): optionalDependencies: "@electric-sql/pglite": 0.3.16 "@opentelemetry/api": 1.9.0 "@types/pg": 8.18.0 kysely: 0.26.3 pg: 8.20.0 + postgres: 3.4.1 - drizzle-orm@0.45.1(@electric-sql/pglite@0.3.16)(@opentelemetry/api@1.9.0)(@types/pg@8.18.0)(kysely@0.26.3)(pg@8.20.0): + drizzle-orm@0.45.1(@electric-sql/pglite@0.3.16)(@opentelemetry/api@1.9.0)(@types/pg@8.18.0)(kysely@0.26.3)(pg@8.20.0)(postgres@3.4.1): optionalDependencies: "@electric-sql/pglite": 0.3.16 "@opentelemetry/api": 1.9.0 "@types/pg": 8.18.0 kysely: 0.26.3 pg: 8.20.0 + postgres: 3.4.1 dset@3.1.4: {} @@ -29497,6 +29917,42 @@ snapshots: env-paths@3.0.0: {} + envio-darwin-arm64@2.32.12: + optional: true + + envio-darwin-x64@2.32.12: + optional: true + + envio-linux-arm64@2.32.12: + optional: true + + envio-linux-x64@2.32.12: + optional: true + + envio@2.32.12(bufferutil@4.0.9)(typescript@5.9.3)(utf-8-validate@5.0.10)(zod@3.25.76): + dependencies: + "@elastic/ecs-pino-format": 1.4.0 + "@envio-dev/hyperfuel-client": 1.2.2 + "@envio-dev/hypersync-client": 1.3.0 + bignumber.js: 9.1.2 + eventsource: 4.1.0 + pino: 8.16.1 + pino-pretty: 10.2.3 + prom-client: 15.0.0 + rescript: 11.1.3 + rescript-schema: 9.3.0(rescript@11.1.3) + viem: 2.21.0(bufferutil@4.0.9)(typescript@5.9.3)(utf-8-validate@5.0.10)(zod@3.25.76) + optionalDependencies: + envio-darwin-arm64: 2.32.12 + envio-darwin-x64: 2.32.12 + envio-linux-arm64: 2.32.12 + envio-linux-x64: 2.32.12 + transitivePeerDependencies: + - bufferutil + - typescript + - utf-8-validate + - zod + environment@1.1.0: {} error-ex@1.3.2: @@ -30106,6 +30562,12 @@ snapshots: events@3.3.0: {} + eventsource-parser@3.0.6: {} + + eventsource@4.1.0: + dependencies: + eventsource-parser: 3.0.6 + evp_bytestokey@1.0.3: dependencies: md5.js: 1.3.5 @@ -30150,6 +30612,8 @@ snapshots: eyes@0.1.8: {} + fast-copy@3.0.2: {} + fast-copy@4.0.2: {} fast-deep-equal@3.1.3: 
{} @@ -30182,6 +30646,13 @@ snapshots: fast-json-stable-stringify@2.1.0: {} + fast-json-stringify@2.7.13: + dependencies: + ajv: 6.14.0 + deepmerge: 4.3.1 + rfdc: 1.4.1 + string-similarity: 4.0.4 + fast-json-stringify@5.16.1: dependencies: "@fastify/merge-json-schemas": 0.1.1 @@ -30330,7 +30801,7 @@ snapshots: cross-spawn: 7.0.6 signal-exit: 4.1.0 - forge-std@https://codeload.github.com/foundry-rs/forge-std/tar.gz/4540e4aadda88eeb19a54d2b5ad2117c2c7632ec: + forge-std@https://codeload.github.com/foundry-rs/forge-std/tar.gz/f494b0c2c045dda3df3d761bc82209b9a015c4e7: {} fork-ts-checker-webpack-plugin@9.1.0(typescript@5.9.3)(webpack@5.105.4): @@ -30490,6 +30961,14 @@ snapshots: once: 1.4.0 path-is-absolute: 1.0.1 + glob@8.1.0: + dependencies: + fs.realpath: 1.0.0 + inflight: 1.0.6 + inherits: 2.0.4 + minimatch: 5.1.9 + once: 1.4.0 + global-directory@4.0.1: dependencies: ini: 4.1.1 @@ -30595,12 +31074,12 @@ snapshots: dependencies: graphql: 16.13.1 - graphql-ws@6.0.7(crossws@0.3.5)(graphql@16.13.1)(ws@7.5.10(bufferutil@4.0.9)(utf-8-validate@5.0.10)): + graphql-ws@6.0.7(crossws@0.3.5)(graphql@16.13.1)(ws@8.18.0(bufferutil@4.0.9)(utf-8-validate@5.0.10)): dependencies: graphql: 16.13.1 optionalDependencies: crossws: 0.3.5 - ws: 7.5.10(bufferutil@4.0.9)(utf-8-validate@5.0.10) + ws: 8.18.0(bufferutil@4.0.9)(utf-8-validate@5.0.10) optional: true graphql-ws@6.0.7(crossws@0.3.5)(graphql@16.13.1)(ws@8.19.0(bufferutil@4.0.9)(utf-8-validate@5.0.10)): @@ -30712,6 +31191,11 @@ snapshots: headers-polyfill@4.0.3: {} + help-me@4.2.0: + dependencies: + glob: 8.1.0 + readable-stream: 3.6.2 + help-me@5.0.0: {} hermes-estree@0.25.1: {} @@ -31109,6 +31593,10 @@ snapshots: dependencies: ws: 8.19.0(bufferutil@4.0.9)(utf-8-validate@5.0.10) + isows@1.0.4(ws@8.17.1(bufferutil@4.0.9)(utf-8-validate@5.0.10)): + dependencies: + ws: 8.17.1(bufferutil@4.0.9)(utf-8-validate@5.0.10) + isows@1.0.6(ws@8.18.0(bufferutil@4.0.9)(utf-8-validate@5.0.10)): dependencies: ws: 
8.18.0(bufferutil@4.0.9)(utf-8-validate@5.0.10) @@ -32026,6 +32514,10 @@ snapshots: dependencies: brace-expansion: 1.1.12 + minimatch@5.1.9: + dependencies: + brace-expansion: 2.0.2 + minimatch@9.0.9: dependencies: brace-expansion: 2.0.2 @@ -32057,7 +32549,7 @@ snapshots: mlly@1.8.0: dependencies: - acorn: 8.15.0 + acorn: 8.16.0 pathe: 2.0.3 pkg-types: 1.3.1 ufo: 1.6.1 @@ -32699,6 +33191,11 @@ snapshots: duplexify: 4.1.3 split2: 4.2.0 + pino-abstract-transport@1.1.0: + dependencies: + readable-stream: 4.7.0 + split2: 4.2.0 + pino-abstract-transport@1.2.0: dependencies: readable-stream: 4.7.0 @@ -32727,6 +33224,23 @@ snapshots: transitivePeerDependencies: - "@opentelemetry/api" + pino-pretty@10.2.3: + dependencies: + colorette: 2.0.20 + dateformat: 4.6.3 + fast-copy: 3.0.2 + fast-safe-stringify: 2.1.1 + help-me: 4.2.0 + joycon: 3.1.1 + minimist: 1.2.8 + on-exit-leak-free: 2.1.2 + pino-abstract-transport: 1.2.0 + pump: 3.0.3 + readable-stream: 4.7.0 + secure-json-parse: 2.7.0 + sonic-boom: 3.8.1 + strip-json-comments: 3.1.1 + pino-pretty@13.1.3: dependencies: colorette: 2.0.20 @@ -32763,6 +33277,20 @@ snapshots: sonic-boom: 2.8.0 thread-stream: 0.15.2 + pino@8.16.1: + dependencies: + atomic-sleep: 1.0.0 + fast-redact: 3.5.0 + on-exit-leak-free: 2.1.2 + pino-abstract-transport: 1.1.0 + pino-std-serializers: 6.2.2 + process-warning: 2.3.2 + quick-format-unescaped: 4.0.4 + real-require: 0.2.0 + safe-stable-stringify: 2.5.0 + sonic-boom: 3.8.1 + thread-stream: 2.7.0 + pino@8.21.0: dependencies: atomic-sleep: 1.0.0 @@ -32819,7 +33347,7 @@ snapshots: pngjs@5.0.0: {} - ponder@0.16.3(@opentelemetry/api@1.9.0)(@types/node@20.19.37)(@types/pg@8.18.0)(bufferutil@4.0.9)(hono@4.12.7)(lightningcss@1.31.1)(terser@5.46.0)(typescript@5.9.3)(utf-8-validate@5.0.10)(viem@2.47.2(bufferutil@4.0.9)(typescript@5.9.3)(utf-8-validate@5.0.10)(zod@3.25.76))(zod@3.25.76): + 
ponder@0.16.3(@opentelemetry/api@1.9.0)(@types/node@20.19.37)(@types/pg@8.18.0)(bufferutil@4.0.9)(hono@4.12.7)(lightningcss@1.31.1)(postgres@3.4.1)(terser@5.46.0)(typescript@5.9.3)(utf-8-validate@5.0.10)(viem@2.47.2(bufferutil@4.0.9)(typescript@5.9.3)(utf-8-validate@5.0.10)(zod@3.25.76))(zod@3.25.76): dependencies: "@babel/code-frame": 7.27.1 "@commander-js/extra-typings": 12.1.0(commander@12.1.0) @@ -32836,7 +33364,7 @@ snapshots: dataloader: 2.2.3 detect-package-manager: 3.0.2 dotenv: 16.6.1 - drizzle-orm: 0.41.0(@electric-sql/pglite@0.2.13)(@opentelemetry/api@1.9.0)(@types/pg@8.18.0)(kysely@0.26.3)(pg@8.20.0) + drizzle-orm: 0.41.0(@electric-sql/pglite@0.2.13)(@opentelemetry/api@1.9.0)(@types/pg@8.18.0)(kysely@0.26.3)(pg@8.20.0)(postgres@3.4.1) glob: 10.5.0 graphql: 16.8.2 graphql-yoga: 5.17.1(graphql@16.8.2) @@ -32903,7 +33431,7 @@ snapshots: pony-cause@2.1.11: {} - porto@0.2.35(@tanstack/react-query@5.90.21(react@19.2.3))(@types/react@19.2.8)(@wagmi/core@2.22.1(@tanstack/query-core@5.90.20)(@types/react@19.2.8)(react@19.2.3)(typescript@5.9.3)(use-sync-external-store@1.4.0(react@19.2.3))(viem@2.47.2(bufferutil@4.0.9)(typescript@5.9.3)(utf-8-validate@5.0.10)(zod@3.25.76)))(react@19.2.3)(typescript@5.9.3)(use-sync-external-store@1.4.0(react@19.2.3))(viem@2.47.2(bufferutil@4.0.9)(typescript@5.9.3)(utf-8-validate@5.0.10)(zod@3.25.76))(wagmi@2.19.5(@tanstack/query-core@5.90.20)(@tanstack/react-query@5.90.21(react@19.2.3))(@types/react@19.2.8)(bufferutil@4.0.9)(fastestsmallesttextencoderdecoder@1.0.22)(react@19.2.3)(typescript@5.9.3)(utf-8-validate@5.0.10)(viem@2.47.2(bufferutil@4.0.9)(typescript@5.9.3)(utf-8-validate@5.0.10)(zod@3.25.76))(ws@7.5.10(bufferutil@4.0.9)(utf-8-validate@5.0.10))(zod@3.25.76)): + 
porto@0.2.35(@tanstack/react-query@5.90.21(react@19.2.3))(@types/react@19.2.8)(@wagmi/core@2.22.1(@tanstack/query-core@5.90.20)(@types/react@19.2.8)(react@19.2.3)(typescript@5.9.3)(use-sync-external-store@1.4.0(react@19.2.3))(viem@2.47.2(bufferutil@4.0.9)(typescript@5.9.3)(utf-8-validate@5.0.10)(zod@3.25.76)))(react@19.2.3)(typescript@5.9.3)(use-sync-external-store@1.4.0(react@19.2.3))(viem@2.47.2(bufferutil@4.0.9)(typescript@5.9.3)(utf-8-validate@5.0.10)(zod@3.25.76))(wagmi@2.19.5(@tanstack/query-core@5.90.20)(@tanstack/react-query@5.90.21(react@19.2.3))(@types/react@19.2.8)(bufferutil@4.0.9)(fastestsmallesttextencoderdecoder@1.0.22)(react@19.2.3)(typescript@5.9.3)(utf-8-validate@5.0.10)(viem@2.47.2(bufferutil@4.0.9)(typescript@5.9.3)(utf-8-validate@5.0.10)(zod@3.25.76))(ws@8.18.0(bufferutil@4.0.9)(utf-8-validate@5.0.10))(zod@3.25.76)): dependencies: "@wagmi/core": 2.22.1(@tanstack/query-core@5.90.20)(@types/react@19.2.8)(react@19.2.3)(typescript@5.9.3)(use-sync-external-store@1.4.0(react@19.2.3))(viem@2.47.2(bufferutil@4.0.9)(typescript@5.9.3)(utf-8-validate@5.0.10)(zod@3.25.76)) hono: 4.12.7 @@ -32917,7 +33445,7 @@ snapshots: "@tanstack/react-query": 5.90.21(react@19.2.3) react: 19.2.3 typescript: 5.9.3 - wagmi: 2.19.5(@tanstack/query-core@5.90.20)(@tanstack/react-query@5.90.21(react@19.2.3))(@types/react@19.2.8)(bufferutil@4.0.9)(fastestsmallesttextencoderdecoder@1.0.22)(react@19.2.3)(typescript@5.9.3)(utf-8-validate@5.0.10)(viem@2.47.2(bufferutil@4.0.9)(typescript@5.9.3)(utf-8-validate@5.0.10)(zod@3.25.76))(ws@7.5.10(bufferutil@4.0.9)(utf-8-validate@5.0.10))(zod@3.25.76) + wagmi: 2.19.5(@tanstack/query-core@5.90.20)(@tanstack/react-query@5.90.21(react@19.2.3))(@types/react@19.2.8)(bufferutil@4.0.9)(fastestsmallesttextencoderdecoder@1.0.22)(react@19.2.3)(typescript@5.9.3)(utf-8-validate@5.0.10)(viem@2.47.2(bufferutil@4.0.9)(typescript@5.9.3)(utf-8-validate@5.0.10)(zod@3.25.76))(ws@8.18.0(bufferutil@4.0.9)(utf-8-validate@5.0.10))(zod@3.25.76) 
transitivePeerDependencies: - "@types/react" - immer @@ -32997,6 +33525,9 @@ snapshots: dependencies: xtend: 4.0.2 + postgres@3.4.1: + optional: true + preact@10.24.2: {} preact@10.29.0: {} @@ -33036,12 +33567,19 @@ snapshots: process-warning@1.0.0: {} + process-warning@2.3.2: {} + process-warning@3.0.0: {} process-warning@5.0.0: {} process@0.11.10: {} + prom-client@15.0.0: + dependencies: + "@opentelemetry/api": 1.9.0 + tdigest: 0.1.2 + prom-client@15.1.3: dependencies: "@opentelemetry/api": 1.9.0 @@ -33464,6 +34002,12 @@ snapshots: requires-port@1.0.0: {} + rescript-schema@9.3.0(rescript@11.1.3): + optionalDependencies: + rescript: 11.1.3 + + rescript@11.1.3: {} + resend@6.9.3: dependencies: postal-mime: 2.7.3 @@ -33653,6 +34197,8 @@ snapshots: scuid@1.1.0: {} + secure-json-parse@2.7.0: {} + secure-json-parse@4.1.0: {} semver-compare@1.0.0: {} @@ -33967,6 +34513,8 @@ snapshots: char-regex: 1.0.2 strip-ansi: 6.0.1 + string-similarity@4.0.4: {} + string-width@4.2.3: dependencies: emoji-regex: 8.0.0 @@ -34804,6 +35352,24 @@ snapshots: d3-time: 3.1.0 d3-timer: 3.0.1 + viem@2.21.0(bufferutil@4.0.9)(typescript@5.9.3)(utf-8-validate@5.0.10)(zod@3.25.76): + dependencies: + "@adraffy/ens-normalize": 1.10.0 + "@noble/curves": 1.4.0 + "@noble/hashes": 1.4.0 + "@scure/bip32": 1.4.0 + "@scure/bip39": 1.3.0 + abitype: 1.0.5(typescript@5.9.3)(zod@3.25.76) + isows: 1.0.4(ws@8.17.1(bufferutil@4.0.9)(utf-8-validate@5.0.10)) + webauthn-p256: 0.0.5 + ws: 8.17.1(bufferutil@4.0.9)(utf-8-validate@5.0.10) + optionalDependencies: + typescript: 5.9.3 + transitivePeerDependencies: + - bufferutil + - utf-8-validate + - zod + viem@2.23.2(bufferutil@4.0.9)(typescript@5.9.3)(utf-8-validate@5.0.10)(zod@3.25.76): dependencies: "@noble/curves": 1.8.1 @@ -35045,10 +35611,10 @@ snapshots: dependencies: xml-name-validator: 5.0.0 - 
wagmi@2.19.5(@tanstack/query-core@5.90.20)(@tanstack/react-query@5.90.21(react@19.2.3))(@types/react@19.2.8)(bufferutil@4.0.9)(fastestsmallesttextencoderdecoder@1.0.22)(react@19.2.3)(typescript@5.9.3)(utf-8-validate@5.0.10)(viem@2.47.2(bufferutil@4.0.9)(typescript@5.9.3)(utf-8-validate@5.0.10)(zod@3.25.76))(ws@7.5.10(bufferutil@4.0.9)(utf-8-validate@5.0.10))(zod@3.25.76): + wagmi@2.19.5(@tanstack/query-core@5.90.20)(@tanstack/react-query@5.90.21(react@19.2.3))(@types/react@19.2.8)(bufferutil@4.0.9)(fastestsmallesttextencoderdecoder@1.0.22)(react@19.2.3)(typescript@5.9.3)(utf-8-validate@5.0.10)(viem@2.47.2(bufferutil@4.0.9)(typescript@5.9.3)(utf-8-validate@5.0.10)(zod@3.25.76))(ws@8.18.0(bufferutil@4.0.9)(utf-8-validate@5.0.10))(zod@3.25.76): dependencies: "@tanstack/react-query": 5.90.21(react@19.2.3) - "@wagmi/connectors": 6.2.0(@tanstack/react-query@5.90.21(react@19.2.3))(@types/react@19.2.8)(@wagmi/core@2.22.1(@tanstack/query-core@5.90.20)(@types/react@19.2.8)(react@19.2.3)(typescript@5.9.3)(use-sync-external-store@1.4.0(react@19.2.3))(viem@2.47.2(bufferutil@4.0.9)(typescript@5.9.3)(utf-8-validate@5.0.10)(zod@3.25.76)))(bufferutil@4.0.9)(fastestsmallesttextencoderdecoder@1.0.22)(react@19.2.3)(typescript@5.9.3)(use-sync-external-store@1.4.0(react@19.2.3))(utf-8-validate@5.0.10)(viem@2.47.2(bufferutil@4.0.9)(typescript@5.9.3)(utf-8-validate@5.0.10)(zod@3.25.76))(wagmi@2.19.5(@tanstack/query-core@5.90.20)(@tanstack/react-query@5.90.21(react@19.2.3))(@types/react@19.2.8)(bufferutil@4.0.9)(fastestsmallesttextencoderdecoder@1.0.22)(react@19.2.3)(typescript@5.9.3)(utf-8-validate@5.0.10)(viem@2.47.2(bufferutil@4.0.9)(typescript@5.9.3)(utf-8-validate@5.0.10)(zod@3.25.76))(ws@7.5.10(bufferutil@4.0.9)(utf-8-validate@5.0.10))(zod@3.25.76))(ws@7.5.10(bufferutil@4.0.9)(utf-8-validate@5.0.10))(zod@3.25.76) + "@wagmi/connectors": 
6.2.0(@tanstack/react-query@5.90.21(react@19.2.3))(@types/react@19.2.8)(@wagmi/core@2.22.1(@tanstack/query-core@5.90.20)(@types/react@19.2.8)(react@19.2.3)(typescript@5.9.3)(use-sync-external-store@1.4.0(react@19.2.3))(viem@2.47.2(bufferutil@4.0.9)(typescript@5.9.3)(utf-8-validate@5.0.10)(zod@3.25.76)))(bufferutil@4.0.9)(fastestsmallesttextencoderdecoder@1.0.22)(react@19.2.3)(typescript@5.9.3)(use-sync-external-store@1.4.0(react@19.2.3))(utf-8-validate@5.0.10)(viem@2.47.2(bufferutil@4.0.9)(typescript@5.9.3)(utf-8-validate@5.0.10)(zod@3.25.76))(wagmi@2.19.5(@tanstack/query-core@5.90.20)(@tanstack/react-query@5.90.21(react@19.2.3))(@types/react@19.2.8)(bufferutil@4.0.9)(fastestsmallesttextencoderdecoder@1.0.22)(react@19.2.3)(typescript@5.9.3)(utf-8-validate@5.0.10)(viem@2.47.2(bufferutil@4.0.9)(typescript@5.9.3)(utf-8-validate@5.0.10)(zod@3.25.76))(ws@8.18.0(bufferutil@4.0.9)(utf-8-validate@5.0.10))(zod@3.25.76))(ws@8.18.0(bufferutil@4.0.9)(utf-8-validate@5.0.10))(zod@3.25.76) "@wagmi/core": 2.22.1(@tanstack/query-core@5.90.20)(@types/react@19.2.8)(react@19.2.3)(typescript@5.9.3)(use-sync-external-store@1.4.0(react@19.2.3))(viem@2.47.2(bufferutil@4.0.9)(typescript@5.9.3)(utf-8-validate@5.0.10)(zod@3.25.76)) react: 19.2.3 use-sync-external-store: 1.4.0(react@19.2.3) @@ -35106,6 +35672,11 @@ snapshots: web-streams-polyfill@3.3.3: {} + webauthn-p256@0.0.5: + dependencies: + "@noble/curves": 1.9.7 + "@noble/hashes": 1.8.0 + webextension-polyfill@0.10.0: {} webidl-conversions@3.0.1: {} diff --git a/ts-node b/ts-node new file mode 100644 index 000000000..e69de29bb From 5fe39871322b6a07ed48239663f4a12982c5bc1e Mon Sep 17 00:00:00 2001 From: Pedro Binotto Date: Wed, 8 Apr 2026 13:19:40 -0300 Subject: [PATCH 02/17] feat: move implementation to separate package --- .../abis/ens-governor.json | 2 +- .../abis/ens-token.json | 2 +- .../config.yaml | 0 apps/hypersync-indexer/generated/.gitignore | 32 + apps/hypersync-indexer/generated/.npmrc | 1 + 
apps/hypersync-indexer/generated/LICENSE.md | 67 + .../generated/docker-compose.yaml | 52 + apps/hypersync-indexer/generated/index.d.ts | 26 + apps/hypersync-indexer/generated/index.js | 13 + apps/hypersync-indexer/generated/package.json | 49 + .../generated/persisted_state.envio.json | 7 + .../generated/pnpm-lock.yaml | 3012 +++++++++++++++++ .../hypersync-indexer/generated/rescript.json | 35 + .../generated/schema.graphql | 1258 +++++++ .../generated/src/Benchmark.res | 394 +++ .../generated/src/ConfigYAML.gen.ts | 38 + .../generated/src/ConfigYAML.res | 92 + apps/hypersync-indexer/generated/src/Env.res | 247 ++ .../generated/src/EventProcessing.res | 478 +++ .../generated/src/Generated.res | 206 ++ .../generated/src/GqlDbCustomTypes.gen.ts | 8 + .../generated/src/GqlDbCustomTypes.res | 11 + .../generated/src/Handlers.gen.ts | 165 + .../generated/src/Handlers.res | 23 + apps/hypersync-indexer/generated/src/IO.res | 396 +++ .../generated/src/Index.bs.js | 209 ++ .../hypersync-indexer/generated/src/Index.res | 332 ++ .../generated/src/Js.shim.ts | 11 + .../generated/src/LoadLayer.res | 444 +++ .../generated/src/LoadLayer.resi | 32 + apps/hypersync-indexer/generated/src/Path.res | 1 + .../generated/src/PersistedState.res | 25 + .../generated/src/TestHelpers.gen.ts | 188 + .../generated/src/TestHelpers.res | 504 +++ .../src/TestHelpers_MockAddresses.gen.ts | 12 + .../src/TestHelpers_MockAddresses.res | 30 + .../generated/src/TestHelpers_MockDb.gen.ts | 87 + .../generated/src/TestHelpers_MockDb.res | 820 +++++ .../generated/src/Types.gen.ts | 557 +++ .../hypersync-indexer/generated/src/Types.res | 957 ++++++ apps/hypersync-indexer/generated/src/Types.ts | 832 +++++ .../generated/src/UserContext.res | 360 ++ .../generated/src/bindings/Dotenv.res | 17 + .../generated/src/bindings/Ethers.gen.ts | 15 + .../generated/src/bindings/OpaqueTypes.ts | 5 + .../generated/src/bindings/RescriptMocha.res | 123 + .../generated/src/bindings/Yargs.res | 8 + 
.../hypersync-indexer/generated/src/db/Db.res | 30 + .../generated/src/db/DbFunctions.res | 33 + .../generated/src/db/DbFunctionsEntities.res | 22 + .../src/db/DbFunctionsImplementation.js | 17 + .../generated/src/db/Entities.gen.ts | 233 ++ .../generated/src/db/Entities.res | 2077 ++++++++++++ .../generated/src/db/Enums.gen.ts | 41 + .../generated/src/db/Enums.res | 108 + .../generated/src/db/Migrations.res | 57 + .../src/eventFetching/ChainFetcher.res | 464 +++ .../src/eventFetching/ChainManager.res | 174 + .../src/eventFetching/NetworkSources.res | 95 + .../generated/src/globalState/GlobalState.res | 1188 +++++++ .../src/globalState/GlobalStateManager.res | 76 + .../src/globalState/GlobalStateManager.resi | 7 + .../generated/src/ink/EnvioInkApp.res | 67 + .../generated/src/ink/bindings/DateFns.res | 71 + .../generated/src/ink/bindings/Ink.res | 355 ++ .../generated/src/ink/bindings/Style.res | 123 + .../ink/components/BufferedProgressBar.res | 40 + .../src/ink/components/ChainData.res | 161 + .../src/ink/components/CustomHooks.res | 114 + .../generated/src/ink/components/Messages.res | 41 + .../generated/src/ink/components/SyncETA.res | 198 ++ apps/hypersync-indexer/package.json | 30 + .../schema.graphql | 0 .../src/eventHandlers/ENSGovernor.ts | 0 .../src/eventHandlers/ENSToken.ts | 0 .../src/eventHandlers/delegation.ts | 212 ++ .../src/eventHandlers/index.ts | 3 + .../src/eventHandlers/metrics/circulating.ts | 37 + .../src/eventHandlers/metrics/delegated.ts | 34 + .../src/eventHandlers/metrics/index.ts | 4 + .../src/eventHandlers/metrics/supply.ts | 59 + .../src/eventHandlers/metrics/total.ts | 58 + .../src/eventHandlers/shared.ts | 166 + .../src/eventHandlers/transfer.ts | 149 + .../src/eventHandlers/voting.ts | 261 ++ apps/hypersync-indexer/src/lib/constants.ts | 908 +++++ .../hypersync-indexer/src/lib/date-helpers.ts | 62 + apps/hypersync-indexer/src/lib/enums.ts | 21 + apps/hypersync-indexer/src/lib/utils.ts | 32 + apps/hypersync-indexer/tsconfig.json | 34 
+ apps/indexer/package.json | 2 - apps/indexer/src/eventHandlers/delegation.ts | 238 +- apps/indexer/src/eventHandlers/index.ts | 6 +- .../src/eventHandlers/metrics/circulating.ts | 37 +- .../src/eventHandlers/metrics/delegated.ts | 30 +- .../src/eventHandlers/metrics/index.ts | 8 +- .../src/eventHandlers/metrics/supply.ts | 38 +- .../src/eventHandlers/metrics/total.ts | 37 +- apps/indexer/src/eventHandlers/shared.ts | 147 +- apps/indexer/src/eventHandlers/transfer.ts | 188 +- apps/indexer/src/eventHandlers/voting.ts | 220 +- apps/indexer/src/lib/blockTime.ts | 15 + apps/indexer/src/lib/constants.ts | 55 +- apps/indexer/src/lib/date-helpers.ts | 2 +- apps/indexer/src/lib/enums.ts | 44 +- apps/indexer/src/lib/query-helpers.ts | 2 +- apps/indexer/src/lib/time-series.ts | 4 +- apps/indexer/tsconfig.json | 13 +- eslint.config.mjs | 19 +- package.json | 1 + pnpm-lock.yaml | 86 +- 111 files changed, 20693 insertions(+), 544 deletions(-) rename apps/{indexer => hypersync-indexer}/abis/ens-governor.json (99%) rename apps/{indexer => hypersync-indexer}/abis/ens-token.json (99%) rename apps/{indexer => hypersync-indexer}/config.yaml (100%) create mode 100644 apps/hypersync-indexer/generated/.gitignore create mode 100644 apps/hypersync-indexer/generated/.npmrc create mode 100644 apps/hypersync-indexer/generated/LICENSE.md create mode 100644 apps/hypersync-indexer/generated/docker-compose.yaml create mode 100644 apps/hypersync-indexer/generated/index.d.ts create mode 100644 apps/hypersync-indexer/generated/index.js create mode 100644 apps/hypersync-indexer/generated/package.json create mode 100644 apps/hypersync-indexer/generated/persisted_state.envio.json create mode 100644 apps/hypersync-indexer/generated/pnpm-lock.yaml create mode 100644 apps/hypersync-indexer/generated/rescript.json create mode 100644 apps/hypersync-indexer/generated/schema.graphql create mode 100644 apps/hypersync-indexer/generated/src/Benchmark.res create mode 100644 
apps/hypersync-indexer/generated/src/ConfigYAML.gen.ts create mode 100644 apps/hypersync-indexer/generated/src/ConfigYAML.res create mode 100644 apps/hypersync-indexer/generated/src/Env.res create mode 100644 apps/hypersync-indexer/generated/src/EventProcessing.res create mode 100644 apps/hypersync-indexer/generated/src/Generated.res create mode 100644 apps/hypersync-indexer/generated/src/GqlDbCustomTypes.gen.ts create mode 100644 apps/hypersync-indexer/generated/src/GqlDbCustomTypes.res create mode 100644 apps/hypersync-indexer/generated/src/Handlers.gen.ts create mode 100644 apps/hypersync-indexer/generated/src/Handlers.res create mode 100644 apps/hypersync-indexer/generated/src/IO.res create mode 100644 apps/hypersync-indexer/generated/src/Index.bs.js create mode 100644 apps/hypersync-indexer/generated/src/Index.res create mode 100644 apps/hypersync-indexer/generated/src/Js.shim.ts create mode 100644 apps/hypersync-indexer/generated/src/LoadLayer.res create mode 100644 apps/hypersync-indexer/generated/src/LoadLayer.resi create mode 100644 apps/hypersync-indexer/generated/src/Path.res create mode 100644 apps/hypersync-indexer/generated/src/PersistedState.res create mode 100644 apps/hypersync-indexer/generated/src/TestHelpers.gen.ts create mode 100644 apps/hypersync-indexer/generated/src/TestHelpers.res create mode 100644 apps/hypersync-indexer/generated/src/TestHelpers_MockAddresses.gen.ts create mode 100644 apps/hypersync-indexer/generated/src/TestHelpers_MockAddresses.res create mode 100644 apps/hypersync-indexer/generated/src/TestHelpers_MockDb.gen.ts create mode 100644 apps/hypersync-indexer/generated/src/TestHelpers_MockDb.res create mode 100644 apps/hypersync-indexer/generated/src/Types.gen.ts create mode 100644 apps/hypersync-indexer/generated/src/Types.res create mode 100644 apps/hypersync-indexer/generated/src/Types.ts create mode 100644 apps/hypersync-indexer/generated/src/UserContext.res create mode 100644 
apps/hypersync-indexer/generated/src/bindings/Dotenv.res create mode 100644 apps/hypersync-indexer/generated/src/bindings/Ethers.gen.ts create mode 100644 apps/hypersync-indexer/generated/src/bindings/OpaqueTypes.ts create mode 100644 apps/hypersync-indexer/generated/src/bindings/RescriptMocha.res create mode 100644 apps/hypersync-indexer/generated/src/bindings/Yargs.res create mode 100644 apps/hypersync-indexer/generated/src/db/Db.res create mode 100644 apps/hypersync-indexer/generated/src/db/DbFunctions.res create mode 100644 apps/hypersync-indexer/generated/src/db/DbFunctionsEntities.res create mode 100644 apps/hypersync-indexer/generated/src/db/DbFunctionsImplementation.js create mode 100644 apps/hypersync-indexer/generated/src/db/Entities.gen.ts create mode 100644 apps/hypersync-indexer/generated/src/db/Entities.res create mode 100644 apps/hypersync-indexer/generated/src/db/Enums.gen.ts create mode 100644 apps/hypersync-indexer/generated/src/db/Enums.res create mode 100644 apps/hypersync-indexer/generated/src/db/Migrations.res create mode 100644 apps/hypersync-indexer/generated/src/eventFetching/ChainFetcher.res create mode 100644 apps/hypersync-indexer/generated/src/eventFetching/ChainManager.res create mode 100644 apps/hypersync-indexer/generated/src/eventFetching/NetworkSources.res create mode 100644 apps/hypersync-indexer/generated/src/globalState/GlobalState.res create mode 100644 apps/hypersync-indexer/generated/src/globalState/GlobalStateManager.res create mode 100644 apps/hypersync-indexer/generated/src/globalState/GlobalStateManager.resi create mode 100644 apps/hypersync-indexer/generated/src/ink/EnvioInkApp.res create mode 100644 apps/hypersync-indexer/generated/src/ink/bindings/DateFns.res create mode 100644 apps/hypersync-indexer/generated/src/ink/bindings/Ink.res create mode 100644 apps/hypersync-indexer/generated/src/ink/bindings/Style.res create mode 100644 apps/hypersync-indexer/generated/src/ink/components/BufferedProgressBar.res create mode 
100644 apps/hypersync-indexer/generated/src/ink/components/ChainData.res create mode 100644 apps/hypersync-indexer/generated/src/ink/components/CustomHooks.res create mode 100644 apps/hypersync-indexer/generated/src/ink/components/Messages.res create mode 100644 apps/hypersync-indexer/generated/src/ink/components/SyncETA.res create mode 100644 apps/hypersync-indexer/package.json rename apps/{indexer => hypersync-indexer}/schema.graphql (100%) rename apps/{indexer => hypersync-indexer}/src/eventHandlers/ENSGovernor.ts (100%) rename apps/{indexer => hypersync-indexer}/src/eventHandlers/ENSToken.ts (100%) create mode 100644 apps/hypersync-indexer/src/eventHandlers/delegation.ts create mode 100644 apps/hypersync-indexer/src/eventHandlers/index.ts create mode 100644 apps/hypersync-indexer/src/eventHandlers/metrics/circulating.ts create mode 100644 apps/hypersync-indexer/src/eventHandlers/metrics/delegated.ts create mode 100644 apps/hypersync-indexer/src/eventHandlers/metrics/index.ts create mode 100644 apps/hypersync-indexer/src/eventHandlers/metrics/supply.ts create mode 100644 apps/hypersync-indexer/src/eventHandlers/metrics/total.ts create mode 100644 apps/hypersync-indexer/src/eventHandlers/shared.ts create mode 100644 apps/hypersync-indexer/src/eventHandlers/transfer.ts create mode 100644 apps/hypersync-indexer/src/eventHandlers/voting.ts create mode 100644 apps/hypersync-indexer/src/lib/constants.ts create mode 100644 apps/hypersync-indexer/src/lib/date-helpers.ts create mode 100644 apps/hypersync-indexer/src/lib/enums.ts create mode 100644 apps/hypersync-indexer/src/lib/utils.ts create mode 100644 apps/hypersync-indexer/tsconfig.json create mode 100644 apps/indexer/src/lib/blockTime.ts diff --git a/apps/indexer/abis/ens-governor.json b/apps/hypersync-indexer/abis/ens-governor.json similarity index 99% rename from apps/indexer/abis/ens-governor.json rename to apps/hypersync-indexer/abis/ens-governor.json index ad8f4d77a..b9ef26c6f 100644 --- 
a/apps/indexer/abis/ens-governor.json +++ b/apps/hypersync-indexer/abis/ens-governor.json @@ -784,4 +784,4 @@ "stateMutability": "pure", "type": "function" } -] \ No newline at end of file +] diff --git a/apps/indexer/abis/ens-token.json b/apps/hypersync-indexer/abis/ens-token.json similarity index 99% rename from apps/indexer/abis/ens-token.json rename to apps/hypersync-indexer/abis/ens-token.json index 9d2ea9ae6..0b7ab9802 100644 --- a/apps/indexer/abis/ens-token.json +++ b/apps/hypersync-indexer/abis/ens-token.json @@ -837,4 +837,4 @@ "stateMutability": "nonpayable", "type": "function" } -] \ No newline at end of file +] diff --git a/apps/indexer/config.yaml b/apps/hypersync-indexer/config.yaml similarity index 100% rename from apps/indexer/config.yaml rename to apps/hypersync-indexer/config.yaml diff --git a/apps/hypersync-indexer/generated/.gitignore b/apps/hypersync-indexer/generated/.gitignore new file mode 100644 index 000000000..69f7d54c4 --- /dev/null +++ b/apps/hypersync-indexer/generated/.gitignore @@ -0,0 +1,32 @@ +*.exe +*.obj +*.out +*.compile +*.native +*.byte +*.cmo +*.annot +*.cmi +*.cmx +*.cmt +*.cmti +*.cma +*.a +*.cmxa +*.obj +*~ +*.annot +*.cmj +*.bak +/lib/ +*.mlast +*.mliast +.vscode +.merlin +.bsb.lock +/node_modules/ +/benchmarks/ +*.res.js +*.res.mjs +logs/* +*BenchmarkCache.json diff --git a/apps/hypersync-indexer/generated/.npmrc b/apps/hypersync-indexer/generated/.npmrc new file mode 100644 index 000000000..d3a714f25 --- /dev/null +++ b/apps/hypersync-indexer/generated/.npmrc @@ -0,0 +1 @@ +save-exact=true # Since we want specific versions of envio to be completely stable. 
diff --git a/apps/hypersync-indexer/generated/LICENSE.md b/apps/hypersync-indexer/generated/LICENSE.md new file mode 100644 index 000000000..418103b21 --- /dev/null +++ b/apps/hypersync-indexer/generated/LICENSE.md @@ -0,0 +1,67 @@ +## HyperIndex End-User License Agreement (EULA) + +This agreement describes the users' rights and the conditions upon which the Software and Generated Code may be used. The user should review the entire agreement, including any supplemental license terms that accompany the Software, since all of the terms are important and together create this agreement that applies to them. + +### 1. Definitions + +**Software:** HyperIndex, a copyrightable work created by Envio and licensed under this End User License Agreement (“EULA”). + +**Generated Code:** In the context of this license agreement, the term "generated code" refers to computer programming code that is produced automatically by the Software based on input provided by the user. + +**Licensed Material:** The Software and Generated Code defined here will be collectively referred to as “Licensed Material”. + +### 2. Installation and User Rights + +**License:** The Software is provided under this EULA. By agreeing to the EULA terms, you are granted the right to install and operate one instance of the Software on your device (referred to as the licensed device), for the use of one individual at a time, on the condition that you adhere to all terms outlined in this agreement. +The licensor provides you with a non-exclusive, royalty-free, worldwide license that is non-sublicensable and non-transferable. This license allows you to use the Software subject to the limitations and conditions outlined in this EULA. +With one license, the user can only use the Software on a single device. + +**Device:** In this agreement, "device" refers to a hardware system, whether physical or virtual, equipped with an internal storage device capable of executing the Software. 
This includes hardware partitions, which are considered as individual devices for the purposes of this agreement. Updates may be provided to the Software, and these updates may alter the minimum hardware requirements necessary for the Software. It is the responsibility of users to comply with any changing hardware requirements. + +**Updates:** The Software may be updated automatically. With each update, the EULA may be amended, and it is the users' responsibility to comply with the amendments. + +**Limitations:** Envio reserves all rights, including those under intellectual property laws, not expressly granted in this agreement. For instance, this license does not confer upon you the right to, and you are prohibited from: + +(i) Publishing, copying (other than the permitted backup copy), renting, leasing, or lending the Software; + +(ii) Transferring the Software (except as permitted by this agreement); + +(iii) Circumventing any technical restrictions or limitations in the Software; + +(iv) Using the Software as server Software, for commercial hosting, making the Software available for simultaneous use by multiple users over a network, installing the Software on a server and allowing users to access it remotely, or installing the Software on a device solely for remote user use; + +(v) Reverse engineering, decompiling, or disassembling the Software, or attempting to do so, except and only to the extent that the foregoing restriction is (a) permitted by applicable law; (b) permitted by licensing terms governing the use of open-source components that may be included with the Software and + +(vi) When using the Software, you may not use any features in any manner that could interfere with anyone else's use of them, or attempt to gain unauthorized access to or use of any service, data, account, or network. + +These limitations apply specifically to the Software and do not extend to the Generated Code. 
Details regarding the use of the Generated Code, including associated limitations, are provided below. + +### 3. Use of the Generated Code + +**Limitations:** Users can use, copy, distribute, make available, and create derivative works of the Generated Code freely, subject to the limitations and conditions specified below. + +(i) The user is prohibited from offering the Generated Code or any software that includes the Generated Code to third parties as a hosted or managed service, where the service grants users access to a significant portion of the Software's features or functionality. + +(ii) The user is not permitted to tamper with, alter, disable, or bypass the functionality of the license key in the Software. Additionally, the user may not eliminate or conceal any functionality within the Software that is safeguarded by the license key. + +(iii) Any modification, removal, or concealment of licensing, copyright, or other notices belonging to the licensor in the Software is strictly forbidden. The use of the licensor's trademarks is subject to relevant laws. + +**Credit:** If the user utilizes the Generated Code to develop and release a new software, product or service, the license agreement for said software, product or service must include proper credit to HyperIndex. + +**Liability:** Envio does not provide any assurance that the Generated Code functions correctly, nor does it assume any responsibility in this regard. + +Additionally, it will be the responsibility of the user to assess whether the Generated Code is suitable for the products and services provided by the user. Envio will not bear any responsibility if the Generated Code is found unsuitable for the products and services provided by the user. + +### 4. 
Additional Terms + +**Disclaimer of Warranties and Limitation of Liability:** + +(i) Unless expressly undertaken by the Licensor separately, the Licensed Material is provided on an as-is, as-available basis, and the Licensor makes no representations or warranties of any kind regarding the Licensed Material, whether express, implied, statutory, or otherwise. This encompasses, without limitation, warranties of title, merchantability, fitness for a particular purpose, non-infringement, absence of latent or other defects, accuracy, or the presence or absence of errors, whether known or discoverable. If disclaimers of warranties are not permitted in whole or in part, this disclaimer may not apply to You. + +(ii) To the fullest extent permitted by law, under no circumstances shall the Licensor be liable to You under any legal theory (including, but not limited to, negligence) for any direct, special, indirect, incidental, consequential, punitive, exemplary, or other losses, costs, expenses, or damages arising from the use of the Licensed Material, even if the Licensor has been advised of the possibility of such losses, costs, expenses, or damages. If limitations of liability are not permitted in whole or in part, this limitation may not apply to You. + +(iii) The disclaimers of warranties and limitations of liability outlined above shall be construed in a manner that most closely approximates an absolute disclaimer and waiver of all liability, to the fullest extent permitted by law. + +**Applicable Law and Competent Courts:** This EULA and shall be governed by and construed in accordance with the laws of England. The courts of England shall have exclusive jurisdiction to settle any dispute arising out of or in connection with this EULA. + +**Additional Agreements:** If the user chooses to use the Software, it may be required to agree to additional terms or agreements outside of this EULA. 
diff --git a/apps/hypersync-indexer/generated/docker-compose.yaml b/apps/hypersync-indexer/generated/docker-compose.yaml new file mode 100644 index 000000000..9ecc5fe16 --- /dev/null +++ b/apps/hypersync-indexer/generated/docker-compose.yaml @@ -0,0 +1,52 @@ +services: + envio-postgres: + image: postgres:17.5 + restart: always + ports: + - "${ENVIO_PG_PORT:-5433}:5432" + volumes: + - db_data:/var/lib/postgresql/data + environment: + POSTGRES_PASSWORD: ${ENVIO_PG_PASSWORD:-testing} + POSTGRES_USER: ${ENVIO_PG_USER:-postgres} + POSTGRES_DB: ${ENVIO_PG_DATABASE:-envio-dev} + networks: + - my-proxy-net + graphql-engine: + image: hasura/graphql-engine:v2.43.0 + ports: + - "${HASURA_EXTERNAL_PORT:-8080}:8080" + user: 1001:1001 + depends_on: + - "envio-postgres" + restart: always + environment: + # TODO: refine migrations. For now we will run hasura setup via custom scripts, rather than standard migrations. + # See details of this image here: https://hasura.io/docs/latest/graphql/core/migrations/advanced/auto-apply-migrations.html + # HASURA_GRAPHQL_MIGRATIONS_DIR: /hasura-migrations + # HASURA_GRAPHQL_METADATA_DIR: /hasura-metadata + HASURA_GRAPHQL_DATABASE_URL: postgres://${ENVIO_PG_USER:-postgres}:${ENVIO_PG_PASSWORD:-testing}@envio-postgres:5432/${ENVIO_PG_DATABASE:-envio-dev} + HASURA_GRAPHQL_ENABLE_CONSOLE: ${HASURA_GRAPHQL_ENABLE_CONSOLE:-true} # can make this default to false once we are further in our development. 
+ HASURA_GRAPHQL_ENABLED_LOG_TYPES: + startup, http-log, webhook-log, websocket-log, + query-log + HASURA_GRAPHQL_NO_OF_RETRIES: 10 + HASURA_GRAPHQL_ADMIN_SECRET: ${HASURA_GRAPHQL_ADMIN_SECRET:-testing} + HASURA_GRAPHQL_STRINGIFY_NUMERIC_TYPES: "true" + PORT: 8080 + HASURA_GRAPHQL_UNAUTHORIZED_ROLE: public + healthcheck: + # CMD from a GH issue thread, no curl or wget installed with image + # but hasura comes with a /healthz public endpoint for checking server health + test: timeout 1s bash -c ':> /dev/tcp/127.0.0.1/8080' || exit 1 + interval: 5s + timeout: 2s + retries: 50 + start_period: 5s + networks: + - my-proxy-net +volumes: + db_data: +networks: + my-proxy-net: + name: local_test_network diff --git a/apps/hypersync-indexer/generated/index.d.ts b/apps/hypersync-indexer/generated/index.d.ts new file mode 100644 index 000000000..66185c668 --- /dev/null +++ b/apps/hypersync-indexer/generated/index.d.ts @@ -0,0 +1,26 @@ +export { + ENSGovernor, + ENSToken, + onBlock +} from "./src/Handlers.gen"; +export type * from "./src/Types.gen"; +import { + ENSGovernor, + ENSToken, + MockDb, + Addresses +} from "./src/TestHelpers.gen"; + +export const TestHelpers = { + ENSGovernor, + ENSToken, + MockDb, + Addresses +}; + +export { + EventType, + MetricType, +} from "./src/Enum.gen"; + +export {default as BigDecimal} from 'bignumber.js'; diff --git a/apps/hypersync-indexer/generated/index.js b/apps/hypersync-indexer/generated/index.js new file mode 100644 index 000000000..075fc5440 --- /dev/null +++ b/apps/hypersync-indexer/generated/index.js @@ -0,0 +1,13 @@ +/** + This file serves as an entry point when referencing generated as a node module + */ + +const handlers = require("./src/Handlers.res.js"); +const TestHelpers = require("./src/TestHelpers.res.js"); +const BigDecimal = require("bignumber.js"); + +module.exports = { + ...handlers, + BigDecimal, + TestHelpers, +}; diff --git a/apps/hypersync-indexer/generated/package.json b/apps/hypersync-indexer/generated/package.json 
new file mode 100644 index 000000000..00e12e16f --- /dev/null +++ b/apps/hypersync-indexer/generated/package.json @@ -0,0 +1,49 @@ +{ + "name": "generated", + "version": "0.1.0", + "main": "index.js", + "types": "index.d.ts", + "private": true, + "scripts": { + "clean": "rescript clean", + "build": "rescript", + "watch": "rescript -w", + "format": "rescript format -all", + "db-up": "node -e 'require(`./src/db/Migrations.res.js`).runUpMigrations(true)'", + "db-down": "node -e 'require(`./src/db/Migrations.res.js`).runDownMigrations(true)'", + "db-setup": "node -e 'require(`./src/db/Migrations.res.js`).runUpMigrations(true, true)'", + "print-benchmark-summary": "node -e 'require(`./src/Benchmark.res.js`).Summary.printSummary()'", + "start": "ts-node src/Index.res.js" + }, + "keywords": [ + "ReScript" + ], + "engines": { + "node": ">=18.0.0" + }, + "author": "", + "license": "MIT", + "dependencies": { + "ts-node": "10.9.1", + "@rescript/react": "0.12.1", + "bignumber.js": "9.1.2", + "date-fns": "3.3.1", + "dotenv": "16.4.5", + "ethers": "6.8.0", + "express": "4.19.2", + "ink": "3.2.0", + "ink-big-text": "1.2.0", + "ink-spinner": "4.0.3", + "js-sdsl": "4.4.2", + "pino": "8.16.1", + "postgres": "3.4.1", + "react": "18.2.0", + "rescript": "11.1.3", + "rescript-envsafe": "5.0.0", + "rescript-schema": "9.3.0", + "envio": "2.32.12", + "viem": "2.21.0", + "yargs": "17.7.2", + "prom-client": "15.0.0" + } +} diff --git a/apps/hypersync-indexer/generated/persisted_state.envio.json b/apps/hypersync-indexer/generated/persisted_state.envio.json new file mode 100644 index 000000000..05e2dd24b --- /dev/null +++ b/apps/hypersync-indexer/generated/persisted_state.envio.json @@ -0,0 +1,7 @@ +{ + "envio_version": "2.32.12", + "config_hash": "[236, 193, 166, 118, 4, 248, 28, 111, 95, 5, 106, 36, 46, 36, 209, 74, 248, 27, 4, 69, 60, 66, 65, 99, 48, 89, 175, 255, 204, 241, 113, 249]", + "schema_hash": "0713695db6e57c692ec36029c80572226826c973473597b63e3b085219363624", + 
"handler_files_hash": "a3b06148f0aa5235020dcf46a3d27d54c5cd69afcae844b9c31a8fd32d4bb275", + "abi_files_hash": "17400171c10de0ef7089db6132608fce07e5447f9329200c91096baffed85f32" +} diff --git a/apps/hypersync-indexer/generated/pnpm-lock.yaml b/apps/hypersync-indexer/generated/pnpm-lock.yaml new file mode 100644 index 000000000..9c65e6b8d --- /dev/null +++ b/apps/hypersync-indexer/generated/pnpm-lock.yaml @@ -0,0 +1,3012 @@ +lockfileVersion: "9.0" + +settings: + autoInstallPeers: true + excludeLinksFromLockfile: false + +importers: + .: + dependencies: + "@rescript/react": + specifier: 0.12.1 + version: 0.12.1(react-dom@19.2.4(react@18.2.0))(react@18.2.0) + bignumber.js: + specifier: 9.1.2 + version: 9.1.2 + date-fns: + specifier: 3.3.1 + version: 3.3.1 + dotenv: + specifier: 16.4.5 + version: 16.4.5 + envio: + specifier: 2.32.12 + version: 2.32.12(typescript@6.0.2) + ethers: + specifier: 6.8.0 + version: 6.8.0 + express: + specifier: 4.19.2 + version: 4.19.2 + ink: + specifier: 3.2.0 + version: 3.2.0(react@18.2.0) + ink-big-text: + specifier: 1.2.0 + version: 1.2.0(ink@3.2.0(react@18.2.0))(react@18.2.0) + ink-spinner: + specifier: 4.0.3 + version: 4.0.3(ink@3.2.0(react@18.2.0))(react@18.2.0) + js-sdsl: + specifier: 4.4.2 + version: 4.4.2 + pino: + specifier: 8.16.1 + version: 8.16.1 + postgres: + specifier: 3.4.1 + version: 3.4.1 + prom-client: + specifier: 15.0.0 + version: 15.0.0 + react: + specifier: 18.2.0 + version: 18.2.0 + rescript: + specifier: 11.1.3 + version: 11.1.3 + rescript-envsafe: + specifier: 5.0.0 + version: 5.0.0(rescript-schema@9.3.0(rescript@11.1.3))(rescript@11.1.3) + rescript-schema: + specifier: 9.3.0 + version: 9.3.0(rescript@11.1.3) + ts-node: + specifier: 10.9.1 + version: 10.9.1(@types/node@18.15.13)(typescript@6.0.2) + viem: + specifier: 2.21.0 + version: 2.21.0(typescript@6.0.2) + yargs: + specifier: 17.7.2 + version: 17.7.2 + +packages: + "@adraffy/ens-normalize@1.10.0": + resolution: + { + integrity: 
sha512-nA9XHtlAkYfJxY7bce8DcN7eKxWWCWkU+1GR9d+U6MbNpfwQp8TI7vqOsBsMcHoT4mBu2kypKoSKnghEzOOq5Q==, + } + + "@cspotcode/source-map-support@0.8.1": + resolution: + { + integrity: sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==, + } + engines: { node: ">=12" } + + "@elastic/ecs-helpers@1.1.0": + resolution: + { + integrity: sha512-MDLb2aFeGjg46O5mLpdCzT5yOUDnXToJSrco2ShqGIXxNJaM8uJjX+4nd+hRYV4Vex8YJyDtOFEVBldQct6ndg==, + } + engines: { node: ">=10" } + + "@elastic/ecs-pino-format@1.4.0": + resolution: + { + integrity: sha512-eCSBUTgl8KbPyxky8cecDRLCYu2C1oFV4AZ72bEsI+TxXEvaljaL2kgttfzfu7gW+M89eCz55s49uF2t+YMTWA==, + } + engines: { node: ">=10" } + + "@envio-dev/hyperfuel-client-darwin-arm64@1.2.2": + resolution: + { + integrity: sha512-eQyd9kJCIz/4WCTjkjpQg80DA3pdneHP7qhJIVQ2ZG+Jew9o5XDG+uI0Y16AgGzZ6KGmJSJF6wyUaaAjJfbO1Q==, + } + engines: { node: ">= 10" } + cpu: [arm64] + os: [darwin] + + "@envio-dev/hyperfuel-client-darwin-x64@1.2.2": + resolution: + { + integrity: sha512-l7lRMSoyIiIvKZgQPfgqg7H1xnrQ37A8yUp4S2ys47R8f/wSCSrmMaY1u7n6CxVYCpR9fajwy0/356UgwwhVKw==, + } + engines: { node: ">= 10" } + cpu: [x64] + os: [darwin] + + "@envio-dev/hyperfuel-client-linux-arm64-gnu@1.2.2": + resolution: + { + integrity: sha512-kNiC/1fKuXnoSxp8yEsloDw4Ot/mIcNoYYGLl2CipSIpBtSuiBH5nb6eBcxnRZdKOwf5dKZtZ7MVPL9qJocNJw==, + } + engines: { node: ">= 10" } + cpu: [arm64] + os: [linux] + + "@envio-dev/hyperfuel-client-linux-x64-gnu@1.2.2": + resolution: + { + integrity: sha512-XDkvkBG/frS+xiZkJdY4KqOaoAwyxPdi2MysDQgF8NmZdssi32SWch0r4LTqKWLLlCBg9/R55POeXL5UAjg2wQ==, + } + engines: { node: ">= 10" } + cpu: [x64] + os: [linux] + + "@envio-dev/hyperfuel-client-linux-x64-musl@1.2.2": + resolution: + { + integrity: sha512-DKnKJJSwsYtA7YT0EFGhFB5Eqoo42X0l0vZBv4lDuxngEXiiNjeLemXoKQVDzhcbILD7eyXNa5jWUc+2hpmkEg==, + } + engines: { node: ">= 10" } + cpu: [x64] + os: [linux] + + "@envio-dev/hyperfuel-client-win32-x64-msvc@1.2.2": + resolution: + { + 
integrity: sha512-SwIgTAVM9QhCFPyHwL+e1yQ6o3paV6q25klESkXw+r/KW9QPhOOyA6Yr8nfnur3uqMTLJHAKHTLUnkyi/Nh7Aw==, + } + engines: { node: ">= 10" } + cpu: [x64] + os: [win32] + + "@envio-dev/hyperfuel-client@1.2.2": + resolution: + { + integrity: sha512-raKA6DshYSle0sAOHBV1OkSRFMN+Mkz8sFiMmS3k+m5nP6pP56E17CRRePBL5qmR6ZgSEvGOz/44QUiKNkK9Pg==, + } + engines: { node: ">= 10" } + + "@envio-dev/hypersync-client-darwin-arm64@1.3.0": + resolution: + { + integrity: sha512-JZwiVRbMSuJnKsVUpfjTHc3YgAMvGlyuqWQxVc7Eok4Xp/sZLUCXRQUykbCh6fOUWRmoa2JG/ykP/NotoTRCBg==, + } + engines: { node: ">= 10" } + cpu: [arm64] + os: [darwin] + + "@envio-dev/hypersync-client-darwin-x64@1.3.0": + resolution: + { + integrity: sha512-2eSzQqqqFBMK2enVucYGcny5Ep4DEKYxf3Xme7z9qp2d3c6fMcbVvM4Gt8KOzb7ySjwJ2gU+qY2h545T2NiJXQ==, + } + engines: { node: ">= 10" } + cpu: [x64] + os: [darwin] + + "@envio-dev/hypersync-client-linux-arm64-gnu@1.3.0": + resolution: + { + integrity: sha512-gsjMp3WKekwnA89HvJXvcTM3BE5wVFG/qTF4rmk3rGiXhZ+MGaZQKrYRAhnzQZblueFtF/xnnBYpO35Z3ZFThg==, + } + engines: { node: ">= 10" } + cpu: [arm64] + os: [linux] + + "@envio-dev/hypersync-client-linux-x64-gnu@1.3.0": + resolution: + { + integrity: sha512-Lkvi4lRVwCyFOXf9LYH2X91zmW2l1vbfojKhTwKgqFWv6PMN5atlYjt+/NcUCAAhk5EUavWGjoikwnvLp870cg==, + } + engines: { node: ">= 10" } + cpu: [x64] + os: [linux] + + "@envio-dev/hypersync-client-linux-x64-musl@1.3.0": + resolution: + { + integrity: sha512-UIjB/gUX2sl23EMXLBxqtkgMnOjNSiaHK+CSU5vXMXkzL3fOGbz24bvyaPsSv82cxCFEE0yTwlSKkCX6/L8o6Q==, + } + engines: { node: ">= 10" } + cpu: [x64] + os: [linux] + + "@envio-dev/hypersync-client@1.3.0": + resolution: + { + integrity: sha512-wUdfZzbsFPbGq6n/1mmUMsWuiAil+m+fL/GBX5LGUyMJV86TXy2SBtAqYYNyDxWLO6gvGr6PYKrP8pLVAUZDZg==, + } + engines: { node: ">= 10" } + + "@jridgewell/resolve-uri@3.1.2": + resolution: + { + integrity: sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==, + } + engines: { node: ">=6.0.0" } + + 
"@jridgewell/sourcemap-codec@1.5.5": + resolution: + { + integrity: sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==, + } + + "@jridgewell/trace-mapping@0.3.9": + resolution: + { + integrity: sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==, + } + + "@noble/curves@1.2.0": + resolution: + { + integrity: sha512-oYclrNgRaM9SsBUBVbb8M6DTV7ZHRTKugureoYEncY5c65HOmRzvSiTE3y5CYaPYJA/GVkrhXEoF0M3Ya9PMnw==, + } + + "@noble/curves@1.4.0": + resolution: + { + integrity: sha512-p+4cb332SFCrReJkCYe8Xzm0OWi4Jji5jVdIZRL/PmacmDkFNw6MrrV+gGpiPxLHbV+zKFRywUWbaseT+tZRXg==, + } + + "@noble/hashes@1.3.2": + resolution: + { + integrity: sha512-MVC8EAQp7MvEcm30KWENFjgR+Mkmf+D189XJTkFIlwohU5hcBbn1ZkKq7KVTi2Hme3PMGF390DaL52beVrIihQ==, + } + engines: { node: ">= 16" } + + "@noble/hashes@1.4.0": + resolution: + { + integrity: sha512-V1JJ1WTRUqHHrOSh597hURcMqVKVGL/ea3kv0gSnEdsEZ0/+VyPghM1lMNGc00z7CIQorSvbKpuJkxvuHbvdbg==, + } + engines: { node: ">= 16" } + + "@opentelemetry/api@1.9.1": + resolution: + { + integrity: sha512-gLyJlPHPZYdAk1JENA9LeHejZe1Ti77/pTeFm/nMXmQH/HFZlcS/O2XJB+L8fkbrNSqhdtlvjBVjxwUYanNH5Q==, + } + engines: { node: ">=8.0.0" } + + "@rescript/react@0.12.1": + resolution: + { + integrity: sha512-ZD7nhDr5FZgLYqRH9s4CNM+LRz/3IMuTb+LH12fd2Akk0xYkYUP+DZveB2VQUC2UohJnTf/c8yPSNsiFihVCCg==, + } + peerDependencies: + react: ">=18.0.0" + react-dom: ">=18.0.0" + + "@scure/base@1.1.9": + resolution: + { + integrity: sha512-8YKhl8GHiNI/pU2VMaofa2Tor7PJRAjwQLBBuilkJ9L5+13yVbC7JO/wS7piioAvPSwR3JKM1IJ/u4xQzbcXKg==, + } + + "@scure/bip32@1.4.0": + resolution: + { + integrity: sha512-sVUpc0Vq3tXCkDGYVWGIZTRfnvu8LoTDaev7vbwh0omSvVORONr960MQWdKqJDCReIEmTj3PAr73O3aoxz7OPg==, + } + + "@scure/bip39@1.3.0": + resolution: + { + integrity: sha512-disdg7gHuTDZtY+ZdkmLpPCk7fxZSu3gBiEGuoC1XYxv9cGx3Z6cpTggCgW6odSOOIXCiDjuGejW+aJKCY/pIQ==, + } + + "@tsconfig/node10@1.0.12": + resolution: + { + integrity: 
sha512-UCYBaeFvM11aU2y3YPZ//O5Rhj+xKyzy7mvcIoAjASbigy8mHMryP5cK7dgjlz2hWxh1g5pLw084E0a/wlUSFQ==, + } + + "@tsconfig/node12@1.0.11": + resolution: + { + integrity: sha512-cqefuRsh12pWyGsIoBKJA9luFu3mRxCA+ORZvA4ktLSzIuCUtWVxGIuXigEwO5/ywWFMZ2QEGKWvkZG1zDMTag==, + } + + "@tsconfig/node14@1.0.3": + resolution: + { + integrity: sha512-ysT8mhdixWK6Hw3i1V2AeRqZ5WfXg1G43mqoYlM2nc6388Fq5jcXyr5mRsqViLx/GJYdoL0bfXD8nmF+Zn/Iow==, + } + + "@tsconfig/node16@1.0.4": + resolution: + { + integrity: sha512-vxhUy4J8lyeyinH7Azl1pdd43GJhZH/tP2weN8TntQblOY+A0XbT8DJk1/oCPuOOyg/Ja757rG0CgHcWC8OfMA==, + } + + "@types/node@18.15.13": + resolution: + { + integrity: sha512-N+0kuo9KgrUQ1Sn/ifDXsvg0TTleP7rIy4zOBGECxAljqvqfqpTfzx0Q1NUedOixRMBfe2Whhb056a42cWs26Q==, + } + + "@types/yoga-layout@1.9.2": + resolution: + { + integrity: sha512-S9q47ByT2pPvD65IvrWp7qppVMpk9WGMbVq9wbWZOHg6tnXSD4vyhao6nOSBwwfDdV2p3Kx9evA9vI+XWTfDvw==, + } + + abitype@1.0.5: + resolution: + { + integrity: sha512-YzDhti7cjlfaBhHutMaboYB21Ha3rXR9QTkNJFzYC4kC8YclaiwPBBBJY8ejFdu2wnJeZCVZSMlQJ7fi8S6hsw==, + } + peerDependencies: + typescript: ">=5.0.4" + zod: ^3 >=3.22.0 + peerDependenciesMeta: + typescript: + optional: true + zod: + optional: true + + abort-controller@3.0.0: + resolution: + { + integrity: sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==, + } + engines: { node: ">=6.5" } + + accepts@1.3.8: + resolution: + { + integrity: sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw==, + } + engines: { node: ">= 0.6" } + + acorn-walk@8.3.5: + resolution: + { + integrity: sha512-HEHNfbars9v4pgpW6SO1KSPkfoS0xVOM/9UzkJltjlsHZmJasxg8aXkuZa7SMf8vKGIBhpUsPluQSqhJFCqebw==, + } + engines: { node: ">=0.4.0" } + + acorn@8.16.0: + resolution: + { + integrity: sha512-UVJyE9MttOsBQIDKw1skb9nAwQuR5wuGD3+82K6JgJlm/Y+KI92oNsMNGZCYdDsVtRHSak0pcV5Dno5+4jh9sw==, + } + engines: { node: ">=0.4.0" } + hasBin: true + + aes-js@4.0.0-beta.5: + resolution: + { 
+ integrity: sha512-G965FqalsNyrPqgEGON7nIx1e/OVENSgiEIzyC63haUMuvNnwIgIjMs52hlTCKhkBny7A2ORNlfY9Zu+jmGk1Q==, + } + + ajv@6.14.0: + resolution: + { + integrity: sha512-IWrosm/yrn43eiKqkfkHis7QioDleaXQHdDVPKg0FSwwd/DuvyX79TZnFOnYpB7dcsFAMmtFztZuXPDvSePkFw==, + } + + ansi-escapes@4.3.2: + resolution: + { + integrity: sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ==, + } + engines: { node: ">=8" } + + ansi-regex@5.0.1: + resolution: + { + integrity: sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==, + } + engines: { node: ">=8" } + + ansi-styles@4.3.0: + resolution: + { + integrity: sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==, + } + engines: { node: ">=8" } + + arg@4.1.3: + resolution: + { + integrity: sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==, + } + + array-flatten@1.1.1: + resolution: + { + integrity: sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg==, + } + + astral-regex@2.0.0: + resolution: + { + integrity: sha512-Z7tMw1ytTXt5jqMcOP+OQteU1VuNK9Y02uuJtKQ1Sv69jXQKKg5cibLwGJow8yzZP+eAc18EmLGPal0bp36rvQ==, + } + engines: { node: ">=8" } + + atomic-sleep@1.0.0: + resolution: + { + integrity: sha512-kNOjDqAh7px0XWNI+4QbzoiR/nTkHAWNud2uvnJquD1/x5a7EQZMJT0AczqK0Qn67oY/TTQ1LbUKajZpp3I9tQ==, + } + engines: { node: ">=8.0.0" } + + auto-bind@4.0.0: + resolution: + { + integrity: sha512-Hdw8qdNiqdJ8LqT0iK0sVzkFbzg6fhnQqqfWhBDxcHZvU75+B+ayzTy8x+k5Ix0Y92XOhOUlx74ps+bA6BeYMQ==, + } + engines: { node: ">=8" } + + balanced-match@1.0.2: + resolution: + { + integrity: sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==, + } + + base64-js@1.5.1: + resolution: + { + integrity: sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==, + } + + bignumber.js@9.1.2: + 
resolution: + { + integrity: sha512-2/mKyZH9K85bzOEfhXDBFZTGd1CTs+5IHpeFQo9luiBG7hghdC851Pj2WAhb6E3R6b9tZj/XKhbg4fum+Kepug==, + } + + bintrees@1.0.2: + resolution: + { + integrity: sha512-VOMgTMwjAaUG580SXn3LacVgjurrbMme7ZZNYGSSV7mmtY6QQRh0Eg3pwIcntQ77DErK1L0NxkbetjcoXzVwKw==, + } + + body-parser@1.20.2: + resolution: + { + integrity: sha512-ml9pReCu3M61kGlqoTm2umSXTlRTuGTx0bfYj+uIUKKYycG5NtSbeetV3faSU6R7ajOPw0g/J1PvK4qNy7s5bA==, + } + engines: { node: ">= 0.8", npm: 1.2.8000 || >= 1.4.16 } + + brace-expansion@2.0.3: + resolution: + { + integrity: sha512-MCV/fYJEbqx68aE58kv2cA/kiky1G8vux3OR6/jbS+jIMe/6fJWa0DTzJU7dqijOWYwHi1t29FlfYI9uytqlpA==, + } + + buffer@6.0.3: + resolution: + { + integrity: sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==, + } + + bytes@3.1.2: + resolution: + { + integrity: sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==, + } + engines: { node: ">= 0.8" } + + call-bind-apply-helpers@1.0.2: + resolution: + { + integrity: sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==, + } + engines: { node: ">= 0.4" } + + call-bound@1.0.4: + resolution: + { + integrity: sha512-+ys997U96po4Kx/ABpBCqhA9EuxJaQWDQg7295H4hBphv3IZg0boBKuwYpt4YXp6MZ5AmZQnU/tyMTlRpaSejg==, + } + engines: { node: ">= 0.4" } + + cfonts@2.10.1: + resolution: + { + integrity: sha512-l5IcLv4SaOdL/EGR6BpOF5SEro88VcGJJ6+xbvJb+wXi19YC6UeHE/brv7a4vIcLZopnt3Ys3zWeNnyfB04UPg==, + } + engines: { node: ">=10" } + hasBin: true + + chalk@4.1.2: + resolution: + { + integrity: sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==, + } + engines: { node: ">=10" } + + ci-info@2.0.0: + resolution: + { + integrity: sha512-5tK7EtrZ0N+OLFMthtqOj4fI2Jeb88C4CAZPu25LDVUgXJ0A3Js4PMGqrn0JU1W0Mh1/Z8wZzYPxqUrXeBboCQ==, + } + + cli-boxes@2.2.1: + resolution: + { + integrity: 
sha512-y4coMcylgSCdVinjiDBuR8PCC2bLjyGTwEmPb9NHR/QaNU6EUOXcTY/s6VjGMD6ENSEaeQYHCY0GNGS5jfMwPw==, + } + engines: { node: ">=6" } + + cli-cursor@3.1.0: + resolution: + { + integrity: sha512-I/zHAwsKf9FqGoXM4WWRACob9+SNukZTd94DWF57E4toouRulbCxcUh6RKUEOQlYTHJnzkPMySvPNaaSLNfLZw==, + } + engines: { node: ">=8" } + + cli-spinners@2.9.2: + resolution: + { + integrity: sha512-ywqV+5MmyL4E7ybXgKys4DugZbX0FC6LnwrhjuykIjnK9k8OQacQ7axGKnjDXWNhns0xot3bZI5h55H8yo9cJg==, + } + engines: { node: ">=6" } + + cli-truncate@2.1.0: + resolution: + { + integrity: sha512-n8fOixwDD6b/ObinzTrp1ZKFzbgvKZvuz/TvejnLn1aQfC6r52XEx85FmuC+3HI+JM7coBRXUvNqEU2PHVrHpg==, + } + engines: { node: ">=8" } + + cliui@8.0.1: + resolution: + { + integrity: sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==, + } + engines: { node: ">=12" } + + code-excerpt@3.0.0: + resolution: + { + integrity: sha512-VHNTVhd7KsLGOqfX3SyeO8RyYPMp1GJOg194VITk04WMYCv4plV68YWe6TJZxd9MhobjtpMRnVky01gqZsalaw==, + } + engines: { node: ">=10" } + + color-convert@2.0.1: + resolution: + { + integrity: sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==, + } + engines: { node: ">=7.0.0" } + + color-name@1.1.4: + resolution: + { + integrity: sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==, + } + + colorette@2.0.20: + resolution: + { + integrity: sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w==, + } + + content-disposition@0.5.4: + resolution: + { + integrity: sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ==, + } + engines: { node: ">= 0.6" } + + content-type@1.0.5: + resolution: + { + integrity: sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==, + } + engines: { node: ">= 0.6" } + + convert-to-spaces@1.0.2: + resolution: + { + integrity: 
sha512-cj09EBuObp9gZNQCzc7hByQyrs6jVGE+o9kSJmeUoj+GiPiJvi5LYqEH/Hmme4+MTLHM+Ejtq+FChpjjEnsPdQ==, + } + engines: { node: ">= 4" } + + cookie-signature@1.0.6: + resolution: + { + integrity: sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ==, + } + + cookie@0.6.0: + resolution: + { + integrity: sha512-U71cyTamuh1CRNCfpGY6to28lxvNwPG4Guz/EVjgf3Jmzv0vlDp1atT9eS5dDjMYHucpHbWns6Lwf3BKz6svdw==, + } + engines: { node: ">= 0.6" } + + create-require@1.1.1: + resolution: + { + integrity: sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==, + } + + date-fns@3.3.1: + resolution: + { + integrity: sha512-y8e109LYGgoQDveiEBD3DYXKba1jWf5BA8YU1FL5Tvm0BTdEfy54WLCwnuYWZNnzzvALy/QQ4Hov+Q9RVRv+Zw==, + } + + dateformat@4.6.3: + resolution: + { + integrity: sha512-2P0p0pFGzHS5EMnhdxQi7aJN+iMheud0UhG4dlE1DLAlvL8JHjJJTX/CSm4JXwV0Ka5nGk3zC5mcb5bUQUxxMA==, + } + + debug@2.6.9: + resolution: + { + integrity: sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==, + } + peerDependencies: + supports-color: "*" + peerDependenciesMeta: + supports-color: + optional: true + + deepmerge@4.3.1: + resolution: + { + integrity: sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A==, + } + engines: { node: ">=0.10.0" } + + define-property@1.0.0: + resolution: + { + integrity: sha512-cZTYKFWspt9jZsMscWo8sc/5lbPC9Q0N5nBLgb+Yd915iL3udB1uFgS3B8YCx66UVHq018DAVFoee7x+gxggeA==, + } + engines: { node: ">=0.10.0" } + + depd@2.0.0: + resolution: + { + integrity: sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==, + } + engines: { node: ">= 0.8" } + + destroy@1.2.0: + resolution: + { + integrity: sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg==, + } + engines: { node: ">= 0.8", npm: 1.2.8000 || >= 1.4.16 } + + diff@4.0.4: + resolution: + { + integrity: 
sha512-X07nttJQkwkfKfvTPG/KSnE2OMdcUCao6+eXF3wmnIQRn2aPAHH3VxDbDOdegkd6JbPsXqShpvEOHfAT+nCNwQ==, + } + engines: { node: ">=0.3.1" } + + dotenv@16.4.5: + resolution: + { + integrity: sha512-ZmdL2rui+eB2YwhsWzjInR8LldtZHGDoQ1ugH85ppHKwpUHL7j7rN0Ti9NCnGiQbhaZ11FpR+7ao1dNsmduNUg==, + } + engines: { node: ">=12" } + + dunder-proto@1.0.1: + resolution: + { + integrity: sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==, + } + engines: { node: ">= 0.4" } + + ee-first@1.1.1: + resolution: + { + integrity: sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==, + } + + emoji-regex@8.0.0: + resolution: + { + integrity: sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==, + } + + encodeurl@1.0.2: + resolution: + { + integrity: sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w==, + } + engines: { node: ">= 0.8" } + + end-of-stream@1.4.5: + resolution: + { + integrity: sha512-ooEGc6HP26xXq/N+GCGOT0JKCLDGrq2bQUZrQ7gyrJiZANJ/8YDTxTpQBXGMn+WbIQXNVpyWymm7KYVICQnyOg==, + } + + envio-darwin-arm64@2.32.12: + resolution: + { + integrity: sha512-TLs9jjXUHVqKcBReMHgD7C06lbfWfnMkit3uT55XmgiJYc8zS85T0XmDCnCX4BRbZN7uzMNORqnUc2J3/LR9sQ==, + } + cpu: [arm64] + os: [darwin] + + envio-darwin-x64@2.32.12: + resolution: + { + integrity: sha512-JfKU3LaqxO/aabEAIvpHGKhDGNEiVGvcmmi98cZfG1/vP4S5lO+8KDEp563CaB986N6KtGJRKnDWivvCsseZMw==, + } + cpu: [x64] + os: [darwin] + + envio-linux-arm64@2.32.12: + resolution: + { + integrity: sha512-3sBfuR6JLcAkrFcoEfw2WiaPU3VyXGy4kf26HB5BJE/iJUqha+wHoDbv46MfFGuaC0QyM34QvlG0yGRES0ohPw==, + } + cpu: [arm64] + os: [linux] + + envio-linux-x64@2.32.12: + resolution: + { + integrity: sha512-886q+yztKVrhgkwOfoFKARDStbjk1032YBtA6tqrCN8uWjqgzAf30ZDPurJGlq26hQqYNKRp2LhgxChpivsvFw==, + } + cpu: [x64] + os: [linux] + + envio@2.32.12: + resolution: + { + integrity: 
sha512-bk9y/AjU+kYxO1a9c/jg8RFDrKKKWU0wCffnwtoXo7KGKmPDKq1WyNzVw6sTeboSfGB0i82hJ97WgSAwRAnR1Q==, + } + hasBin: true + + es-define-property@1.0.1: + resolution: + { + integrity: sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==, + } + engines: { node: ">= 0.4" } + + es-errors@1.3.0: + resolution: + { + integrity: sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==, + } + engines: { node: ">= 0.4" } + + es-object-atoms@1.1.1: + resolution: + { + integrity: sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==, + } + engines: { node: ">= 0.4" } + + escalade@3.2.0: + resolution: + { + integrity: sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==, + } + engines: { node: ">=6" } + + escape-html@1.0.3: + resolution: + { + integrity: sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==, + } + + escape-string-regexp@2.0.0: + resolution: + { + integrity: sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w==, + } + engines: { node: ">=8" } + + etag@1.8.1: + resolution: + { + integrity: sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==, + } + engines: { node: ">= 0.6" } + + ethers@6.8.0: + resolution: + { + integrity: sha512-zrFbmQRlraM+cU5mE4CZTLBurZTs2gdp2ld0nG/f3ecBK+x6lZ69KSxBqZ4NjclxwfTxl5LeNufcBbMsTdY53Q==, + } + engines: { node: ">=14.0.0" } + + event-target-shim@5.0.1: + resolution: + { + integrity: sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==, + } + engines: { node: ">=6" } + + events@3.3.0: + resolution: + { + integrity: sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==, + } + engines: { node: ">=0.8.x" } + + eventsource-parser@3.0.6: + resolution: + { + integrity: 
sha512-Vo1ab+QXPzZ4tCa8SwIHJFaSzy4R6SHf7BY79rFBDf0idraZWAkYrDjDj8uWaSm3S2TK+hJ7/t1CEmZ7jXw+pg==, + } + engines: { node: ">=18.0.0" } + + eventsource@4.1.0: + resolution: + { + integrity: sha512-2GuF51iuHX6A9xdTccMTsNb7VO0lHZihApxhvQzJB5A03DvHDd2FQepodbMaztPBmBcE/ox7o2gqaxGhYB9LhQ==, + } + engines: { node: ">=20.0.0" } + + express@4.19.2: + resolution: + { + integrity: sha512-5T6nhjsT+EOMzuck8JjBHARTHfMht0POzlA60WV2pMD3gyXw2LZnZ+ueGdNxG+0calOJcWKbpFcuzLZ91YWq9Q==, + } + engines: { node: ">= 0.10.0" } + + fast-copy@3.0.2: + resolution: + { + integrity: sha512-dl0O9Vhju8IrcLndv2eU4ldt1ftXMqqfgN4H1cpmGV7P6jeB9FwpN9a2c8DPGE1Ys88rNUJVYDHq73CGAGOPfQ==, + } + + fast-deep-equal@3.1.3: + resolution: + { + integrity: sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==, + } + + fast-json-stable-stringify@2.1.0: + resolution: + { + integrity: sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==, + } + + fast-json-stringify@2.7.13: + resolution: + { + integrity: sha512-ar+hQ4+OIurUGjSJD1anvYSDcUflywhKjfxnsW4TBTD7+u0tJufv6DKRWoQk3vI6YBOWMoz0TQtfbe7dxbQmvA==, + } + engines: { node: ">= 10.0.0" } + + fast-redact@3.5.0: + resolution: + { + integrity: sha512-dwsoQlS7h9hMeYUq1W++23NDcBLV4KqONnITDV9DjfS3q1SgDGVrBdvvTLUotWtPSD7asWDV9/CmsZPy8Hf70A==, + } + engines: { node: ">=6" } + + fast-safe-stringify@2.1.1: + resolution: + { + integrity: sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA==, + } + + finalhandler@1.2.0: + resolution: + { + integrity: sha512-5uXcUVftlQMFnWC9qu/svkWv3GTd2PfUhK/3PLkYNAe7FbqJMt3515HaxE6eRL74GdsriiwujiawdaB1BpEISg==, + } + engines: { node: ">= 0.8" } + + forwarded@0.2.0: + resolution: + { + integrity: sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==, + } + engines: { node: ">= 0.6" } + + fresh@0.5.2: + resolution: + { + integrity: 
sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q==, + } + engines: { node: ">= 0.6" } + + fs.realpath@1.0.0: + resolution: + { + integrity: sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==, + } + + function-bind@1.1.2: + resolution: + { + integrity: sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==, + } + + get-caller-file@2.0.5: + resolution: + { + integrity: sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==, + } + engines: { node: 6.* || 8.* || >= 10.* } + + get-intrinsic@1.3.0: + resolution: + { + integrity: sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==, + } + engines: { node: ">= 0.4" } + + get-proto@1.0.1: + resolution: + { + integrity: sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==, + } + engines: { node: ">= 0.4" } + + glob@8.1.0: + resolution: + { + integrity: sha512-r8hpEjiQEYlF2QU0df3dS+nxxSIreXQS1qRhMJM0Q5NDdR386C7jb7Hwwod8Fgiuex+k0GFjgft18yvxm5XoCQ==, + } + engines: { node: ">=12" } + deprecated: Old versions of glob are not supported, and contain widely publicized security vulnerabilities, which have been fixed in the current version. Please update. 
Support for old versions may be purchased (at exorbitant rates) by contacting i@izs.me + + gopd@1.2.0: + resolution: + { + integrity: sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==, + } + engines: { node: ">= 0.4" } + + has-flag@4.0.0: + resolution: + { + integrity: sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==, + } + engines: { node: ">=8" } + + has-symbols@1.1.0: + resolution: + { + integrity: sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==, + } + engines: { node: ">= 0.4" } + + hasown@2.0.2: + resolution: + { + integrity: sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==, + } + engines: { node: ">= 0.4" } + + help-me@4.2.0: + resolution: + { + integrity: sha512-TAOnTB8Tz5Dw8penUuzHVrKNKlCIbwwbHnXraNJxPwf8LRtE2HlM84RYuezMFcwOJmoYOCWVDyJ8TQGxn9PgxA==, + } + + http-errors@2.0.0: + resolution: + { + integrity: sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ==, + } + engines: { node: ">= 0.8" } + + iconv-lite@0.4.24: + resolution: + { + integrity: sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==, + } + engines: { node: ">=0.10.0" } + + ieee754@1.2.1: + resolution: + { + integrity: sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==, + } + + indent-string@4.0.0: + resolution: + { + integrity: sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==, + } + engines: { node: ">=8" } + + inflight@1.0.6: + resolution: + { + integrity: sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==, + } + deprecated: This module is not supported, and leaks memory. Do not use it. 
Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful. + + inherits@2.0.4: + resolution: + { + integrity: sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==, + } + + ink-big-text@1.2.0: + resolution: + { + integrity: sha512-xDfn8oOhiji9c4wojTKSaBnEfgpTTd3KL7jsMYVht4SbpfLdSKvVZiMi3U5v45eSjLm1ycMmeMWAP1G99lWL5Q==, + } + engines: { node: ">=8" } + peerDependencies: + ink: ">=2.0.0" + react: ">=16.8.0" + + ink-spinner@4.0.3: + resolution: + { + integrity: sha512-uJ4nbH00MM9fjTJ5xdw0zzvtXMkeGb0WV6dzSWvFv2/+ks6FIhpkt+Ge/eLdh0Ah6Vjw5pLMyNfoHQpRDRVFbQ==, + } + engines: { node: ">=10" } + peerDependencies: + ink: ">=3.0.5" + react: ">=16.8.2" + + ink@3.2.0: + resolution: + { + integrity: sha512-firNp1q3xxTzoItj/eOOSZQnYSlyrWks5llCTVX37nJ59K3eXbQ8PtzCguqo8YI19EELo5QxaKnJd4VxzhU8tg==, + } + engines: { node: ">=10" } + peerDependencies: + "@types/react": ">=16.8.0" + react: ">=16.8.0" + peerDependenciesMeta: + "@types/react": + optional: true + + ipaddr.js@1.9.1: + resolution: + { + integrity: sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==, + } + engines: { node: ">= 0.10" } + + is-accessor-descriptor@1.0.1: + resolution: + { + integrity: sha512-YBUanLI8Yoihw923YeFUS5fs0fF2f5TSFTNiYAAzhhDscDa3lEqYuz1pDOEP5KvX94I9ey3vsqjJcLVFVU+3QA==, + } + engines: { node: ">= 0.10" } + + is-buffer@1.1.6: + resolution: + { + integrity: sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w==, + } + + is-ci@2.0.0: + resolution: + { + integrity: sha512-YfJT7rkpQB0updsdHLGWrvhBJfcfzNNawYDNIyQXJz0IViGf75O8EBPKSdvw2rF+LGCsX4FZ8tcr3b19LcZq4w==, + } + hasBin: true + + is-data-descriptor@1.0.1: + resolution: + { + integrity: sha512-bc4NlCDiCr28U4aEsQ3Qs2491gVq4V8G7MQyws968ImqjKuYtTJXrl7Vq7jsN7Ly/C3xj5KWFrY7sHNeDkAzXw==, + } + engines: { node: ">= 0.4" } + + is-descriptor@1.0.3: + 
resolution: + { + integrity: sha512-JCNNGbwWZEVaSPtS45mdtrneRWJFp07LLmykxeFV5F6oBvNF8vHSfJuJgoT472pSfk+Mf8VnlrspaFBHWM8JAw==, + } + engines: { node: ">= 0.4" } + + is-fullwidth-code-point@3.0.0: + resolution: + { + integrity: sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==, + } + engines: { node: ">=8" } + + is-number@3.0.0: + resolution: + { + integrity: sha512-4cboCqIpliH+mAvFNegjZQ4kgKc3ZUhQVr3HvWbSh5q3WH2v82ct+T2Y1hdU5Gdtorx/cLifQjqCbL7bpznLTg==, + } + engines: { node: ">=0.10.0" } + + isows@1.0.4: + resolution: + { + integrity: sha512-hEzjY+x9u9hPmBom9IIAqdJCwNLax+xrPb51vEPpERoFlIxgmZcHzsT5jKG06nvInKOBGvReAVz80Umed5CczQ==, + } + peerDependencies: + ws: "*" + + joycon@3.1.1: + resolution: + { + integrity: sha512-34wB/Y7MW7bzjKRjUKTa46I2Z7eV62Rkhva+KkopW7Qvv/OSWBqvkSY7vusOPrNuZcUG3tApvdVgNB8POj3SPw==, + } + engines: { node: ">=10" } + + js-sdsl@4.4.2: + resolution: + { + integrity: sha512-dwXFwByc/ajSV6m5bcKAPwe4yDDF6D614pxmIi5odytzxRlwqF6nwoiCek80Ixc7Cvma5awClxrzFtxCQvcM8w==, + } + + js-tokens@4.0.0: + resolution: + { + integrity: sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==, + } + + json-schema-traverse@0.4.1: + resolution: + { + integrity: sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==, + } + + kind-of@3.2.2: + resolution: + { + integrity: sha512-NOW9QQXMoZGg/oqnVNoNTTIFEIid1627WCffUBJEdMxYApq7mNE7CpzucIPc+ZQg25Phej7IJSmX3hO+oblOtQ==, + } + engines: { node: ">=0.10.0" } + + lodash@4.18.1: + resolution: + { + integrity: sha512-dMInicTPVE8d1e5otfwmmjlxkZoUpiVLwyeTdUsi/Caj/gfzzblBcCE5sRHV/AsjuCmxWrte2TNGSYuCeCq+0Q==, + } + + loose-envify@1.4.0: + resolution: + { + integrity: sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==, + } + hasBin: true + + make-error@1.3.6: + resolution: + { + integrity: 
sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==, + } + + math-intrinsics@1.1.0: + resolution: + { + integrity: sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==, + } + engines: { node: ">= 0.4" } + + media-typer@0.3.0: + resolution: + { + integrity: sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ==, + } + engines: { node: ">= 0.6" } + + merge-descriptors@1.0.1: + resolution: + { + integrity: sha512-cCi6g3/Zr1iqQi6ySbseM1Xvooa98N0w31jzUYrXPX2xqObmFGHJ0tQ5u74H3mVh7wLouTseZyYIq39g8cNp1w==, + } + + methods@1.1.2: + resolution: + { + integrity: sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w==, + } + engines: { node: ">= 0.6" } + + mime-db@1.52.0: + resolution: + { + integrity: sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==, + } + engines: { node: ">= 0.6" } + + mime-types@2.1.35: + resolution: + { + integrity: sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==, + } + engines: { node: ">= 0.6" } + + mime@1.6.0: + resolution: + { + integrity: sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==, + } + engines: { node: ">=4" } + hasBin: true + + mimic-fn@2.1.0: + resolution: + { + integrity: sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==, + } + engines: { node: ">=6" } + + minimatch@5.1.9: + resolution: + { + integrity: sha512-7o1wEA2RyMP7Iu7GNba9vc0RWWGACJOCZBJX2GJWip0ikV+wcOsgVuY9uE8CPiyQhkGFSlhuSkZPavN7u1c2Fw==, + } + engines: { node: ">=10" } + + minimist@1.2.8: + resolution: + { + integrity: sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==, + } + + ms@2.0.0: + resolution: + { + integrity: 
sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==, + } + + ms@2.1.3: + resolution: + { + integrity: sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==, + } + + negotiator@0.6.3: + resolution: + { + integrity: sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==, + } + engines: { node: ">= 0.6" } + + object-assign@4.1.1: + resolution: + { + integrity: sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==, + } + engines: { node: ">=0.10.0" } + + object-inspect@1.13.4: + resolution: + { + integrity: sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew==, + } + engines: { node: ">= 0.4" } + + on-exit-leak-free@2.1.2: + resolution: + { + integrity: sha512-0eJJY6hXLGf1udHwfNftBqH+g73EU4B504nZeKpz1sYRKafAghwxEJunB2O7rDZkL4PGfsMVnTXZ2EjibbqcsA==, + } + engines: { node: ">=14.0.0" } + + on-finished@2.4.1: + resolution: + { + integrity: sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==, + } + engines: { node: ">= 0.8" } + + once@1.4.0: + resolution: + { + integrity: sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==, + } + + onetime@5.1.2: + resolution: + { + integrity: sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==, + } + engines: { node: ">=6" } + + parseurl@1.3.3: + resolution: + { + integrity: sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==, + } + engines: { node: ">= 0.8" } + + patch-console@1.0.0: + resolution: + { + integrity: sha512-nxl9nrnLQmh64iTzMfyylSlRozL7kAXIaxw1fVcLYdyhNkJCRUzirRZTikXGJsg+hc4fqpneTK6iU2H1Q8THSA==, + } + engines: { node: ">=10" } + + path-to-regexp@0.1.7: + resolution: + { + integrity: 
sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ==, + } + + pino-abstract-transport@1.1.0: + resolution: + { + integrity: sha512-lsleG3/2a/JIWUtf9Q5gUNErBqwIu1tUKTT3dUzaf5DySw9ra1wcqKjJjLX1VTY64Wk1eEOYsVGSaGfCK85ekA==, + } + + pino-abstract-transport@1.2.0: + resolution: + { + integrity: sha512-Guhh8EZfPCfH+PMXAb6rKOjGQEoy0xlAIn+irODG5kgfYV+BQ0rGYYWTIel3P5mmyXqkYkPmdIkywsn6QKUR1Q==, + } + + pino-pretty@10.2.3: + resolution: + { + integrity: sha512-4jfIUc8TC1GPUfDyMSlW1STeORqkoxec71yhxIpLDQapUu8WOuoz2TTCoidrIssyz78LZC69whBMPIKCMbi3cw==, + } + hasBin: true + + pino-std-serializers@6.2.2: + resolution: + { + integrity: sha512-cHjPPsE+vhj/tnhCy/wiMh3M3z3h/j15zHQX+S9GkTBgqJuTuJzYJ4gUyACLhDaJ7kk9ba9iRDmbH2tJU03OiA==, + } + + pino@8.16.1: + resolution: + { + integrity: sha512-3bKsVhBmgPjGV9pyn4fO/8RtoVDR8ssW1ev819FsRXlRNgW8gR/9Kx+gCK4UPWd4JjrRDLWpzd/pb1AyWm3MGA==, + } + hasBin: true + + postgres@3.4.1: + resolution: + { + integrity: sha512-Wasjv6WEzrZXbwKByR2RGD7MBfj7VBqco3hYWz8ifzSAp6tb2L6MlmcKFzkmgV1jT7/vKlcSa+lxXZeTdeVMzQ==, + } + engines: { node: ">=12" } + + process-warning@2.3.2: + resolution: + { + integrity: sha512-n9wh8tvBe5sFmsqlg+XQhaQLumwpqoAUruLwjCopgTmUBjJ/fjtBsJzKleCaIGBOMXYEhp1YfKl4d7rJ5ZKJGA==, + } + + process@0.11.10: + resolution: + { + integrity: sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A==, + } + engines: { node: ">= 0.6.0" } + + prom-client@15.0.0: + resolution: + { + integrity: sha512-UocpgIrKyA2TKLVZDSfm8rGkL13C19YrQBAiG3xo3aDFWcHedxRxI3z+cIcucoxpSO0h5lff5iv/SXoxyeopeA==, + } + engines: { node: ^16 || ^18 || >=20 } + + prop-types@15.8.1: + resolution: + { + integrity: sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg==, + } + + proxy-addr@2.0.7: + resolution: + { + integrity: sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==, + } + engines: { node: ">= 0.10" } + + 
pump@3.0.4: + resolution: + { + integrity: sha512-VS7sjc6KR7e1ukRFhQSY5LM2uBWAUPiOPa/A3mkKmiMwSmRFUITt0xuj+/lesgnCv+dPIEYlkzrcyXgquIHMcA==, + } + + punycode@2.3.1: + resolution: + { + integrity: sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==, + } + engines: { node: ">=6" } + + qs@6.11.0: + resolution: + { + integrity: sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q==, + } + engines: { node: ">=0.6" } + + quick-format-unescaped@4.0.4: + resolution: + { + integrity: sha512-tYC1Q1hgyRuHgloV/YXs2w15unPVh8qfu/qCTfhTYamaw7fyhumKa2yGpdSo87vY32rIclj+4fWYQXUMs9EHvg==, + } + + range-parser@1.2.1: + resolution: + { + integrity: sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==, + } + engines: { node: ">= 0.6" } + + raw-body@2.5.2: + resolution: + { + integrity: sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA==, + } + engines: { node: ">= 0.8" } + + react-devtools-core@4.28.5: + resolution: + { + integrity: sha512-cq/o30z9W2Wb4rzBefjv5fBalHU0rJGZCHAkf/RHSBWSSYwh8PlQTqqOJmgIIbBtpj27T6FIPXeomIjZtCNVqA==, + } + + react-dom@19.2.4: + resolution: + { + integrity: sha512-AXJdLo8kgMbimY95O2aKQqsz2iWi9jMgKJhRBAxECE4IFxfcazB2LmzloIoibJI3C12IlY20+KFaLv+71bUJeQ==, + } + peerDependencies: + react: ^19.2.4 + + react-is@16.13.1: + resolution: + { + integrity: sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==, + } + + react-reconciler@0.26.2: + resolution: + { + integrity: sha512-nK6kgY28HwrMNwDnMui3dvm3rCFjZrcGiuwLc5COUipBK5hWHLOxMJhSnSomirqWwjPBJKV1QcbkI0VJr7Gl1Q==, + } + engines: { node: ">=0.10.0" } + peerDependencies: + react: ^17.0.2 + + react@18.2.0: + resolution: + { + integrity: sha512-/3IjMdb2L9QbBdWiW5e3P2/npwMBaU9mHCSCUzNln0ZCYbcfTsGbTJrU/kGemdH2IWmB2ioZ+zkxtmq6g09fGQ==, + } + engines: { node: ">=0.10.0" } + + readable-stream@3.6.2: + resolution: + { 
+ integrity: sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==, + } + engines: { node: ">= 6" } + + readable-stream@4.7.0: + resolution: + { + integrity: sha512-oIGGmcpTLwPga8Bn6/Z75SVaH1z5dUut2ibSyAMVhmUggWpmDn2dapB0n7f8nwaSiRtepAsfJyfXIO5DCVAODg==, + } + engines: { node: ^12.22.0 || ^14.17.0 || >=16.0.0 } + + real-require@0.2.0: + resolution: + { + integrity: sha512-57frrGM/OCTLqLOAh0mhVA9VBMHd+9U7Zb2THMGdBUoZVOtGbJzjxsYGDJ3A9AYYCP4hn6y1TVbaOfzWtm5GFg==, + } + engines: { node: ">= 12.13.0" } + + require-directory@2.1.1: + resolution: + { + integrity: sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==, + } + engines: { node: ">=0.10.0" } + + rescript-envsafe@5.0.0: + resolution: + { + integrity: sha512-xSQbNsFSSQEynvLWUYtI7GJJhzicACLTq5aO1tjgK0N2Vcm9qlrkcLSmnU8tTohebEu9zgm1V/xYY+oGeQgLvA==, + } + peerDependencies: + rescript: 11.x + rescript-schema: 9.x + + rescript-schema@9.3.0: + resolution: + { + integrity: sha512-NiHAjlhFKZCmNhx/Ij40YltCEJJgVNhBWTN/ZfagTg5hdWWuvCiUacxZv+Q/QQolrAhTnHnCrL7RDvZBogHl5A==, + } + peerDependencies: + rescript: 11.x + peerDependenciesMeta: + rescript: + optional: true + + rescript@11.1.3: + resolution: + { + integrity: sha512-bI+yxDcwsv7qE34zLuXeO8Qkc2+1ng5ErlSjnUIZdrAWKoGzHXpJ6ZxiiRBUoYnoMsgRwhqvrugIFyNgWasmsw==, + } + engines: { node: ">=10" } + hasBin: true + + restore-cursor@3.1.0: + resolution: + { + integrity: sha512-l+sSefzHpj5qimhFSE5a8nufZYAM3sBSVMAPtYkmC+4EH2anSGaEMXSD0izRQbu9nfyQ9y5JrVmp7E8oZrUjvA==, + } + engines: { node: ">=8" } + + rfdc@1.4.1: + resolution: + { + integrity: sha512-q1b3N5QkRUWUl7iyylaaj3kOpIT0N2i9MqIEQXP73GVsN9cw3fdx8X63cEmWhJGi2PPCF23Ijp7ktmd39rawIA==, + } + + safe-buffer@5.2.1: + resolution: + { + integrity: sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==, + } + + safe-stable-stringify@2.5.0: + resolution: + { + integrity: 
sha512-b3rppTKm9T+PsVCBEOUR46GWI7fdOs00VKZ1+9c1EWDaDMvjQc6tUwuFyIprgGgTcWoVHSKrU8H31ZHA2e0RHA==, + } + engines: { node: ">=10" } + + safer-buffer@2.1.2: + resolution: + { + integrity: sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==, + } + + scheduler@0.20.2: + resolution: + { + integrity: sha512-2eWfGgAqqWFGqtdMmcL5zCMK1U8KlXv8SQFGglL3CEtd0aDVDWgeF/YoCmvln55m5zSk3J/20hTaSBeSObsQDQ==, + } + + scheduler@0.27.0: + resolution: + { + integrity: sha512-eNv+WrVbKu1f3vbYJT/xtiF5syA5HPIMtf9IgY/nKg0sWqzAUEvqY/xm7OcZc/qafLx/iO9FgOmeSAp4v5ti/Q==, + } + + secure-json-parse@2.7.0: + resolution: + { + integrity: sha512-6aU+Rwsezw7VR8/nyvKTx8QpWH9FrcYiXXlqC4z5d5XQBDRqtbfsRjnwGyqbi3gddNtWHuEk9OANUotL26qKUw==, + } + + send@0.18.0: + resolution: + { + integrity: sha512-qqWzuOjSFOuqPjFe4NOsMLafToQQwBSOEpS+FwEt3A2V3vKubTquT3vmLTQpFgMXp8AlFWFuP1qKaJZOtPpVXg==, + } + engines: { node: ">= 0.8.0" } + + serve-static@1.15.0: + resolution: + { + integrity: sha512-XGuRDNjXUijsUL0vl6nSD7cwURuzEgglbOaFuZM9g3kwDXOWVTck0jLzjPzGD+TazWbboZYu52/9/XPdUgne9g==, + } + engines: { node: ">= 0.8.0" } + + setprototypeof@1.2.0: + resolution: + { + integrity: sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==, + } + + shell-quote@1.8.3: + resolution: + { + integrity: sha512-ObmnIF4hXNg1BqhnHmgbDETF8dLPCggZWBjkQfhZpbszZnYur5DUljTcCHii5LC3J5E0yeO/1LIMyH+UvHQgyw==, + } + engines: { node: ">= 0.4" } + + side-channel-list@1.0.0: + resolution: + { + integrity: sha512-FCLHtRD/gnpCiCHEiJLOwdmFP+wzCmDEkc9y7NsYxeF4u7Btsn1ZuwgwJGxImImHicJArLP4R0yX4c2KCrMrTA==, + } + engines: { node: ">= 0.4" } + + side-channel-map@1.0.1: + resolution: + { + integrity: sha512-VCjCNfgMsby3tTdo02nbjtM/ewra6jPHmpThenkTYh8pG9ucZ/1P8So4u4FGBek/BjpOVsDCMoLA/iuBKIFXRA==, + } + engines: { node: ">= 0.4" } + + side-channel-weakmap@1.0.2: + resolution: + { + integrity: 
sha512-WPS/HvHQTYnHisLo9McqBHOJk2FkHO/tlpvldyrnem4aeQp4hai3gythswg6p01oSoTl58rcpiFAjF2br2Ak2A==, + } + engines: { node: ">= 0.4" } + + side-channel@1.1.0: + resolution: + { + integrity: sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw==, + } + engines: { node: ">= 0.4" } + + signal-exit@3.0.7: + resolution: + { + integrity: sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==, + } + + slice-ansi@3.0.0: + resolution: + { + integrity: sha512-pSyv7bSTC7ig9Dcgbw9AuRNUb5k5V6oDudjZoMBSr13qpLBG7tB+zgCkARjq7xIUgdz5P1Qe8u+rSGdouOOIyQ==, + } + engines: { node: ">=8" } + + sonic-boom@3.8.1: + resolution: + { + integrity: sha512-y4Z8LCDBuum+PBP3lSV7RHrXscqksve/bi0as7mhwVnBW+/wUqKT/2Kb7um8yqcFy0duYbbPxzt89Zy2nOCaxg==, + } + + split2@4.2.0: + resolution: + { + integrity: sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg==, + } + engines: { node: ">= 10.x" } + + stack-utils@2.0.6: + resolution: + { + integrity: sha512-XlkWvfIm6RmsWtNJx+uqtKLS8eqFbxUg0ZzLXqY0caEy9l7hruX8IpiDnjsLavoBgqCCR71TqWO8MaXYheJ3RQ==, + } + engines: { node: ">=10" } + + statuses@2.0.1: + resolution: + { + integrity: sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==, + } + engines: { node: ">= 0.8" } + + string-similarity@4.0.4: + resolution: + { + integrity: sha512-/q/8Q4Bl4ZKAPjj8WerIBJWALKkaPRfrvhfF8k/B23i4nzrlRj2/go1m90In7nG/3XDSbOo0+pu6RvCTM9RGMQ==, + } + deprecated: Package no longer supported. Contact Support at https://www.npmjs.com/support for more info. 
+ + string-width@4.2.3: + resolution: + { + integrity: sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==, + } + engines: { node: ">=8" } + + string_decoder@1.3.0: + resolution: + { + integrity: sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==, + } + + strip-ansi@6.0.1: + resolution: + { + integrity: sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==, + } + engines: { node: ">=8" } + + strip-json-comments@3.1.1: + resolution: + { + integrity: sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==, + } + engines: { node: ">=8" } + + supports-color@7.2.0: + resolution: + { + integrity: sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==, + } + engines: { node: ">=8" } + + tdigest@0.1.2: + resolution: + { + integrity: sha512-+G0LLgjjo9BZX2MfdvPfH+MKLCrxlXSYec5DaPYP1fe6Iyhf0/fSmJ0bFiZ1F8BT6cGXl2LpltQptzjXKWEkKA==, + } + + thread-stream@2.7.0: + resolution: + { + integrity: sha512-qQiRWsU/wvNolI6tbbCKd9iKaTnCXsTwVxhhKM6nctPdujTyztjlbUkUTUymidWcMnZ5pWR0ej4a0tjsW021vw==, + } + + toidentifier@1.0.1: + resolution: + { + integrity: sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==, + } + engines: { node: ">=0.6" } + + ts-node@10.9.1: + resolution: + { + integrity: sha512-NtVysVPkxxrwFGUUxGYhfux8k78pQB3JqYBXlLRZgdGUqTO5wU/UyHop5p70iEbGhB7q5KmiZiU0Y3KlJrScEw==, + } + hasBin: true + peerDependencies: + "@swc/core": ">=1.2.50" + "@swc/wasm": ">=1.2.50" + "@types/node": "*" + typescript: ">=2.7" + peerDependenciesMeta: + "@swc/core": + optional: true + "@swc/wasm": + optional: true + + tslib@2.4.0: + resolution: + { + integrity: sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==, + } + + type-fest@0.12.0: + resolution: + { + integrity: 
sha512-53RyidyjvkGpnWPMF9bQgFtWp+Sl8O2Rp13VavmJgfAP9WWG6q6TkrKU8iyJdnwnfgHI6k2hTlgqH4aSdjoTbg==, + } + engines: { node: ">=10" } + + type-fest@0.21.3: + resolution: + { + integrity: sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==, + } + engines: { node: ">=10" } + + type-is@1.6.18: + resolution: + { + integrity: sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==, + } + engines: { node: ">= 0.6" } + + typescript@6.0.2: + resolution: + { + integrity: sha512-bGdAIrZ0wiGDo5l8c++HWtbaNCWTS4UTv7RaTH/ThVIgjkveJt83m74bBHMJkuCbslY8ixgLBVZJIOiQlQTjfQ==, + } + engines: { node: ">=14.17" } + hasBin: true + + unpipe@1.0.0: + resolution: + { + integrity: sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==, + } + engines: { node: ">= 0.8" } + + uri-js@4.4.1: + resolution: + { + integrity: sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==, + } + + util-deprecate@1.0.2: + resolution: + { + integrity: sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==, + } + + utils-merge@1.0.1: + resolution: + { + integrity: sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA==, + } + engines: { node: ">= 0.4.0" } + + v8-compile-cache-lib@3.0.1: + resolution: + { + integrity: sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg==, + } + + vary@1.1.2: + resolution: + { + integrity: sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg==, + } + engines: { node: ">= 0.8" } + + viem@2.21.0: + resolution: + { + integrity: sha512-9g3Gw2nOU6t4bNuoDI5vwVExzIxseU0J7Jjx10gA2RNQVrytIrLxggW++tWEe3w4mnnm/pS1WgZFjQ/QKf/nHw==, + } + peerDependencies: + typescript: ">=5.0.4" + peerDependenciesMeta: + typescript: + optional: true + + webauthn-p256@0.0.5: + resolution: + 
{ + integrity: sha512-drMGNWKdaixZNobeORVIqq7k5DsRC9FnG201K2QjeOoQLmtSDaSsVZdkg6n5jUALJKcAG++zBPJXmv6hy0nWFg==, + } + + widest-line@3.1.0: + resolution: + { + integrity: sha512-NsmoXalsWVDMGupxZ5R08ka9flZjjiLvHVAWYOKtiKM8ujtZWr9cRffak+uSE48+Ob8ObalXpwyeUiyDD6QFgg==, + } + engines: { node: ">=8" } + + window-size@1.1.1: + resolution: + { + integrity: sha512-5D/9vujkmVQ7pSmc0SCBmHXbkv6eaHwXEx65MywhmUMsI8sGqJ972APq1lotfcwMKPFLuCFfL8xGHLIp7jaBmA==, + } + engines: { node: ">= 0.10.0" } + hasBin: true + + wrap-ansi@6.2.0: + resolution: + { + integrity: sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==, + } + engines: { node: ">=8" } + + wrap-ansi@7.0.0: + resolution: + { + integrity: sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==, + } + engines: { node: ">=10" } + + wrappy@1.0.2: + resolution: + { + integrity: sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==, + } + + ws@7.5.10: + resolution: + { + integrity: sha512-+dbF1tHwZpXcbOJdVOkzLDxZP1ailvSxM6ZweXTegylPny803bFhA+vqBYw4s31NSAk4S2Qz+AKXK9a4wkdjcQ==, + } + engines: { node: ">=8.3.0" } + peerDependencies: + bufferutil: ^4.0.1 + utf-8-validate: ^5.0.2 + peerDependenciesMeta: + bufferutil: + optional: true + utf-8-validate: + optional: true + + ws@8.17.1: + resolution: + { + integrity: sha512-6XQFvXTkbfUOZOKKILFG1PDK2NDQs4azKQl26T0YS5CxqWLgXajbPZ+h4gZekJyRqFU8pvnbAbbs/3TgRPy+GQ==, + } + engines: { node: ">=10.0.0" } + peerDependencies: + bufferutil: ^4.0.1 + utf-8-validate: ">=5.0.2" + peerDependenciesMeta: + bufferutil: + optional: true + utf-8-validate: + optional: true + + ws@8.5.0: + resolution: + { + integrity: sha512-BWX0SWVgLPzYwF8lTzEy1egjhS4S4OEAHfsO8o65WOVsrnSRGaSiUaa9e0ggGlkMTtBlmOpEXiie9RUcBO86qg==, + } + engines: { node: ">=10.0.0" } + peerDependencies: + bufferutil: ^4.0.1 + utf-8-validate: ^5.0.2 + peerDependenciesMeta: + bufferutil: + optional: true + 
utf-8-validate: + optional: true + + y18n@5.0.8: + resolution: + { + integrity: sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==, + } + engines: { node: ">=10" } + + yargs-parser@21.1.1: + resolution: + { + integrity: sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==, + } + engines: { node: ">=12" } + + yargs@17.7.2: + resolution: + { + integrity: sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==, + } + engines: { node: ">=12" } + + yn@3.1.1: + resolution: + { + integrity: sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==, + } + engines: { node: ">=6" } + + yoga-layout-prebuilt@1.10.0: + resolution: + { + integrity: sha512-YnOmtSbv4MTf7RGJMK0FvZ+KD8OEe/J5BNnR0GHhD8J/XcG/Qvxgszm0Un6FTHWW4uHlTgP0IztiXQnGyIR45g==, + } + engines: { node: ">=8" } + +snapshots: + "@adraffy/ens-normalize@1.10.0": {} + + "@cspotcode/source-map-support@0.8.1": + dependencies: + "@jridgewell/trace-mapping": 0.3.9 + + "@elastic/ecs-helpers@1.1.0": + dependencies: + fast-json-stringify: 2.7.13 + + "@elastic/ecs-pino-format@1.4.0": + dependencies: + "@elastic/ecs-helpers": 1.1.0 + + "@envio-dev/hyperfuel-client-darwin-arm64@1.2.2": + optional: true + + "@envio-dev/hyperfuel-client-darwin-x64@1.2.2": + optional: true + + "@envio-dev/hyperfuel-client-linux-arm64-gnu@1.2.2": + optional: true + + "@envio-dev/hyperfuel-client-linux-x64-gnu@1.2.2": + optional: true + + "@envio-dev/hyperfuel-client-linux-x64-musl@1.2.2": + optional: true + + "@envio-dev/hyperfuel-client-win32-x64-msvc@1.2.2": + optional: true + + "@envio-dev/hyperfuel-client@1.2.2": + optionalDependencies: + "@envio-dev/hyperfuel-client-darwin-arm64": 1.2.2 + "@envio-dev/hyperfuel-client-darwin-x64": 1.2.2 + "@envio-dev/hyperfuel-client-linux-arm64-gnu": 1.2.2 + "@envio-dev/hyperfuel-client-linux-x64-gnu": 1.2.2 + 
"@envio-dev/hyperfuel-client-linux-x64-musl": 1.2.2 + "@envio-dev/hyperfuel-client-win32-x64-msvc": 1.2.2 + + "@envio-dev/hypersync-client-darwin-arm64@1.3.0": + optional: true + + "@envio-dev/hypersync-client-darwin-x64@1.3.0": + optional: true + + "@envio-dev/hypersync-client-linux-arm64-gnu@1.3.0": + optional: true + + "@envio-dev/hypersync-client-linux-x64-gnu@1.3.0": + optional: true + + "@envio-dev/hypersync-client-linux-x64-musl@1.3.0": + optional: true + + "@envio-dev/hypersync-client@1.3.0": + optionalDependencies: + "@envio-dev/hypersync-client-darwin-arm64": 1.3.0 + "@envio-dev/hypersync-client-darwin-x64": 1.3.0 + "@envio-dev/hypersync-client-linux-arm64-gnu": 1.3.0 + "@envio-dev/hypersync-client-linux-x64-gnu": 1.3.0 + "@envio-dev/hypersync-client-linux-x64-musl": 1.3.0 + + "@jridgewell/resolve-uri@3.1.2": {} + + "@jridgewell/sourcemap-codec@1.5.5": {} + + "@jridgewell/trace-mapping@0.3.9": + dependencies: + "@jridgewell/resolve-uri": 3.1.2 + "@jridgewell/sourcemap-codec": 1.5.5 + + "@noble/curves@1.2.0": + dependencies: + "@noble/hashes": 1.3.2 + + "@noble/curves@1.4.0": + dependencies: + "@noble/hashes": 1.4.0 + + "@noble/hashes@1.3.2": {} + + "@noble/hashes@1.4.0": {} + + "@opentelemetry/api@1.9.1": {} + + "@rescript/react@0.12.1(react-dom@19.2.4(react@18.2.0))(react@18.2.0)": + dependencies: + react: 18.2.0 + react-dom: 19.2.4(react@18.2.0) + + "@scure/base@1.1.9": {} + + "@scure/bip32@1.4.0": + dependencies: + "@noble/curves": 1.4.0 + "@noble/hashes": 1.4.0 + "@scure/base": 1.1.9 + + "@scure/bip39@1.3.0": + dependencies: + "@noble/hashes": 1.4.0 + "@scure/base": 1.1.9 + + "@tsconfig/node10@1.0.12": {} + + "@tsconfig/node12@1.0.11": {} + + "@tsconfig/node14@1.0.3": {} + + "@tsconfig/node16@1.0.4": {} + + "@types/node@18.15.13": {} + + "@types/yoga-layout@1.9.2": {} + + abitype@1.0.5(typescript@6.0.2): + optionalDependencies: + typescript: 6.0.2 + + abort-controller@3.0.0: + dependencies: + event-target-shim: 5.0.1 + + accepts@1.3.8: + dependencies: 
+ mime-types: 2.1.35 + negotiator: 0.6.3 + + acorn-walk@8.3.5: + dependencies: + acorn: 8.16.0 + + acorn@8.16.0: {} + + aes-js@4.0.0-beta.5: {} + + ajv@6.14.0: + dependencies: + fast-deep-equal: 3.1.3 + fast-json-stable-stringify: 2.1.0 + json-schema-traverse: 0.4.1 + uri-js: 4.4.1 + + ansi-escapes@4.3.2: + dependencies: + type-fest: 0.21.3 + + ansi-regex@5.0.1: {} + + ansi-styles@4.3.0: + dependencies: + color-convert: 2.0.1 + + arg@4.1.3: {} + + array-flatten@1.1.1: {} + + astral-regex@2.0.0: {} + + atomic-sleep@1.0.0: {} + + auto-bind@4.0.0: {} + + balanced-match@1.0.2: {} + + base64-js@1.5.1: {} + + bignumber.js@9.1.2: {} + + bintrees@1.0.2: {} + + body-parser@1.20.2: + dependencies: + bytes: 3.1.2 + content-type: 1.0.5 + debug: 2.6.9 + depd: 2.0.0 + destroy: 1.2.0 + http-errors: 2.0.0 + iconv-lite: 0.4.24 + on-finished: 2.4.1 + qs: 6.11.0 + raw-body: 2.5.2 + type-is: 1.6.18 + unpipe: 1.0.0 + transitivePeerDependencies: + - supports-color + + brace-expansion@2.0.3: + dependencies: + balanced-match: 1.0.2 + + buffer@6.0.3: + dependencies: + base64-js: 1.5.1 + ieee754: 1.2.1 + + bytes@3.1.2: {} + + call-bind-apply-helpers@1.0.2: + dependencies: + es-errors: 1.3.0 + function-bind: 1.1.2 + + call-bound@1.0.4: + dependencies: + call-bind-apply-helpers: 1.0.2 + get-intrinsic: 1.3.0 + + cfonts@2.10.1: + dependencies: + chalk: 4.1.2 + window-size: 1.1.1 + + chalk@4.1.2: + dependencies: + ansi-styles: 4.3.0 + supports-color: 7.2.0 + + ci-info@2.0.0: {} + + cli-boxes@2.2.1: {} + + cli-cursor@3.1.0: + dependencies: + restore-cursor: 3.1.0 + + cli-spinners@2.9.2: {} + + cli-truncate@2.1.0: + dependencies: + slice-ansi: 3.0.0 + string-width: 4.2.3 + + cliui@8.0.1: + dependencies: + string-width: 4.2.3 + strip-ansi: 6.0.1 + wrap-ansi: 7.0.0 + + code-excerpt@3.0.0: + dependencies: + convert-to-spaces: 1.0.2 + + color-convert@2.0.1: + dependencies: + color-name: 1.1.4 + + color-name@1.1.4: {} + + colorette@2.0.20: {} + + content-disposition@0.5.4: + dependencies: + 
safe-buffer: 5.2.1 + + content-type@1.0.5: {} + + convert-to-spaces@1.0.2: {} + + cookie-signature@1.0.6: {} + + cookie@0.6.0: {} + + create-require@1.1.1: {} + + date-fns@3.3.1: {} + + dateformat@4.6.3: {} + + debug@2.6.9: + dependencies: + ms: 2.0.0 + + deepmerge@4.3.1: {} + + define-property@1.0.0: + dependencies: + is-descriptor: 1.0.3 + + depd@2.0.0: {} + + destroy@1.2.0: {} + + diff@4.0.4: {} + + dotenv@16.4.5: {} + + dunder-proto@1.0.1: + dependencies: + call-bind-apply-helpers: 1.0.2 + es-errors: 1.3.0 + gopd: 1.2.0 + + ee-first@1.1.1: {} + + emoji-regex@8.0.0: {} + + encodeurl@1.0.2: {} + + end-of-stream@1.4.5: + dependencies: + once: 1.4.0 + + envio-darwin-arm64@2.32.12: + optional: true + + envio-darwin-x64@2.32.12: + optional: true + + envio-linux-arm64@2.32.12: + optional: true + + envio-linux-x64@2.32.12: + optional: true + + envio@2.32.12(typescript@6.0.2): + dependencies: + "@elastic/ecs-pino-format": 1.4.0 + "@envio-dev/hyperfuel-client": 1.2.2 + "@envio-dev/hypersync-client": 1.3.0 + bignumber.js: 9.1.2 + eventsource: 4.1.0 + pino: 8.16.1 + pino-pretty: 10.2.3 + prom-client: 15.0.0 + rescript: 11.1.3 + rescript-schema: 9.3.0(rescript@11.1.3) + viem: 2.21.0(typescript@6.0.2) + optionalDependencies: + envio-darwin-arm64: 2.32.12 + envio-darwin-x64: 2.32.12 + envio-linux-arm64: 2.32.12 + envio-linux-x64: 2.32.12 + transitivePeerDependencies: + - bufferutil + - typescript + - utf-8-validate + - zod + + es-define-property@1.0.1: {} + + es-errors@1.3.0: {} + + es-object-atoms@1.1.1: + dependencies: + es-errors: 1.3.0 + + escalade@3.2.0: {} + + escape-html@1.0.3: {} + + escape-string-regexp@2.0.0: {} + + etag@1.8.1: {} + + ethers@6.8.0: + dependencies: + "@adraffy/ens-normalize": 1.10.0 + "@noble/curves": 1.2.0 + "@noble/hashes": 1.3.2 + "@types/node": 18.15.13 + aes-js: 4.0.0-beta.5 + tslib: 2.4.0 + ws: 8.5.0 + transitivePeerDependencies: + - bufferutil + - utf-8-validate + + event-target-shim@5.0.1: {} + + events@3.3.0: {} + + eventsource-parser@3.0.6: 
{} + + eventsource@4.1.0: + dependencies: + eventsource-parser: 3.0.6 + + express@4.19.2: + dependencies: + accepts: 1.3.8 + array-flatten: 1.1.1 + body-parser: 1.20.2 + content-disposition: 0.5.4 + content-type: 1.0.5 + cookie: 0.6.0 + cookie-signature: 1.0.6 + debug: 2.6.9 + depd: 2.0.0 + encodeurl: 1.0.2 + escape-html: 1.0.3 + etag: 1.8.1 + finalhandler: 1.2.0 + fresh: 0.5.2 + http-errors: 2.0.0 + merge-descriptors: 1.0.1 + methods: 1.1.2 + on-finished: 2.4.1 + parseurl: 1.3.3 + path-to-regexp: 0.1.7 + proxy-addr: 2.0.7 + qs: 6.11.0 + range-parser: 1.2.1 + safe-buffer: 5.2.1 + send: 0.18.0 + serve-static: 1.15.0 + setprototypeof: 1.2.0 + statuses: 2.0.1 + type-is: 1.6.18 + utils-merge: 1.0.1 + vary: 1.1.2 + transitivePeerDependencies: + - supports-color + + fast-copy@3.0.2: {} + + fast-deep-equal@3.1.3: {} + + fast-json-stable-stringify@2.1.0: {} + + fast-json-stringify@2.7.13: + dependencies: + ajv: 6.14.0 + deepmerge: 4.3.1 + rfdc: 1.4.1 + string-similarity: 4.0.4 + + fast-redact@3.5.0: {} + + fast-safe-stringify@2.1.1: {} + + finalhandler@1.2.0: + dependencies: + debug: 2.6.9 + encodeurl: 1.0.2 + escape-html: 1.0.3 + on-finished: 2.4.1 + parseurl: 1.3.3 + statuses: 2.0.1 + unpipe: 1.0.0 + transitivePeerDependencies: + - supports-color + + forwarded@0.2.0: {} + + fresh@0.5.2: {} + + fs.realpath@1.0.0: {} + + function-bind@1.1.2: {} + + get-caller-file@2.0.5: {} + + get-intrinsic@1.3.0: + dependencies: + call-bind-apply-helpers: 1.0.2 + es-define-property: 1.0.1 + es-errors: 1.3.0 + es-object-atoms: 1.1.1 + function-bind: 1.1.2 + get-proto: 1.0.1 + gopd: 1.2.0 + has-symbols: 1.1.0 + hasown: 2.0.2 + math-intrinsics: 1.1.0 + + get-proto@1.0.1: + dependencies: + dunder-proto: 1.0.1 + es-object-atoms: 1.1.1 + + glob@8.1.0: + dependencies: + fs.realpath: 1.0.0 + inflight: 1.0.6 + inherits: 2.0.4 + minimatch: 5.1.9 + once: 1.4.0 + + gopd@1.2.0: {} + + has-flag@4.0.0: {} + + has-symbols@1.1.0: {} + + hasown@2.0.2: + dependencies: + function-bind: 1.1.2 + + 
help-me@4.2.0: + dependencies: + glob: 8.1.0 + readable-stream: 3.6.2 + + http-errors@2.0.0: + dependencies: + depd: 2.0.0 + inherits: 2.0.4 + setprototypeof: 1.2.0 + statuses: 2.0.1 + toidentifier: 1.0.1 + + iconv-lite@0.4.24: + dependencies: + safer-buffer: 2.1.2 + + ieee754@1.2.1: {} + + indent-string@4.0.0: {} + + inflight@1.0.6: + dependencies: + once: 1.4.0 + wrappy: 1.0.2 + + inherits@2.0.4: {} + + ink-big-text@1.2.0(ink@3.2.0(react@18.2.0))(react@18.2.0): + dependencies: + cfonts: 2.10.1 + ink: 3.2.0(react@18.2.0) + prop-types: 15.8.1 + react: 18.2.0 + + ink-spinner@4.0.3(ink@3.2.0(react@18.2.0))(react@18.2.0): + dependencies: + cli-spinners: 2.9.2 + ink: 3.2.0(react@18.2.0) + react: 18.2.0 + + ink@3.2.0(react@18.2.0): + dependencies: + ansi-escapes: 4.3.2 + auto-bind: 4.0.0 + chalk: 4.1.2 + cli-boxes: 2.2.1 + cli-cursor: 3.1.0 + cli-truncate: 2.1.0 + code-excerpt: 3.0.0 + indent-string: 4.0.0 + is-ci: 2.0.0 + lodash: 4.18.1 + patch-console: 1.0.0 + react: 18.2.0 + react-devtools-core: 4.28.5 + react-reconciler: 0.26.2(react@18.2.0) + scheduler: 0.20.2 + signal-exit: 3.0.7 + slice-ansi: 3.0.0 + stack-utils: 2.0.6 + string-width: 4.2.3 + type-fest: 0.12.0 + widest-line: 3.1.0 + wrap-ansi: 6.2.0 + ws: 7.5.10 + yoga-layout-prebuilt: 1.10.0 + transitivePeerDependencies: + - bufferutil + - utf-8-validate + + ipaddr.js@1.9.1: {} + + is-accessor-descriptor@1.0.1: + dependencies: + hasown: 2.0.2 + + is-buffer@1.1.6: {} + + is-ci@2.0.0: + dependencies: + ci-info: 2.0.0 + + is-data-descriptor@1.0.1: + dependencies: + hasown: 2.0.2 + + is-descriptor@1.0.3: + dependencies: + is-accessor-descriptor: 1.0.1 + is-data-descriptor: 1.0.1 + + is-fullwidth-code-point@3.0.0: {} + + is-number@3.0.0: + dependencies: + kind-of: 3.2.2 + + isows@1.0.4(ws@8.17.1): + dependencies: + ws: 8.17.1 + + joycon@3.1.1: {} + + js-sdsl@4.4.2: {} + + js-tokens@4.0.0: {} + + json-schema-traverse@0.4.1: {} + + kind-of@3.2.2: + dependencies: + is-buffer: 1.1.6 + + lodash@4.18.1: {} + + 
loose-envify@1.4.0: + dependencies: + js-tokens: 4.0.0 + + make-error@1.3.6: {} + + math-intrinsics@1.1.0: {} + + media-typer@0.3.0: {} + + merge-descriptors@1.0.1: {} + + methods@1.1.2: {} + + mime-db@1.52.0: {} + + mime-types@2.1.35: + dependencies: + mime-db: 1.52.0 + + mime@1.6.0: {} + + mimic-fn@2.1.0: {} + + minimatch@5.1.9: + dependencies: + brace-expansion: 2.0.3 + + minimist@1.2.8: {} + + ms@2.0.0: {} + + ms@2.1.3: {} + + negotiator@0.6.3: {} + + object-assign@4.1.1: {} + + object-inspect@1.13.4: {} + + on-exit-leak-free@2.1.2: {} + + on-finished@2.4.1: + dependencies: + ee-first: 1.1.1 + + once@1.4.0: + dependencies: + wrappy: 1.0.2 + + onetime@5.1.2: + dependencies: + mimic-fn: 2.1.0 + + parseurl@1.3.3: {} + + patch-console@1.0.0: {} + + path-to-regexp@0.1.7: {} + + pino-abstract-transport@1.1.0: + dependencies: + readable-stream: 4.7.0 + split2: 4.2.0 + + pino-abstract-transport@1.2.0: + dependencies: + readable-stream: 4.7.0 + split2: 4.2.0 + + pino-pretty@10.2.3: + dependencies: + colorette: 2.0.20 + dateformat: 4.6.3 + fast-copy: 3.0.2 + fast-safe-stringify: 2.1.1 + help-me: 4.2.0 + joycon: 3.1.1 + minimist: 1.2.8 + on-exit-leak-free: 2.1.2 + pino-abstract-transport: 1.2.0 + pump: 3.0.4 + readable-stream: 4.7.0 + secure-json-parse: 2.7.0 + sonic-boom: 3.8.1 + strip-json-comments: 3.1.1 + + pino-std-serializers@6.2.2: {} + + pino@8.16.1: + dependencies: + atomic-sleep: 1.0.0 + fast-redact: 3.5.0 + on-exit-leak-free: 2.1.2 + pino-abstract-transport: 1.1.0 + pino-std-serializers: 6.2.2 + process-warning: 2.3.2 + quick-format-unescaped: 4.0.4 + real-require: 0.2.0 + safe-stable-stringify: 2.5.0 + sonic-boom: 3.8.1 + thread-stream: 2.7.0 + + postgres@3.4.1: {} + + process-warning@2.3.2: {} + + process@0.11.10: {} + + prom-client@15.0.0: + dependencies: + "@opentelemetry/api": 1.9.1 + tdigest: 0.1.2 + + prop-types@15.8.1: + dependencies: + loose-envify: 1.4.0 + object-assign: 4.1.1 + react-is: 16.13.1 + + proxy-addr@2.0.7: + dependencies: + forwarded: 
0.2.0 + ipaddr.js: 1.9.1 + + pump@3.0.4: + dependencies: + end-of-stream: 1.4.5 + once: 1.4.0 + + punycode@2.3.1: {} + + qs@6.11.0: + dependencies: + side-channel: 1.1.0 + + quick-format-unescaped@4.0.4: {} + + range-parser@1.2.1: {} + + raw-body@2.5.2: + dependencies: + bytes: 3.1.2 + http-errors: 2.0.0 + iconv-lite: 0.4.24 + unpipe: 1.0.0 + + react-devtools-core@4.28.5: + dependencies: + shell-quote: 1.8.3 + ws: 7.5.10 + transitivePeerDependencies: + - bufferutil + - utf-8-validate + + react-dom@19.2.4(react@18.2.0): + dependencies: + react: 18.2.0 + scheduler: 0.27.0 + + react-is@16.13.1: {} + + react-reconciler@0.26.2(react@18.2.0): + dependencies: + loose-envify: 1.4.0 + object-assign: 4.1.1 + react: 18.2.0 + scheduler: 0.20.2 + + react@18.2.0: + dependencies: + loose-envify: 1.4.0 + + readable-stream@3.6.2: + dependencies: + inherits: 2.0.4 + string_decoder: 1.3.0 + util-deprecate: 1.0.2 + + readable-stream@4.7.0: + dependencies: + abort-controller: 3.0.0 + buffer: 6.0.3 + events: 3.3.0 + process: 0.11.10 + string_decoder: 1.3.0 + + real-require@0.2.0: {} + + require-directory@2.1.1: {} + + rescript-envsafe@5.0.0(rescript-schema@9.3.0(rescript@11.1.3))(rescript@11.1.3): + dependencies: + rescript: 11.1.3 + rescript-schema: 9.3.0(rescript@11.1.3) + + rescript-schema@9.3.0(rescript@11.1.3): + optionalDependencies: + rescript: 11.1.3 + + rescript@11.1.3: {} + + restore-cursor@3.1.0: + dependencies: + onetime: 5.1.2 + signal-exit: 3.0.7 + + rfdc@1.4.1: {} + + safe-buffer@5.2.1: {} + + safe-stable-stringify@2.5.0: {} + + safer-buffer@2.1.2: {} + + scheduler@0.20.2: + dependencies: + loose-envify: 1.4.0 + object-assign: 4.1.1 + + scheduler@0.27.0: {} + + secure-json-parse@2.7.0: {} + + send@0.18.0: + dependencies: + debug: 2.6.9 + depd: 2.0.0 + destroy: 1.2.0 + encodeurl: 1.0.2 + escape-html: 1.0.3 + etag: 1.8.1 + fresh: 0.5.2 + http-errors: 2.0.0 + mime: 1.6.0 + ms: 2.1.3 + on-finished: 2.4.1 + range-parser: 1.2.1 + statuses: 2.0.1 + transitivePeerDependencies: + 
- supports-color + + serve-static@1.15.0: + dependencies: + encodeurl: 1.0.2 + escape-html: 1.0.3 + parseurl: 1.3.3 + send: 0.18.0 + transitivePeerDependencies: + - supports-color + + setprototypeof@1.2.0: {} + + shell-quote@1.8.3: {} + + side-channel-list@1.0.0: + dependencies: + es-errors: 1.3.0 + object-inspect: 1.13.4 + + side-channel-map@1.0.1: + dependencies: + call-bound: 1.0.4 + es-errors: 1.3.0 + get-intrinsic: 1.3.0 + object-inspect: 1.13.4 + + side-channel-weakmap@1.0.2: + dependencies: + call-bound: 1.0.4 + es-errors: 1.3.0 + get-intrinsic: 1.3.0 + object-inspect: 1.13.4 + side-channel-map: 1.0.1 + + side-channel@1.1.0: + dependencies: + es-errors: 1.3.0 + object-inspect: 1.13.4 + side-channel-list: 1.0.0 + side-channel-map: 1.0.1 + side-channel-weakmap: 1.0.2 + + signal-exit@3.0.7: {} + + slice-ansi@3.0.0: + dependencies: + ansi-styles: 4.3.0 + astral-regex: 2.0.0 + is-fullwidth-code-point: 3.0.0 + + sonic-boom@3.8.1: + dependencies: + atomic-sleep: 1.0.0 + + split2@4.2.0: {} + + stack-utils@2.0.6: + dependencies: + escape-string-regexp: 2.0.0 + + statuses@2.0.1: {} + + string-similarity@4.0.4: {} + + string-width@4.2.3: + dependencies: + emoji-regex: 8.0.0 + is-fullwidth-code-point: 3.0.0 + strip-ansi: 6.0.1 + + string_decoder@1.3.0: + dependencies: + safe-buffer: 5.2.1 + + strip-ansi@6.0.1: + dependencies: + ansi-regex: 5.0.1 + + strip-json-comments@3.1.1: {} + + supports-color@7.2.0: + dependencies: + has-flag: 4.0.0 + + tdigest@0.1.2: + dependencies: + bintrees: 1.0.2 + + thread-stream@2.7.0: + dependencies: + real-require: 0.2.0 + + toidentifier@1.0.1: {} + + ts-node@10.9.1(@types/node@18.15.13)(typescript@6.0.2): + dependencies: + "@cspotcode/source-map-support": 0.8.1 + "@tsconfig/node10": 1.0.12 + "@tsconfig/node12": 1.0.11 + "@tsconfig/node14": 1.0.3 + "@tsconfig/node16": 1.0.4 + "@types/node": 18.15.13 + acorn: 8.16.0 + acorn-walk: 8.3.5 + arg: 4.1.3 + create-require: 1.1.1 + diff: 4.0.4 + make-error: 1.3.6 + typescript: 6.0.2 + 
v8-compile-cache-lib: 3.0.1 + yn: 3.1.1 + + tslib@2.4.0: {} + + type-fest@0.12.0: {} + + type-fest@0.21.3: {} + + type-is@1.6.18: + dependencies: + media-typer: 0.3.0 + mime-types: 2.1.35 + + typescript@6.0.2: {} + + unpipe@1.0.0: {} + + uri-js@4.4.1: + dependencies: + punycode: 2.3.1 + + util-deprecate@1.0.2: {} + + utils-merge@1.0.1: {} + + v8-compile-cache-lib@3.0.1: {} + + vary@1.1.2: {} + + viem@2.21.0(typescript@6.0.2): + dependencies: + "@adraffy/ens-normalize": 1.10.0 + "@noble/curves": 1.4.0 + "@noble/hashes": 1.4.0 + "@scure/bip32": 1.4.0 + "@scure/bip39": 1.3.0 + abitype: 1.0.5(typescript@6.0.2) + isows: 1.0.4(ws@8.17.1) + webauthn-p256: 0.0.5 + ws: 8.17.1 + optionalDependencies: + typescript: 6.0.2 + transitivePeerDependencies: + - bufferutil + - utf-8-validate + - zod + + webauthn-p256@0.0.5: + dependencies: + "@noble/curves": 1.4.0 + "@noble/hashes": 1.4.0 + + widest-line@3.1.0: + dependencies: + string-width: 4.2.3 + + window-size@1.1.1: + dependencies: + define-property: 1.0.0 + is-number: 3.0.0 + + wrap-ansi@6.2.0: + dependencies: + ansi-styles: 4.3.0 + string-width: 4.2.3 + strip-ansi: 6.0.1 + + wrap-ansi@7.0.0: + dependencies: + ansi-styles: 4.3.0 + string-width: 4.2.3 + strip-ansi: 6.0.1 + + wrappy@1.0.2: {} + + ws@7.5.10: {} + + ws@8.17.1: {} + + ws@8.5.0: {} + + y18n@5.0.8: {} + + yargs-parser@21.1.1: {} + + yargs@17.7.2: + dependencies: + cliui: 8.0.1 + escalade: 3.2.0 + get-caller-file: 2.0.5 + require-directory: 2.1.1 + string-width: 4.2.3 + y18n: 5.0.8 + yargs-parser: 21.1.1 + + yn@3.1.1: {} + + yoga-layout-prebuilt@1.10.0: + dependencies: + "@types/yoga-layout": 1.9.2 diff --git a/apps/hypersync-indexer/generated/rescript.json b/apps/hypersync-indexer/generated/rescript.json new file mode 100644 index 000000000..e99f4ead7 --- /dev/null +++ b/apps/hypersync-indexer/generated/rescript.json @@ -0,0 +1,35 @@ +{ + "name": "generated", + "version": "0.1.0", + "sources": [ + { + "dir": "src", + "subdirs": true + } + ], + "gentypeconfig": { + 
"shims": { + "Js": "Js" + }, + "generatedFileExtension": ".gen.ts", + "debug": { + "all": false, + "basic": false + } + }, + "package-specs": { + "module": "commonjs", + "in-source": true + }, + "jsx": { + "version": 4 + }, + "suffix": ".res.js", + "bs-dependencies": [ + "rescript-envsafe", + "rescript-schema", + "@rescript/react", + "envio" + ], + "bsc-flags": ["-open RescriptSchema"] +} diff --git a/apps/hypersync-indexer/generated/schema.graphql b/apps/hypersync-indexer/generated/schema.graphql new file mode 100644 index 000000000..f7245e50e --- /dev/null +++ b/apps/hypersync-indexer/generated/schema.graphql @@ -0,0 +1,1258 @@ +""" +The `JSON` scalar type represents JSON values as specified by [ECMA-404](http://www.ecma-international.org/publications/files/ECMA-ST/ECMA-404.pdf). +""" +scalar JSON + +scalar BigInt + +type PageInfo { + hasNextPage: Boolean! + hasPreviousPage: Boolean! + startCursor: String + endCursor: String +} + +type Meta { + status: JSON +} + +type Query { + token(id: String!): token + tokens(where: tokenFilter, orderBy: String, orderDirection: String, before: String, after: String, limit: Int): tokenPage! + account(id: String!): account + accounts(where: accountFilter, orderBy: String, orderDirection: String, before: String, after: String, limit: Int): accountPage! + accountBalance(accountId: String!, tokenId: String!): accountBalance + accountBalances(where: accountBalanceFilter, orderBy: String, orderDirection: String, before: String, after: String, limit: Int): accountBalancePage! + accountPower(accountId: String!): accountPower + accountPowers(where: accountPowerFilter, orderBy: String, orderDirection: String, before: String, after: String, limit: Int): accountPowerPage! + votingPowerHistory(transactionHash: String!, accountId: String!, logIndex: Float!): votingPowerHistory + votingPowerHistorys(where: votingPowerHistoryFilter, orderBy: String, orderDirection: String, before: String, after: String, limit: Int): votingPowerHistoryPage! 
+ balanceHistory(transactionHash: String!, accountId: String!, logIndex: Float!): balanceHistory + balanceHistorys(where: balanceHistoryFilter, orderBy: String, orderDirection: String, before: String, after: String, limit: Int): balanceHistoryPage! + delegation(transactionHash: String!, delegatorAccountId: String!, delegateAccountId: String!): delegation + delegations(where: delegationFilter, orderBy: String, orderDirection: String, before: String, after: String, limit: Int): delegationPage! + transfer(transactionHash: String!, fromAccountId: String!, toAccountId: String!): transfer + transfers(where: transferFilter, orderBy: String, orderDirection: String, before: String, after: String, limit: Int): transferPage! + votesOnchain(voterAccountId: String!, proposalId: String!): votesOnchain + votesOnchains(where: votesOnchainFilter, orderBy: String, orderDirection: String, before: String, after: String, limit: Int): votesOnchainPage! + proposalsOnchain(id: String!): proposalsOnchain + proposalsOnchains(where: proposalsOnchainFilter, orderBy: String, orderDirection: String, before: String, after: String, limit: Int): proposalsOnchainPage! + daoMetricsDayBucket(date: BigInt!, tokenId: String!, metricType: String!): daoMetricsDayBucket + daoMetricsDayBuckets(where: daoMetricsDayBucketFilter, orderBy: String, orderDirection: String, before: String, after: String, limit: Int): daoMetricsDayBucketPage! + transaction(transactionHash: String!): transaction + transactions(where: transactionFilter, orderBy: String, orderDirection: String, before: String, after: String, limit: Int): transactionPage! + tokenPrice(timestamp: BigInt!): tokenPrice + tokenPrices(where: tokenPriceFilter, orderBy: String, orderDirection: String, before: String, after: String, limit: Int): tokenPricePage! + _meta: Meta +} + +type token { + id: String! + name: String + decimals: Int! + totalSupply: BigInt! + delegatedSupply: BigInt! + cexSupply: BigInt! + dexSupply: BigInt! + lendingSupply: BigInt! 
+ circulatingSupply: BigInt! + treasury: BigInt! +} + +type tokenPage { + items: [token!]! + pageInfo: PageInfo! + totalCount: Int! +} + +input tokenFilter { + AND: [tokenFilter] + OR: [tokenFilter] + id: String + id_not: String + id_in: [String] + id_not_in: [String] + id_contains: String + id_not_contains: String + id_starts_with: String + id_ends_with: String + id_not_starts_with: String + id_not_ends_with: String + name: String + name_not: String + name_in: [String] + name_not_in: [String] + name_contains: String + name_not_contains: String + name_starts_with: String + name_ends_with: String + name_not_starts_with: String + name_not_ends_with: String + decimals: Int + decimals_not: Int + decimals_in: [Int] + decimals_not_in: [Int] + decimals_gt: Int + decimals_lt: Int + decimals_gte: Int + decimals_lte: Int + totalSupply: BigInt + totalSupply_not: BigInt + totalSupply_in: [BigInt] + totalSupply_not_in: [BigInt] + totalSupply_gt: BigInt + totalSupply_lt: BigInt + totalSupply_gte: BigInt + totalSupply_lte: BigInt + delegatedSupply: BigInt + delegatedSupply_not: BigInt + delegatedSupply_in: [BigInt] + delegatedSupply_not_in: [BigInt] + delegatedSupply_gt: BigInt + delegatedSupply_lt: BigInt + delegatedSupply_gte: BigInt + delegatedSupply_lte: BigInt + cexSupply: BigInt + cexSupply_not: BigInt + cexSupply_in: [BigInt] + cexSupply_not_in: [BigInt] + cexSupply_gt: BigInt + cexSupply_lt: BigInt + cexSupply_gte: BigInt + cexSupply_lte: BigInt + dexSupply: BigInt + dexSupply_not: BigInt + dexSupply_in: [BigInt] + dexSupply_not_in: [BigInt] + dexSupply_gt: BigInt + dexSupply_lt: BigInt + dexSupply_gte: BigInt + dexSupply_lte: BigInt + lendingSupply: BigInt + lendingSupply_not: BigInt + lendingSupply_in: [BigInt] + lendingSupply_not_in: [BigInt] + lendingSupply_gt: BigInt + lendingSupply_lt: BigInt + lendingSupply_gte: BigInt + lendingSupply_lte: BigInt + circulatingSupply: BigInt + circulatingSupply_not: BigInt + circulatingSupply_in: [BigInt] + circulatingSupply_not_in: 
[BigInt] + circulatingSupply_gt: BigInt + circulatingSupply_lt: BigInt + circulatingSupply_gte: BigInt + circulatingSupply_lte: BigInt + treasury: BigInt + treasury_not: BigInt + treasury_in: [BigInt] + treasury_not_in: [BigInt] + treasury_gt: BigInt + treasury_lt: BigInt + treasury_gte: BigInt + treasury_lte: BigInt +} + +type account { + id: String! + balances(where: accountBalanceFilter, orderBy: String, orderDirection: String, before: String, after: String, limit: Int): accountBalancePage + powers(where: accountPowerFilter, orderBy: String, orderDirection: String, before: String, after: String, limit: Int): accountPowerPage + delegationsFrom(where: delegationFilter, orderBy: String, orderDirection: String, before: String, after: String, limit: Int): delegationPage + delegationsTo(where: delegationFilter, orderBy: String, orderDirection: String, before: String, after: String, limit: Int): delegationPage + sentTransfers(where: transferFilter, orderBy: String, orderDirection: String, before: String, after: String, limit: Int): transferPage + receivedTransfers(where: transferFilter, orderBy: String, orderDirection: String, before: String, after: String, limit: Int): transferPage + proposals(where: proposalsOnchainFilter, orderBy: String, orderDirection: String, before: String, after: String, limit: Int): proposalsOnchainPage + votes(where: votesOnchainFilter, orderBy: String, orderDirection: String, before: String, after: String, limit: Int): votesOnchainPage + delegatedFromBalances(where: accountBalanceFilter, orderBy: String, orderDirection: String, before: String, after: String, limit: Int): accountBalancePage +} + +type accountBalancePage { + items: [accountBalance!]! + pageInfo: PageInfo! + totalCount: Int! +} + +type accountBalance { + accountId: String! + tokenId: String! + balance: BigInt! + delegate: String! 
+ delegatePower: accountPower + account: account + delegateAccount: account + delegatedTo: accountPower + token: token +} + +type accountPower { + accountId: String! + daoId: String! + votingPower: BigInt! + votesCount: Int! + proposalsCount: Int! + delegationsCount: Int! + lastVoteTimestamp: BigInt! + account: account +} + +input accountBalanceFilter { + AND: [accountBalanceFilter] + OR: [accountBalanceFilter] + accountId: String + accountId_not: String + accountId_in: [String] + accountId_not_in: [String] + accountId_contains: String + accountId_not_contains: String + accountId_starts_with: String + accountId_ends_with: String + accountId_not_starts_with: String + accountId_not_ends_with: String + tokenId: String + tokenId_not: String + tokenId_in: [String] + tokenId_not_in: [String] + tokenId_contains: String + tokenId_not_contains: String + tokenId_starts_with: String + tokenId_ends_with: String + tokenId_not_starts_with: String + tokenId_not_ends_with: String + balance: BigInt + balance_not: BigInt + balance_in: [BigInt] + balance_not_in: [BigInt] + balance_gt: BigInt + balance_lt: BigInt + balance_gte: BigInt + balance_lte: BigInt + delegate: String + delegate_not: String + delegate_in: [String] + delegate_not_in: [String] + delegate_contains: String + delegate_not_contains: String + delegate_starts_with: String + delegate_ends_with: String + delegate_not_starts_with: String + delegate_not_ends_with: String +} + +type accountPowerPage { + items: [accountPower!]! + pageInfo: PageInfo! + totalCount: Int! 
+} + +input accountPowerFilter { + AND: [accountPowerFilter] + OR: [accountPowerFilter] + accountId: String + accountId_not: String + accountId_in: [String] + accountId_not_in: [String] + accountId_contains: String + accountId_not_contains: String + accountId_starts_with: String + accountId_ends_with: String + accountId_not_starts_with: String + accountId_not_ends_with: String + daoId: String + daoId_not: String + daoId_in: [String] + daoId_not_in: [String] + daoId_contains: String + daoId_not_contains: String + daoId_starts_with: String + daoId_ends_with: String + daoId_not_starts_with: String + daoId_not_ends_with: String + votingPower: BigInt + votingPower_not: BigInt + votingPower_in: [BigInt] + votingPower_not_in: [BigInt] + votingPower_gt: BigInt + votingPower_lt: BigInt + votingPower_gte: BigInt + votingPower_lte: BigInt + votesCount: Int + votesCount_not: Int + votesCount_in: [Int] + votesCount_not_in: [Int] + votesCount_gt: Int + votesCount_lt: Int + votesCount_gte: Int + votesCount_lte: Int + proposalsCount: Int + proposalsCount_not: Int + proposalsCount_in: [Int] + proposalsCount_not_in: [Int] + proposalsCount_gt: Int + proposalsCount_lt: Int + proposalsCount_gte: Int + proposalsCount_lte: Int + delegationsCount: Int + delegationsCount_not: Int + delegationsCount_in: [Int] + delegationsCount_not_in: [Int] + delegationsCount_gt: Int + delegationsCount_lt: Int + delegationsCount_gte: Int + delegationsCount_lte: Int + lastVoteTimestamp: BigInt + lastVoteTimestamp_not: BigInt + lastVoteTimestamp_in: [BigInt] + lastVoteTimestamp_not_in: [BigInt] + lastVoteTimestamp_gt: BigInt + lastVoteTimestamp_lt: BigInt + lastVoteTimestamp_gte: BigInt + lastVoteTimestamp_lte: BigInt +} + +type delegationPage { + items: [delegation!]! + pageInfo: PageInfo! + totalCount: Int! +} + +type delegation { + transactionHash: String! + daoId: String! + delegateAccountId: String! + delegatorAccountId: String! + delegatedValue: BigInt! + previousDelegate: String + timestamp: BigInt! 
+ logIndex: Int! + isCex: Boolean! + isDex: Boolean! + isLending: Boolean! + isTotal: Boolean! + delegate: account + delegator: account + transaction: transaction +} + +type transaction { + transactionHash: String! + fromAddress: String + toAddress: String + isCex: Boolean! + isDex: Boolean! + isLending: Boolean! + isTotal: Boolean! + timestamp: BigInt! + transfers(where: transferFilter, orderBy: String, orderDirection: String, before: String, after: String, limit: Int): transferPage + delegations(where: delegationFilter, orderBy: String, orderDirection: String, before: String, after: String, limit: Int): delegationPage +} + +type transferPage { + items: [transfer!]! + pageInfo: PageInfo! + totalCount: Int! +} + +type transfer { + transactionHash: String! + daoId: String! + tokenId: String! + amount: BigInt! + fromAccountId: String! + toAccountId: String! + timestamp: BigInt! + logIndex: Int! + isCex: Boolean! + isDex: Boolean! + isLending: Boolean! + isTotal: Boolean! + from: account + to: account + token: token + transaction: transaction +} + +input transferFilter { + AND: [transferFilter] + OR: [transferFilter] + transactionHash: String + transactionHash_not: String + transactionHash_in: [String] + transactionHash_not_in: [String] + transactionHash_contains: String + transactionHash_not_contains: String + transactionHash_starts_with: String + transactionHash_ends_with: String + transactionHash_not_starts_with: String + transactionHash_not_ends_with: String + daoId: String + daoId_not: String + daoId_in: [String] + daoId_not_in: [String] + daoId_contains: String + daoId_not_contains: String + daoId_starts_with: String + daoId_ends_with: String + daoId_not_starts_with: String + daoId_not_ends_with: String + tokenId: String + tokenId_not: String + tokenId_in: [String] + tokenId_not_in: [String] + tokenId_contains: String + tokenId_not_contains: String + tokenId_starts_with: String + tokenId_ends_with: String + tokenId_not_starts_with: String + 
tokenId_not_ends_with: String + amount: BigInt + amount_not: BigInt + amount_in: [BigInt] + amount_not_in: [BigInt] + amount_gt: BigInt + amount_lt: BigInt + amount_gte: BigInt + amount_lte: BigInt + fromAccountId: String + fromAccountId_not: String + fromAccountId_in: [String] + fromAccountId_not_in: [String] + fromAccountId_contains: String + fromAccountId_not_contains: String + fromAccountId_starts_with: String + fromAccountId_ends_with: String + fromAccountId_not_starts_with: String + fromAccountId_not_ends_with: String + toAccountId: String + toAccountId_not: String + toAccountId_in: [String] + toAccountId_not_in: [String] + toAccountId_contains: String + toAccountId_not_contains: String + toAccountId_starts_with: String + toAccountId_ends_with: String + toAccountId_not_starts_with: String + toAccountId_not_ends_with: String + timestamp: BigInt + timestamp_not: BigInt + timestamp_in: [BigInt] + timestamp_not_in: [BigInt] + timestamp_gt: BigInt + timestamp_lt: BigInt + timestamp_gte: BigInt + timestamp_lte: BigInt + logIndex: Int + logIndex_not: Int + logIndex_in: [Int] + logIndex_not_in: [Int] + logIndex_gt: Int + logIndex_lt: Int + logIndex_gte: Int + logIndex_lte: Int + isCex: Boolean + isCex_not: Boolean + isCex_in: [Boolean] + isCex_not_in: [Boolean] + isDex: Boolean + isDex_not: Boolean + isDex_in: [Boolean] + isDex_not_in: [Boolean] + isLending: Boolean + isLending_not: Boolean + isLending_in: [Boolean] + isLending_not_in: [Boolean] + isTotal: Boolean + isTotal_not: Boolean + isTotal_in: [Boolean] + isTotal_not_in: [Boolean] +} + +input delegationFilter { + AND: [delegationFilter] + OR: [delegationFilter] + transactionHash: String + transactionHash_not: String + transactionHash_in: [String] + transactionHash_not_in: [String] + transactionHash_contains: String + transactionHash_not_contains: String + transactionHash_starts_with: String + transactionHash_ends_with: String + transactionHash_not_starts_with: String + transactionHash_not_ends_with: String + 
daoId: String + daoId_not: String + daoId_in: [String] + daoId_not_in: [String] + daoId_contains: String + daoId_not_contains: String + daoId_starts_with: String + daoId_ends_with: String + daoId_not_starts_with: String + daoId_not_ends_with: String + delegateAccountId: String + delegateAccountId_not: String + delegateAccountId_in: [String] + delegateAccountId_not_in: [String] + delegateAccountId_contains: String + delegateAccountId_not_contains: String + delegateAccountId_starts_with: String + delegateAccountId_ends_with: String + delegateAccountId_not_starts_with: String + delegateAccountId_not_ends_with: String + delegatorAccountId: String + delegatorAccountId_not: String + delegatorAccountId_in: [String] + delegatorAccountId_not_in: [String] + delegatorAccountId_contains: String + delegatorAccountId_not_contains: String + delegatorAccountId_starts_with: String + delegatorAccountId_ends_with: String + delegatorAccountId_not_starts_with: String + delegatorAccountId_not_ends_with: String + delegatedValue: BigInt + delegatedValue_not: BigInt + delegatedValue_in: [BigInt] + delegatedValue_not_in: [BigInt] + delegatedValue_gt: BigInt + delegatedValue_lt: BigInt + delegatedValue_gte: BigInt + delegatedValue_lte: BigInt + previousDelegate: String + previousDelegate_not: String + previousDelegate_in: [String] + previousDelegate_not_in: [String] + previousDelegate_contains: String + previousDelegate_not_contains: String + previousDelegate_starts_with: String + previousDelegate_ends_with: String + previousDelegate_not_starts_with: String + previousDelegate_not_ends_with: String + timestamp: BigInt + timestamp_not: BigInt + timestamp_in: [BigInt] + timestamp_not_in: [BigInt] + timestamp_gt: BigInt + timestamp_lt: BigInt + timestamp_gte: BigInt + timestamp_lte: BigInt + logIndex: Int + logIndex_not: Int + logIndex_in: [Int] + logIndex_not_in: [Int] + logIndex_gt: Int + logIndex_lt: Int + logIndex_gte: Int + logIndex_lte: Int + isCex: Boolean + isCex_not: Boolean + isCex_in: 
[Boolean] + isCex_not_in: [Boolean] + isDex: Boolean + isDex_not: Boolean + isDex_in: [Boolean] + isDex_not_in: [Boolean] + isLending: Boolean + isLending_not: Boolean + isLending_in: [Boolean] + isLending_not_in: [Boolean] + isTotal: Boolean + isTotal_not: Boolean + isTotal_in: [Boolean] + isTotal_not_in: [Boolean] +} + +type proposalsOnchainPage { + items: [proposalsOnchain!]! + pageInfo: PageInfo! + totalCount: Int! +} + +type proposalsOnchain { + id: String! + txHash: String! + daoId: String! + proposerAccountId: String! + targets: JSON! + values: JSON! + signatures: JSON! + calldatas: JSON! + startBlock: Int! + endBlock: Int! + description: String! + timestamp: BigInt! + endTimestamp: BigInt! + status: String! + forVotes: BigInt! + againstVotes: BigInt! + abstainVotes: BigInt! + proposalType: Int + votes(where: votesOnchainFilter, orderBy: String, orderDirection: String, before: String, after: String, limit: Int): votesOnchainPage + proposer: account +} + +type votesOnchainPage { + items: [votesOnchain!]! + pageInfo: PageInfo! + totalCount: Int! +} + +type votesOnchain { + txHash: String! + daoId: String! + voterAccountId: String! + proposalId: String! + support: String! + votingPower: BigInt! + reason: String + timestamp: BigInt! 
+ proposal: proposalsOnchain + voter: account +} + +input votesOnchainFilter { + AND: [votesOnchainFilter] + OR: [votesOnchainFilter] + txHash: String + txHash_not: String + txHash_in: [String] + txHash_not_in: [String] + txHash_contains: String + txHash_not_contains: String + txHash_starts_with: String + txHash_ends_with: String + txHash_not_starts_with: String + txHash_not_ends_with: String + daoId: String + daoId_not: String + daoId_in: [String] + daoId_not_in: [String] + daoId_contains: String + daoId_not_contains: String + daoId_starts_with: String + daoId_ends_with: String + daoId_not_starts_with: String + daoId_not_ends_with: String + voterAccountId: String + voterAccountId_not: String + voterAccountId_in: [String] + voterAccountId_not_in: [String] + voterAccountId_contains: String + voterAccountId_not_contains: String + voterAccountId_starts_with: String + voterAccountId_ends_with: String + voterAccountId_not_starts_with: String + voterAccountId_not_ends_with: String + proposalId: String + proposalId_not: String + proposalId_in: [String] + proposalId_not_in: [String] + proposalId_contains: String + proposalId_not_contains: String + proposalId_starts_with: String + proposalId_ends_with: String + proposalId_not_starts_with: String + proposalId_not_ends_with: String + support: String + support_not: String + support_in: [String] + support_not_in: [String] + support_contains: String + support_not_contains: String + support_starts_with: String + support_ends_with: String + support_not_starts_with: String + support_not_ends_with: String + votingPower: BigInt + votingPower_not: BigInt + votingPower_in: [BigInt] + votingPower_not_in: [BigInt] + votingPower_gt: BigInt + votingPower_lt: BigInt + votingPower_gte: BigInt + votingPower_lte: BigInt + reason: String + reason_not: String + reason_in: [String] + reason_not_in: [String] + reason_contains: String + reason_not_contains: String + reason_starts_with: String + reason_ends_with: String + reason_not_starts_with: 
String + reason_not_ends_with: String + timestamp: BigInt + timestamp_not: BigInt + timestamp_in: [BigInt] + timestamp_not_in: [BigInt] + timestamp_gt: BigInt + timestamp_lt: BigInt + timestamp_gte: BigInt + timestamp_lte: BigInt +} + +input proposalsOnchainFilter { + AND: [proposalsOnchainFilter] + OR: [proposalsOnchainFilter] + id: String + id_not: String + id_in: [String] + id_not_in: [String] + id_contains: String + id_not_contains: String + id_starts_with: String + id_ends_with: String + id_not_starts_with: String + id_not_ends_with: String + txHash: String + txHash_not: String + txHash_in: [String] + txHash_not_in: [String] + txHash_contains: String + txHash_not_contains: String + txHash_starts_with: String + txHash_ends_with: String + txHash_not_starts_with: String + txHash_not_ends_with: String + daoId: String + daoId_not: String + daoId_in: [String] + daoId_not_in: [String] + daoId_contains: String + daoId_not_contains: String + daoId_starts_with: String + daoId_ends_with: String + daoId_not_starts_with: String + daoId_not_ends_with: String + proposerAccountId: String + proposerAccountId_not: String + proposerAccountId_in: [String] + proposerAccountId_not_in: [String] + proposerAccountId_contains: String + proposerAccountId_not_contains: String + proposerAccountId_starts_with: String + proposerAccountId_ends_with: String + proposerAccountId_not_starts_with: String + proposerAccountId_not_ends_with: String + startBlock: Int + startBlock_not: Int + startBlock_in: [Int] + startBlock_not_in: [Int] + startBlock_gt: Int + startBlock_lt: Int + startBlock_gte: Int + startBlock_lte: Int + endBlock: Int + endBlock_not: Int + endBlock_in: [Int] + endBlock_not_in: [Int] + endBlock_gt: Int + endBlock_lt: Int + endBlock_gte: Int + endBlock_lte: Int + description: String + description_not: String + description_in: [String] + description_not_in: [String] + description_contains: String + description_not_contains: String + description_starts_with: String + 
description_ends_with: String + description_not_starts_with: String + description_not_ends_with: String + timestamp: BigInt + timestamp_not: BigInt + timestamp_in: [BigInt] + timestamp_not_in: [BigInt] + timestamp_gt: BigInt + timestamp_lt: BigInt + timestamp_gte: BigInt + timestamp_lte: BigInt + endTimestamp: BigInt + endTimestamp_not: BigInt + endTimestamp_in: [BigInt] + endTimestamp_not_in: [BigInt] + endTimestamp_gt: BigInt + endTimestamp_lt: BigInt + endTimestamp_gte: BigInt + endTimestamp_lte: BigInt + status: String + status_not: String + status_in: [String] + status_not_in: [String] + status_contains: String + status_not_contains: String + status_starts_with: String + status_ends_with: String + status_not_starts_with: String + status_not_ends_with: String + forVotes: BigInt + forVotes_not: BigInt + forVotes_in: [BigInt] + forVotes_not_in: [BigInt] + forVotes_gt: BigInt + forVotes_lt: BigInt + forVotes_gte: BigInt + forVotes_lte: BigInt + againstVotes: BigInt + againstVotes_not: BigInt + againstVotes_in: [BigInt] + againstVotes_not_in: [BigInt] + againstVotes_gt: BigInt + againstVotes_lt: BigInt + againstVotes_gte: BigInt + againstVotes_lte: BigInt + abstainVotes: BigInt + abstainVotes_not: BigInt + abstainVotes_in: [BigInt] + abstainVotes_not_in: [BigInt] + abstainVotes_gt: BigInt + abstainVotes_lt: BigInt + abstainVotes_gte: BigInt + abstainVotes_lte: BigInt + proposalType: Int + proposalType_not: Int + proposalType_in: [Int] + proposalType_not_in: [Int] + proposalType_gt: Int + proposalType_lt: Int + proposalType_gte: Int + proposalType_lte: Int +} + +type accountPage { + items: [account!]! + pageInfo: PageInfo! + totalCount: Int! 
+} + +input accountFilter { + AND: [accountFilter] + OR: [accountFilter] + id: String + id_not: String + id_in: [String] + id_not_in: [String] + id_contains: String + id_not_contains: String + id_starts_with: String + id_ends_with: String + id_not_starts_with: String + id_not_ends_with: String +} + +type votingPowerHistory { + transactionHash: String! + daoId: String! + accountId: String! + votingPower: BigInt! + delta: BigInt! + deltaMod: BigInt! + timestamp: BigInt! + logIndex: Int! + transfer: transfer + delegation: delegation + account: account +} + +type votingPowerHistoryPage { + items: [votingPowerHistory!]! + pageInfo: PageInfo! + totalCount: Int! +} + +input votingPowerHistoryFilter { + AND: [votingPowerHistoryFilter] + OR: [votingPowerHistoryFilter] + transactionHash: String + transactionHash_not: String + transactionHash_in: [String] + transactionHash_not_in: [String] + transactionHash_contains: String + transactionHash_not_contains: String + transactionHash_starts_with: String + transactionHash_ends_with: String + transactionHash_not_starts_with: String + transactionHash_not_ends_with: String + daoId: String + daoId_not: String + daoId_in: [String] + daoId_not_in: [String] + daoId_contains: String + daoId_not_contains: String + daoId_starts_with: String + daoId_ends_with: String + daoId_not_starts_with: String + daoId_not_ends_with: String + accountId: String + accountId_not: String + accountId_in: [String] + accountId_not_in: [String] + accountId_contains: String + accountId_not_contains: String + accountId_starts_with: String + accountId_ends_with: String + accountId_not_starts_with: String + accountId_not_ends_with: String + votingPower: BigInt + votingPower_not: BigInt + votingPower_in: [BigInt] + votingPower_not_in: [BigInt] + votingPower_gt: BigInt + votingPower_lt: BigInt + votingPower_gte: BigInt + votingPower_lte: BigInt + delta: BigInt + delta_not: BigInt + delta_in: [BigInt] + delta_not_in: [BigInt] + delta_gt: BigInt + delta_lt: BigInt + 
delta_gte: BigInt + delta_lte: BigInt + deltaMod: BigInt + deltaMod_not: BigInt + deltaMod_in: [BigInt] + deltaMod_not_in: [BigInt] + deltaMod_gt: BigInt + deltaMod_lt: BigInt + deltaMod_gte: BigInt + deltaMod_lte: BigInt + timestamp: BigInt + timestamp_not: BigInt + timestamp_in: [BigInt] + timestamp_not_in: [BigInt] + timestamp_gt: BigInt + timestamp_lt: BigInt + timestamp_gte: BigInt + timestamp_lte: BigInt + logIndex: Int + logIndex_not: Int + logIndex_in: [Int] + logIndex_not_in: [Int] + logIndex_gt: Int + logIndex_lt: Int + logIndex_gte: Int + logIndex_lte: Int +} + +type balanceHistory { + transactionHash: String! + daoId: String! + accountId: String! + balance: BigInt! + delta: BigInt! + deltaMod: BigInt! + timestamp: BigInt! + logIndex: Int! +} + +type balanceHistoryPage { + items: [balanceHistory!]! + pageInfo: PageInfo! + totalCount: Int! +} + +input balanceHistoryFilter { + AND: [balanceHistoryFilter] + OR: [balanceHistoryFilter] + transactionHash: String + transactionHash_not: String + transactionHash_in: [String] + transactionHash_not_in: [String] + transactionHash_contains: String + transactionHash_not_contains: String + transactionHash_starts_with: String + transactionHash_ends_with: String + transactionHash_not_starts_with: String + transactionHash_not_ends_with: String + daoId: String + daoId_not: String + daoId_in: [String] + daoId_not_in: [String] + daoId_contains: String + daoId_not_contains: String + daoId_starts_with: String + daoId_ends_with: String + daoId_not_starts_with: String + daoId_not_ends_with: String + accountId: String + accountId_not: String + accountId_in: [String] + accountId_not_in: [String] + accountId_contains: String + accountId_not_contains: String + accountId_starts_with: String + accountId_ends_with: String + accountId_not_starts_with: String + accountId_not_ends_with: String + balance: BigInt + balance_not: BigInt + balance_in: [BigInt] + balance_not_in: [BigInt] + balance_gt: BigInt + balance_lt: BigInt + balance_gte: 
BigInt + balance_lte: BigInt + delta: BigInt + delta_not: BigInt + delta_in: [BigInt] + delta_not_in: [BigInt] + delta_gt: BigInt + delta_lt: BigInt + delta_gte: BigInt + delta_lte: BigInt + deltaMod: BigInt + deltaMod_not: BigInt + deltaMod_in: [BigInt] + deltaMod_not_in: [BigInt] + deltaMod_gt: BigInt + deltaMod_lt: BigInt + deltaMod_gte: BigInt + deltaMod_lte: BigInt + timestamp: BigInt + timestamp_not: BigInt + timestamp_in: [BigInt] + timestamp_not_in: [BigInt] + timestamp_gt: BigInt + timestamp_lt: BigInt + timestamp_gte: BigInt + timestamp_lte: BigInt + logIndex: Int + logIndex_not: Int + logIndex_in: [Int] + logIndex_not_in: [Int] + logIndex_gt: Int + logIndex_lt: Int + logIndex_gte: Int + logIndex_lte: Int +} + +type daoMetricsDayBucket { + date: BigInt! + daoId: String! + tokenId: String! + metricType: metricType! + open: BigInt! + close: BigInt! + low: BigInt! + high: BigInt! + average: BigInt! + volume: BigInt! + count: Int! + lastUpdate: BigInt! +} + +enum metricType { + TOTAL_SUPPLY + DELEGATED_SUPPLY + CEX_SUPPLY + DEX_SUPPLY + LENDING_SUPPLY + CIRCULATING_SUPPLY + TREASURY +} + +type daoMetricsDayBucketPage { + items: [daoMetricsDayBucket!]! + pageInfo: PageInfo! + totalCount: Int! 
+} + +input daoMetricsDayBucketFilter { + AND: [daoMetricsDayBucketFilter] + OR: [daoMetricsDayBucketFilter] + date: BigInt + date_not: BigInt + date_in: [BigInt] + date_not_in: [BigInt] + date_gt: BigInt + date_lt: BigInt + date_gte: BigInt + date_lte: BigInt + daoId: String + daoId_not: String + daoId_in: [String] + daoId_not_in: [String] + daoId_contains: String + daoId_not_contains: String + daoId_starts_with: String + daoId_ends_with: String + daoId_not_starts_with: String + daoId_not_ends_with: String + tokenId: String + tokenId_not: String + tokenId_in: [String] + tokenId_not_in: [String] + tokenId_contains: String + tokenId_not_contains: String + tokenId_starts_with: String + tokenId_ends_with: String + tokenId_not_starts_with: String + tokenId_not_ends_with: String + metricType: metricType + metricType_not: metricType + metricType_in: [metricType] + metricType_not_in: [metricType] + open: BigInt + open_not: BigInt + open_in: [BigInt] + open_not_in: [BigInt] + open_gt: BigInt + open_lt: BigInt + open_gte: BigInt + open_lte: BigInt + close: BigInt + close_not: BigInt + close_in: [BigInt] + close_not_in: [BigInt] + close_gt: BigInt + close_lt: BigInt + close_gte: BigInt + close_lte: BigInt + low: BigInt + low_not: BigInt + low_in: [BigInt] + low_not_in: [BigInt] + low_gt: BigInt + low_lt: BigInt + low_gte: BigInt + low_lte: BigInt + high: BigInt + high_not: BigInt + high_in: [BigInt] + high_not_in: [BigInt] + high_gt: BigInt + high_lt: BigInt + high_gte: BigInt + high_lte: BigInt + average: BigInt + average_not: BigInt + average_in: [BigInt] + average_not_in: [BigInt] + average_gt: BigInt + average_lt: BigInt + average_gte: BigInt + average_lte: BigInt + volume: BigInt + volume_not: BigInt + volume_in: [BigInt] + volume_not_in: [BigInt] + volume_gt: BigInt + volume_lt: BigInt + volume_gte: BigInt + volume_lte: BigInt + count: Int + count_not: Int + count_in: [Int] + count_not_in: [Int] + count_gt: Int + count_lt: Int + count_gte: Int + count_lte: Int + 
lastUpdate: BigInt + lastUpdate_not: BigInt + lastUpdate_in: [BigInt] + lastUpdate_not_in: [BigInt] + lastUpdate_gt: BigInt + lastUpdate_lt: BigInt + lastUpdate_gte: BigInt + lastUpdate_lte: BigInt +} + +type transactionPage { + items: [transaction!]! + pageInfo: PageInfo! + totalCount: Int! +} + +input transactionFilter { + AND: [transactionFilter] + OR: [transactionFilter] + transactionHash: String + transactionHash_not: String + transactionHash_in: [String] + transactionHash_not_in: [String] + transactionHash_contains: String + transactionHash_not_contains: String + transactionHash_starts_with: String + transactionHash_ends_with: String + transactionHash_not_starts_with: String + transactionHash_not_ends_with: String + fromAddress: String + fromAddress_not: String + fromAddress_in: [String] + fromAddress_not_in: [String] + fromAddress_contains: String + fromAddress_not_contains: String + fromAddress_starts_with: String + fromAddress_ends_with: String + fromAddress_not_starts_with: String + fromAddress_not_ends_with: String + toAddress: String + toAddress_not: String + toAddress_in: [String] + toAddress_not_in: [String] + toAddress_contains: String + toAddress_not_contains: String + toAddress_starts_with: String + toAddress_ends_with: String + toAddress_not_starts_with: String + toAddress_not_ends_with: String + isCex: Boolean + isCex_not: Boolean + isCex_in: [Boolean] + isCex_not_in: [Boolean] + isDex: Boolean + isDex_not: Boolean + isDex_in: [Boolean] + isDex_not_in: [Boolean] + isLending: Boolean + isLending_not: Boolean + isLending_in: [Boolean] + isLending_not_in: [Boolean] + isTotal: Boolean + isTotal_not: Boolean + isTotal_in: [Boolean] + isTotal_not_in: [Boolean] + timestamp: BigInt + timestamp_not: BigInt + timestamp_in: [BigInt] + timestamp_not_in: [BigInt] + timestamp_gt: BigInt + timestamp_lt: BigInt + timestamp_gte: BigInt + timestamp_lte: BigInt +} + +type tokenPrice { + price: BigInt! + timestamp: BigInt! 
+} + +type tokenPricePage { + items: [tokenPrice!]! + pageInfo: PageInfo! + totalCount: Int! +} + +input tokenPriceFilter { + AND: [tokenPriceFilter] + OR: [tokenPriceFilter] + price: BigInt + price_not: BigInt + price_in: [BigInt] + price_not_in: [BigInt] + price_gt: BigInt + price_lt: BigInt + price_gte: BigInt + price_lte: BigInt + timestamp: BigInt + timestamp_not: BigInt + timestamp_in: [BigInt] + timestamp_not_in: [BigInt] + timestamp_gt: BigInt + timestamp_lt: BigInt + timestamp_gte: BigInt + timestamp_lte: BigInt +} \ No newline at end of file diff --git a/apps/hypersync-indexer/generated/src/Benchmark.res b/apps/hypersync-indexer/generated/src/Benchmark.res new file mode 100644 index 000000000..7dd91aa12 --- /dev/null +++ b/apps/hypersync-indexer/generated/src/Benchmark.res @@ -0,0 +1,394 @@ +module MillisAccum = { + type millis = float + type t = {counters: dict, startTime: Js.Date.t, mutable endTime: Js.Date.t} + let schema: S.t = S.schema(s => { + counters: s.matches(S.dict(S.float)), + startTime: s.matches(S.string->S.datetime), + endTime: s.matches(S.string->S.datetime), + }) + let make: unit => t = () => { + counters: Js.Dict.empty(), + startTime: Js.Date.make(), + endTime: Js.Date.make(), + } + + let increment = (self: t, label, amount) => { + self.endTime = Js.Date.make() + let amount = amount->Belt.Float.fromInt + switch self.counters->Utils.Dict.dangerouslyGetNonOption(label) { + | None => + self.counters->Js.Dict.set(label, amount) + amount + | Some(current) => + let newAmount = current +. 
amount + self.counters->Js.Dict.set(label, newAmount) + newAmount + } + } +} + +module SummaryData = { + module DataSet = { + type t = { + count: float, + min: float, + max: float, + sum: BigDecimal.t, + sumOfSquares: option, + decimalPlaces: int, + } + + let schema = S.schema(s => { + count: s.matches(S.float), + min: s.matches(S.float), + max: s.matches(S.float), + sum: s.matches(BigDecimal.schema), + sumOfSquares: s.matches(S.option(BigDecimal.schema)), + decimalPlaces: s.matches(S.int), + }) + + let make = (val: float, ~decimalPlaces=2) => { + let bigDecimal = val->BigDecimal.fromFloat + { + count: 1., + min: val, + max: val, + sum: bigDecimal, + sumOfSquares: Env.Benchmark.shouldSaveStdDev + ? Some(bigDecimal->BigDecimal.times(bigDecimal)) + : None, + decimalPlaces, + } + } + + let add = (self: t, val: float) => { + let bigDecimal = val->BigDecimal.fromFloat + { + count: self.count +. 1., + min: Pervasives.min(self.min, val), + max: Pervasives.max(self.max, val), + sum: self.sum->BigDecimal.plus(bigDecimal), + sumOfSquares: self.sumOfSquares->Belt.Option.map(s => + s->BigDecimal.plus(bigDecimal->BigDecimal.times(bigDecimal)) + ), + decimalPlaces: self.decimalPlaces, + } + } + } + module Group = { + type t = dict + let schema: S.t = S.dict(DataSet.schema) + let make = (): t => Js.Dict.empty() + + /** + Adds a value to the data set for the given key. If the key does not exist, it will be created. + + Returns the updated data set. 
+ */ + let add = (self: t, label, value: float, ~decimalPlaces=2) => { + switch self->Utils.Dict.dangerouslyGetNonOption(label) { + | None => + let new = DataSet.make(value, ~decimalPlaces) + self->Js.Dict.set(label, new) + new + | Some(dataSet) => + let updated = dataSet->DataSet.add(value) + self->Js.Dict.set(label, updated) + updated + } + } + } + + type t = dict + let schema = S.dict(Group.schema) + let make = (): t => Js.Dict.empty() + + let add = (self: t, ~group, ~label, ~value, ~decimalPlaces=2) => { + let group = switch self->Utils.Dict.dangerouslyGetNonOption(group) { + | None => + let newGroup = Group.make() + self->Js.Dict.set(group, newGroup) + newGroup + | Some(group) => group + } + + group->Group.add(label, value, ~decimalPlaces) + } +} + +module Stats = { + open Belt + type t = { + n: float, + mean: float, + @as("std-dev") stdDev: option, + min: float, + max: float, + sum: float, + } + + let round = (float, ~precision=2) => { + let factor = Js.Math.pow_float(~base=10.0, ~exp=precision->Int.toFloat) + Js.Math.round(float *. factor) /. 
factor + } + + let makeFromDataSet = (dataSet: SummaryData.DataSet.t) => { + let n = dataSet.count + let countBigDecimal = n->BigDecimal.fromFloat + let mean = dataSet.sum->BigDecimal.div(countBigDecimal) + + let roundBigDecimal = bd => + bd->BigDecimal.decimalPlaces(dataSet.decimalPlaces)->BigDecimal.toNumber + let roundFloat = float => float->round(~precision=dataSet.decimalPlaces) + + let stdDev = dataSet.sumOfSquares->Option.map(sumOfSquares => { + let variance = + sumOfSquares + ->BigDecimal.div(countBigDecimal) + ->BigDecimal.minus(mean->BigDecimal.times(mean)) + BigDecimal.sqrt(variance)->roundBigDecimal + }) + { + n, + mean: mean->roundBigDecimal, + stdDev, + min: dataSet.min->roundFloat, + max: dataSet.max->roundFloat, + sum: dataSet.sum->roundBigDecimal, + } + } +} + +module Data = { + type t = { + millisAccum: MillisAccum.t, + summaryData: SummaryData.t, + } + + let schema = S.schema(s => { + millisAccum: s.matches(MillisAccum.schema), + summaryData: s.matches(SummaryData.schema), + }) + + let make = () => { + millisAccum: MillisAccum.make(), + summaryData: SummaryData.make(), + } + + module LiveMetrics = { + let addDataSet = if ( + Env.Benchmark.saveDataStrategy->Env.Benchmark.SaveDataStrategy.shouldSavePrometheus + ) { + (dataSet: SummaryData.DataSet.t, ~group, ~label) => { + let {n, mean, stdDev, min, max, sum} = dataSet->Stats.makeFromDataSet + Prometheus.BenchmarkSummaryData.set(~group, ~label, ~n, ~mean, ~stdDev, ~min, ~max, ~sum) + } + } else { + (_dataSet, ~group as _, ~label as _) => () + } + let setCounterMillis = if ( + Env.Benchmark.saveDataStrategy->Env.Benchmark.SaveDataStrategy.shouldSavePrometheus + ) { + (millisAccum: MillisAccum.t, ~label, ~millis) => { + let totalRuntimeMillis = + millisAccum.endTime->Js.Date.getTime -. 
millisAccum.startTime->Js.Date.getTime + Prometheus.BenchmarkCounters.set(~label, ~millis, ~totalRuntimeMillis) + } + } else { + (_, ~label as _, ~millis as _) => () + } + } + + let incrementMillis = (self: t, ~label, ~amount) => { + let nextMillis = self.millisAccum->MillisAccum.increment(label, amount) + self.millisAccum->LiveMetrics.setCounterMillis(~label, ~millis=nextMillis) + } + + let addSummaryData = (self: t, ~group, ~label, ~value, ~decimalPlaces=2) => { + let updatedDataSet = self.summaryData->SummaryData.add(~group, ~label, ~value, ~decimalPlaces) + updatedDataSet->LiveMetrics.addDataSet(~group, ~label) + } +} + +let data = Data.make() +let throttler = Throttler.make( + ~intervalMillis=Env.ThrottleWrites.jsonFileBenchmarkIntervalMillis, + ~logger=Logging.createChild(~params={"context": "Benchmarking framework"}), +) +let cacheFileName = "BenchmarkCache.json" +let cacheFilePath = NodeJs.Path.join(NodeJs.Path.__dirname, cacheFileName) + +let saveToCacheFile = if ( + Env.Benchmark.saveDataStrategy->Env.Benchmark.SaveDataStrategy.shouldSaveJsonFile +) { + //Save to cache file only happens if the strategy is set to json-file + data => { + let write = () => { + let json = data->S.reverseConvertToJsonStringOrThrow(Data.schema) + NodeJs.Fs.Promises.writeFile(~filepath=cacheFilePath, ~content=json) + } + throttler->Throttler.schedule(write) + } +} else { + _ => () +} + +let readFromCacheFile = async () => { + switch await NodeJs.Fs.Promises.readFile(~filepath=cacheFilePath, ~encoding=Utf8) { + | exception _ => None + | content => + try content->S.parseJsonStringOrThrow(Data.schema)->Some catch { + | S.Raised(e) => + Logging.error( + "Failed to parse benchmark cache file, please delete it and rerun the benchmark", + ) + e->S.Error.raise + } + } +} + +let addSummaryData = (~group, ~label, ~value, ~decimalPlaces=2) => { + let _ = data->Data.addSummaryData(~group, ~label, ~value, ~decimalPlaces) + data->saveToCacheFile +} + +let incrementMillis = (~label, ~amount) 
=> { + let _ = data->Data.incrementMillis(~label, ~amount) + data->saveToCacheFile +} + +let addBlockRangeFetched = ( + ~totalTimeElapsed: int, + ~parsingTimeElapsed: int, + ~pageFetchTime: int, + ~chainId, + ~fromBlock, + ~toBlock, + ~numEvents, + ~numAddresses, + ~queryName, +) => { + let group = `BlockRangeFetched Summary for Chain ${chainId->Belt.Int.toString} ${queryName}` + let add = (label, value) => data->Data.addSummaryData(~group, ~label, ~value=Utils.magic(value)) + + add("Total Time Elapsed (ms)", totalTimeElapsed) + add("Parsing Time Elapsed (ms)", parsingTimeElapsed) + add("Page Fetch Time (ms)", pageFetchTime) + add("Num Events", numEvents) + add("Num Addresses", numAddresses) + add("Block Range Size", toBlock - fromBlock) + + data->Data.incrementMillis( + ~label=`Total Time Fetching Chain ${chainId->Belt.Int.toString} ${queryName}`, + ~amount=totalTimeElapsed, + ) + + data->saveToCacheFile +} + +let eventProcessingGroup = "EventProcessing Summary" +let batchSizeLabel = "Batch Size" + +let addEventProcessing = ( + ~batchSize, + ~loadDuration, + ~handlerDuration, + ~dbWriteDuration, + ~totalTimeElapsed, +) => { + let add = (label, value) => + data->Data.addSummaryData(~group=eventProcessingGroup, ~label, ~value=value->Belt.Int.toFloat) + + add(batchSizeLabel, batchSize) + add("Load Duration (ms)", loadDuration) + add("Handler Duration (ms)", handlerDuration) + add("DB Write Duration (ms)", dbWriteDuration) + add("Total Time Elapsed (ms)", totalTimeElapsed) + + data->Data.incrementMillis(~label="Total Time Processing", ~amount=totalTimeElapsed) + + data->saveToCacheFile +} + +module Summary = { + open Belt + + type summaryTable = dict + + external logSummaryTable: summaryTable => unit = "console.table" + external logArrTable: array<'a> => unit = "console.table" + external logObjTable: {..} => unit = "console.table" + external logDictTable: dict<'a> => unit = "console.table" + + external arrayIntToFloat: array => array = "%identity" + + let printSummary 
= async () => { + let data = await readFromCacheFile() + switch data { + | None => + Logging.error( + "No benchmark cache file found, please use 'ENVIO_SAVE_BENCHMARK_DATA=true' and rerun the benchmark", + ) + | Some({summaryData, millisAccum}) => + Js.log("Time breakdown") + let timeBreakdown = [ + ( + "Total Runtime", + DateFns.intervalToDuration({ + start: millisAccum.startTime, + end: millisAccum.endTime, + }), + ), + ] + + millisAccum.counters + ->Js.Dict.entries + ->Array.forEach(((label, millis)) => + timeBreakdown + ->Js.Array2.push((label, DateFns.durationFromMillis(millis->Belt.Int.fromFloat))) + ->ignore + ) + + timeBreakdown + ->Js.Dict.fromArray + ->logDictTable + + Js.log("General") + let batchSizesSum = + summaryData + ->Js.Dict.get(eventProcessingGroup) + ->Option.flatMap(g => g->Js.Dict.get(batchSizeLabel)) + ->Option.map(data => data.sum) + ->Option.getWithDefault(BigDecimal.zero) + + let totalRuntimeMillis = + millisAccum.endTime->Js.Date.getTime -. millisAccum.startTime->Js.Date.getTime + + let totalRuntimeSeconds = totalRuntimeMillis /. 1000. + + let eventsPerSecond = + batchSizesSum + ->BigDecimal.div(BigDecimal.fromFloat(totalRuntimeSeconds)) + ->BigDecimal.decimalPlaces(2) + ->BigDecimal.toNumber + + logObjTable({ + "batch sizes sum": batchSizesSum->BigDecimal.toNumber, + "total runtime (sec)": totalRuntimeSeconds, + "events per second": eventsPerSecond, + }) + + summaryData + ->Js.Dict.entries + ->Js.Array2.sortInPlaceWith(((a, _), (b, _)) => a < b ? 
-1 : 1) + ->Array.forEach(((groupName, group)) => { + Js.log(groupName) + group + ->Js.Dict.entries + ->Array.map(((label, values)) => (label, values->Stats.makeFromDataSet)) + ->Js.Dict.fromArray + ->logDictTable + }) + } + } +} diff --git a/apps/hypersync-indexer/generated/src/ConfigYAML.gen.ts b/apps/hypersync-indexer/generated/src/ConfigYAML.gen.ts new file mode 100644 index 000000000..2b580012d --- /dev/null +++ b/apps/hypersync-indexer/generated/src/ConfigYAML.gen.ts @@ -0,0 +1,38 @@ +/* TypeScript file generated from ConfigYAML.res by genType. */ + +/* eslint-disable */ +/* tslint:disable */ + +const ConfigYAMLJS = require('./ConfigYAML.res.js'); + +export type hyperSyncConfig = { readonly endpointUrl: string }; + +export type hyperFuelConfig = { readonly endpointUrl: string }; + +export abstract class rpcConfig { protected opaque!: any }; /* simulate opaque types */ + +export type syncSource = + { TAG: "HyperSync"; _0: hyperSyncConfig } + | { TAG: "HyperFuel"; _0: hyperFuelConfig } + | { TAG: "Rpc"; _0: rpcConfig }; + +export abstract class aliasAbi { protected opaque!: any }; /* simulate opaque types */ + +export type eventName = string; + +export type contract = { + readonly name: string; + readonly abi: aliasAbi; + readonly addresses: string[]; + readonly events: eventName[] +}; + +export type configYaml = { + readonly syncSource: syncSource; + readonly startBlock: number; + readonly confirmedBlockThreshold: number; + readonly contracts: {[id: string]: contract}; + readonly lowercaseAddresses: boolean +}; + +export const getGeneratedByChainId: (chainId:number) => configYaml = ConfigYAMLJS.getGeneratedByChainId as any; diff --git a/apps/hypersync-indexer/generated/src/ConfigYAML.res b/apps/hypersync-indexer/generated/src/ConfigYAML.res new file mode 100644 index 000000000..deb1a1505 --- /dev/null +++ b/apps/hypersync-indexer/generated/src/ConfigYAML.res @@ -0,0 +1,92 @@ + +type hyperSyncConfig = {endpointUrl: string} +type hyperFuelConfig = {endpointUrl: 
string} + +@genType.opaque +type rpcConfig = { + syncConfig: Config.sourceSync, +} + +@genType +type syncSource = HyperSync(hyperSyncConfig) | HyperFuel(hyperFuelConfig) | Rpc(rpcConfig) + +@genType.opaque +type aliasAbi = Ethers.abi + +type eventName = string + +type contract = { + name: string, + abi: aliasAbi, + addresses: array, + events: array, +} + +type configYaml = { + syncSource, + startBlock: int, + confirmedBlockThreshold: int, + contracts: dict, + lowercaseAddresses: bool, +} + +let publicConfig = ChainMap.fromArrayUnsafe([ + { + let contracts = Js.Dict.fromArray([ + ( + "ENSToken", + { + name: "ENSToken", + abi: Types.ENSToken.abi, + addresses: [ + "0xC18360217D8F7Ab5e7c516566761Ea12Ce7F9D72", + ], + events: [ + Types.ENSToken.Transfer.name, + Types.ENSToken.DelegateChanged.name, + Types.ENSToken.DelegateVotesChanged.name, + ], + } + ), + ( + "ENSGovernor", + { + name: "ENSGovernor", + abi: Types.ENSGovernor.abi, + addresses: [ + "0x323a76393544d5ecca80cd6ef2a560c6a395b7e3", + ], + events: [ + Types.ENSGovernor.ProposalCreated.name, + Types.ENSGovernor.VoteCast.name, + Types.ENSGovernor.ProposalCanceled.name, + Types.ENSGovernor.ProposalExecuted.name, + Types.ENSGovernor.ProposalQueued.name, + ], + } + ), + ]) + let chain = ChainMap.Chain.makeUnsafe(~chainId=1) + ( + chain, + { + confirmedBlockThreshold: 200, + syncSource: HyperSync({endpointUrl: "https://eth.hypersync.xyz"}), + startBlock: 9380410, + contracts, + lowercaseAddresses: false + } + ) + }, +]) + +@genType +let getGeneratedByChainId: int => configYaml = chainId => { + let chain = ChainMap.Chain.makeUnsafe(~chainId) + if !(publicConfig->ChainMap.has(chain)) { + Js.Exn.raiseError( + "No chain with id " ++ chain->ChainMap.Chain.toString ++ " found in config.yaml", + ) + } + publicConfig->ChainMap.get(chain) +} diff --git a/apps/hypersync-indexer/generated/src/Env.res b/apps/hypersync-indexer/generated/src/Env.res new file mode 100644 index 000000000..6618abd6d --- /dev/null +++ 
b/apps/hypersync-indexer/generated/src/Env.res @@ -0,0 +1,247 @@ +Dotenv.initialize() +%%private( + let envSafe = EnvSafe.make() + + let getLogLevelConfig = (name, ~default): Pino.logLevel => + envSafe->EnvSafe.get( + name, + S.enum([#trace, #debug, #info, #warn, #error, #fatal, #udebug, #uinfo, #uwarn, #uerror]), + ~fallback=default, + ) +) +// resets the timestampCaughtUpToHeadOrEndblock after a restart when true +let updateSyncTimeOnRestart = + envSafe->EnvSafe.get("UPDATE_SYNC_TIME_ON_RESTART", S.bool, ~fallback=true) +let batchSize = envSafe->EnvSafe.get("MAX_BATCH_SIZE", S.option(S.int)) +let targetBufferSize = envSafe->EnvSafe.get("ENVIO_INDEXING_MAX_BUFFER_SIZE", S.option(S.int)) +let maxAddrInPartition = envSafe->EnvSafe.get("MAX_PARTITION_SIZE", S.int, ~fallback=5_000) +let maxPartitionConcurrency = + envSafe->EnvSafe.get("ENVIO_MAX_PARTITION_CONCURRENCY", S.int, ~fallback=10) +let indexingBlockLag = envSafe->EnvSafe.get("ENVIO_INDEXING_BLOCK_LAG", S.option(S.int)) + +// FIXME: This broke HS grafana dashboard. Should investigate it later. Maybe we should use :: as a default value? 
+// We want to be able to set it to 0.0.0.0 +// to allow to passthrough the port from a Docker container +// let serverHost = envSafe->EnvSafe.get("ENVIO_INDEXER_HOST", S.string, ~fallback="localhost") +let serverPort = + envSafe->EnvSafe.get( + "ENVIO_INDEXER_PORT", + S.int->S.port, + ~fallback=envSafe->EnvSafe.get("METRICS_PORT", S.int->S.port, ~fallback=9898), + ) + +let tuiOffEnvVar = envSafe->EnvSafe.get("TUI_OFF", S.bool, ~fallback=false) + +let logFilePath = envSafe->EnvSafe.get("LOG_FILE", S.string, ~fallback="logs/envio.log") +let userLogLevel = getLogLevelConfig("LOG_LEVEL", ~default=#info) +let defaultFileLogLevel = getLogLevelConfig("FILE_LOG_LEVEL", ~default=#trace) + +let prodEnvioAppUrl = "https://envio.dev" +let envioAppUrl = envSafe->EnvSafe.get("ENVIO_APP", S.string, ~fallback=prodEnvioAppUrl) +let envioApiToken = envSafe->EnvSafe.get("ENVIO_API_TOKEN", S.option(S.string)) +let hyperSyncClientTimeoutMillis = + envSafe->EnvSafe.get("ENVIO_HYPERSYNC_CLIENT_TIMEOUT_MILLIS", S.int, ~fallback=120_000) + +/** +This is the number of retries that the binary client makes before rejecting the promise with an error +Default is 0 so that the indexer can handle retries internally +*/ +let hyperSyncClientMaxRetries = + envSafe->EnvSafe.get("ENVIO_HYPERSYNC_CLIENT_MAX_RETRIES", S.int, ~fallback=0) + +let hypersyncClientSerializationFormat = + envSafe->EnvSafe.get( + "ENVIO_HYPERSYNC_CLIENT_SERIALIZATION_FORMAT", + HyperSyncClient.serializationFormatSchema, + ~fallback=CapnProto, + ) + +let hypersyncClientEnableQueryCaching = + envSafe->EnvSafe.get("ENVIO_HYPERSYNC_CLIENT_ENABLE_QUERY_CACHING", S.bool, ~fallback=true) + +let hypersyncLogLevel = + envSafe->EnvSafe.get( + "ENVIO_HYPERSYNC_LOG_LEVEL", + HyperSyncClient.logLevelSchema, + ~fallback=#info, + ) +HyperSyncClient.setLogLevel(hypersyncLogLevel) + +module Benchmark = { + module SaveDataStrategy: { + type t + let schema: S.t + let default: t + let shouldSaveJsonFile: t => bool + let shouldSavePrometheus: t 
=> bool + let shouldSaveData: t => bool + } = { + @unboxed + type t = Bool(bool) | @as("json-file") JsonFile | @as("prometheus") Prometheus + + let schema = S.enum([Bool(true), Bool(false), JsonFile, Prometheus]) + let default = Bool(false) + + let shouldSaveJsonFile = self => + switch self { + | JsonFile | Bool(true) => true + | _ => false + } + + let shouldSavePrometheus = _ => true + + let shouldSaveData = self => self->shouldSavePrometheus || self->shouldSaveJsonFile + } + + let saveDataStrategy = + envSafe->EnvSafe.get( + "ENVIO_SAVE_BENCHMARK_DATA", + SaveDataStrategy.schema, + ~fallback=SaveDataStrategy.default, + ) + + let shouldSaveData = saveDataStrategy->SaveDataStrategy.shouldSaveData + + /** + StdDev involves saving sum of squares of data points, which could get very large. + + Currently only do this for local runs on json-file and not prometheus. + */ + let shouldSaveStdDev = + saveDataStrategy->SaveDataStrategy.shouldSaveJsonFile +} + +let logStrategy = + envSafe->EnvSafe.get( + "LOG_STRATEGY", + S.enum([ + Logging.EcsFile, + EcsConsole, + EcsConsoleMultistream, + FileOnly, + ConsoleRaw, + ConsolePretty, + Both, + ]), + ~fallback=ConsolePretty, + ) + +Logging.setLogger( + Logging.makeLogger(~logStrategy, ~logFilePath, ~defaultFileLogLevel, ~userLogLevel), +) + +module Db = { + let host = envSafe->EnvSafe.get("ENVIO_PG_HOST", S.string, ~devFallback="localhost") + let port = envSafe->EnvSafe.get("ENVIO_PG_PORT", S.int->S.port, ~devFallback=5433) + let user = envSafe->EnvSafe.get("ENVIO_PG_USER", S.string, ~devFallback="postgres") + let password = envSafe->EnvSafe.get( + "ENVIO_PG_PASSWORD", + S.string, + ~fallback={ + envSafe->EnvSafe.get("ENVIO_POSTGRES_PASSWORD", S.string, ~fallback="testing") + }, + ) + let database = envSafe->EnvSafe.get("ENVIO_PG_DATABASE", S.string, ~devFallback="envio-dev") + let publicSchema = envSafe->EnvSafe.get("ENVIO_PG_PUBLIC_SCHEMA", S.string, ~fallback="public") + let ssl = envSafe->EnvSafe.get( + "ENVIO_PG_SSL_MODE", + 
Postgres.sslOptionsSchema, + //this is a dev fallback option for local deployments, shouldn't run in the prod env + //the SSL modes should be provided as string otherwise as 'require' | 'allow' | 'prefer' | 'verify-full' + ~devFallback=Bool(false), + ) +} + +module Hasura = { + // Disable it on HS indexer run, since we don't have Hasura credentials anyways + // Also, it might be useful for some users who don't care about Hasura + let enabled = envSafe->EnvSafe.get("ENVIO_HASURA", S.bool, ~fallback=true) + + let responseLimit = switch envSafe->EnvSafe.get("ENVIO_HASURA_RESPONSE_LIMIT", S.option(S.int)) { + | Some(_) as s => s + | None => envSafe->EnvSafe.get("HASURA_RESPONSE_LIMIT", S.option(S.int)) + } + + let graphqlEndpoint = + envSafe->EnvSafe.get( + "HASURA_GRAPHQL_ENDPOINT", + S.string, + ~devFallback="http://localhost:8080/v1/metadata", + ) + + let url = graphqlEndpoint->Js.String2.slice(~from=0, ~to_=-("/v1/metadata"->Js.String2.length)) + + let role = envSafe->EnvSafe.get("HASURA_GRAPHQL_ROLE", S.string, ~devFallback="admin") + + let secret = envSafe->EnvSafe.get("HASURA_GRAPHQL_ADMIN_SECRET", S.string, ~devFallback="testing") + + let aggregateEntities = envSafe->EnvSafe.get( + "ENVIO_HASURA_PUBLIC_AGGREGATE", + S.union([ + S.array(S.string), + // Temporary workaround: Hosted Service can't use commas in env vars for multiple entities. + // Will be removed once comma support is added — don't rely on this. 
+ S.string->S.transform(s => { + parser: string => + switch string->Js.String2.split("&") { + | [] + | [_] => + s.fail(`Provide an array of entities in the JSON format.`) + | entities => entities + }, + }), + ]), + ~fallback=[], + ) +} + +module Configurable = { + /** + Used for backwards compatability + */ + let unstable__temp_unordered_head_mode = envSafe->EnvSafe.get( + "UNSTABLE__TEMP_UNORDERED_HEAD_MODE", + S.option(S.bool), + ) + + let isUnorderedMultichainMode = + envSafe->EnvSafe.get("UNORDERED_MULTICHAIN_MODE", S.option(S.bool)) + + module SyncConfig = { + let initialBlockInterval = + envSafe->EnvSafe.get("ENVIO_RPC_INITIAL_BLOCK_INTERVAL", S.option(S.int)) + let backoffMultiplicative = + envSafe->EnvSafe.get("ENVIO_RPC_BACKOFF_MULTIPLICATIVE", S.option(S.float)) + let accelerationAdditive = + envSafe->EnvSafe.get("ENVIO_RPC_ACCELERATION_ADDITIVE", S.option(S.int)) + let intervalCeiling = envSafe->EnvSafe.get("ENVIO_RPC_INTERVAL_CEILING", S.option(S.int)) + } +} + +module ThrottleWrites = { + let chainMetadataIntervalMillis = + envSafe->EnvSafe.get("ENVIO_THROTTLE_CHAIN_METADATA_INTERVAL_MILLIS", S.int, ~devFallback=500) + let pruneStaleDataIntervalMillis = + envSafe->EnvSafe.get( + "ENVIO_THROTTLE_PRUNE_STALE_DATA_INTERVAL_MILLIS", + S.int, + ~devFallback=30_000, + ) + + let liveMetricsBenchmarkIntervalMillis = + envSafe->EnvSafe.get( + "ENVIO_THROTTLE_LIVE_METRICS_BENCHMARK_INTERVAL_MILLIS", + S.int, + ~devFallback=1_000, + ) + + let jsonFileBenchmarkIntervalMillis = + envSafe->EnvSafe.get( + "ENVIO_THROTTLE_JSON_FILE_BENCHMARK_INTERVAL_MILLIS", + S.int, + ~devFallback=500, + ) +} + +// You need to close the envSafe after you're done with it so that it immediately tells you about your misconfigured environment on startup. 
+envSafe->EnvSafe.close diff --git a/apps/hypersync-indexer/generated/src/EventProcessing.res b/apps/hypersync-indexer/generated/src/EventProcessing.res new file mode 100644 index 000000000..bc394e254 --- /dev/null +++ b/apps/hypersync-indexer/generated/src/EventProcessing.res @@ -0,0 +1,478 @@ +open Belt + +let allChainsEventsProcessedToEndblock = (chainFetchers: ChainMap.t) => { + chainFetchers + ->ChainMap.values + ->Array.every(cf => cf->ChainFetcher.hasProcessedToEndblock) +} + +let computeChainsState = (chainFetchers: ChainMap.t): Internal.chains => { + let chains = Js.Dict.empty() + + chainFetchers + ->ChainMap.entries + ->Array.forEach(((chain, chainFetcher)) => { + let chainId = chain->ChainMap.Chain.toChainId->Int.toString + let isReady = chainFetcher.timestampCaughtUpToHeadOrEndblock !== None + + chains->Js.Dict.set( + chainId, + { + Internal.isReady: isReady, + }, + ) + }) + + chains +} + +let convertFieldsToJson = (fields: option>) => { + switch fields { + | None => %raw(`{}`) + | Some(fields) => { + let keys = fields->Js.Dict.keys + let new = Js.Dict.empty() + for i in 0 to keys->Js.Array2.length - 1 { + let key = keys->Js.Array2.unsafe_get(i) + let value = fields->Js.Dict.unsafeGet(key) + // Skip `undefined` values and convert bigint fields to string + // There are not fields with nested bigints, so this is safe + new->Js.Dict.set( + key, + Js.typeof(value) === "bigint" ? 
value->Utils.magic->BigInt.toString->Utils.magic : value, + ) + } + new->(Utils.magic: dict => Js.Json.t) + } + } +} + +let addItemToRawEvents = (eventItem: Internal.eventItem, ~inMemoryStore: InMemoryStore.t) => { + let {event, eventConfig, chain, blockNumber, timestamp: blockTimestamp} = eventItem + let {block, transaction, params, logIndex, srcAddress} = event + let chainId = chain->ChainMap.Chain.toChainId + let eventId = EventUtils.packEventIndex(~logIndex, ~blockNumber) + let blockFields = + block + ->(Utils.magic: Internal.eventBlock => option>) + ->convertFieldsToJson + let transactionFields = + transaction + ->(Utils.magic: Internal.eventTransaction => option>) + ->convertFieldsToJson + + blockFields->Types.Block.cleanUpRawEventFieldsInPlace + + // Serialize to unknown, because serializing to Js.Json.t fails for Bytes Fuel type, since it has unknown schema + let params = + params + ->S.reverseConvertOrThrow(eventConfig.paramsRawEventSchema) + ->(Utils.magic: unknown => Js.Json.t) + let params = if params === %raw(`null`) { + // Should probably make the params field nullable + // But this is currently needed to make events + // with empty params work + %raw(`"null"`) + } else { + params + } + + let rawEvent: InternalTable.RawEvents.t = { + chainId, + eventId, + eventName: eventConfig.name, + contractName: eventConfig.contractName, + blockNumber, + logIndex, + srcAddress, + blockHash: block->Types.Block.getId, + blockTimestamp, + blockFields, + transactionFields, + params, + } + + let eventIdStr = eventId->BigInt.toString + + inMemoryStore.rawEvents->InMemoryTable.set({chainId, eventId: eventIdStr}, rawEvent) +} + +exception ProcessingError({message: string, exn: exn, item: Internal.item}) + +let runEventHandlerOrThrow = async ( + item: Internal.item, + ~checkpointId, + ~handler, + ~inMemoryStore, + ~loadManager, + ~persistence, + ~shouldSaveHistory, + ~shouldBenchmark, + ~chains: Internal.chains, +) => { + let eventItem = item->Internal.castUnsafeEventItem 
+ + //Include the load in time before handler + let timeBeforeHandler = Hrtime.makeTimer() + + try { + let contextParams: UserContext.contextParams = { + item, + checkpointId, + inMemoryStore, + loadManager, + persistence, + shouldSaveHistory, + isPreload: false, + chains, + isResolved: false, + } + await handler( + ( + { + event: eventItem.event, + context: UserContext.getHandlerContext(contextParams), + }: Internal.handlerArgs + ), + ) + contextParams.isResolved = true + } catch { + | exn => + raise( + ProcessingError({ + message: "Unexpected error in the event handler. Please handle the error to keep the indexer running smoothly.", + item, + exn, + }), + ) + } + if shouldBenchmark { + let timeEnd = timeBeforeHandler->Hrtime.timeSince->Hrtime.toMillis->Hrtime.floatFromMillis + Benchmark.addSummaryData( + ~group="Handlers Per Event", + ~label=`${eventItem.eventConfig.contractName} ${eventItem.eventConfig.name} Handler (ms)`, + ~value=timeEnd, + ~decimalPlaces=4, + ) + } +} + +let runHandlerOrThrow = async ( + item: Internal.item, + ~checkpointId, + ~inMemoryStore, + ~loadManager, + ~indexer: Indexer.t, + ~shouldSaveHistory, + ~shouldBenchmark, + ~chains: Internal.chains, +) => { + switch item { + | Block({onBlockConfig: {handler, chainId}, blockNumber}) => + try { + let contextParams: UserContext.contextParams = { + item, + inMemoryStore, + loadManager, + persistence: indexer.persistence, + shouldSaveHistory, + checkpointId, + isPreload: false, + chains, + isResolved: false, + } + await handler( + ( + { + block: { + number: blockNumber, + chainId, + }, + context: UserContext.getHandlerContext(contextParams), + }: Internal.onBlockArgs + ), + ) + contextParams.isResolved = true + } catch { + | exn => + raise( + ProcessingError({ + message: "Unexpected error in the block handler. 
Please handle the error to keep the indexer running smoothly.", + item, + exn, + }), + ) + } + | Event({eventConfig}) => { + switch eventConfig.handler { + | Some(handler) => + await item->runEventHandlerOrThrow( + ~handler, + ~checkpointId, + ~inMemoryStore, + ~loadManager, + ~persistence=indexer.persistence, + ~shouldSaveHistory, + ~shouldBenchmark, + ~chains, + ) + | None => () + } + + if indexer.config.enableRawEvents { + item->Internal.castUnsafeEventItem->addItemToRawEvents(~inMemoryStore) + } + } + } +} + +let preloadBatchOrThrow = async ( + batch: Batch.t, + ~loadManager, + ~persistence, + ~inMemoryStore, + ~chains: Internal.chains, +) => { + // On the first run of loaders, we don't care about the result, + // whether it's an error or a return type. + // We'll rerun the loader again right before the handler run, + // to avoid having a stale data returned from the loader. + + let promises = [] + let itemIdx = ref(0) + + for checkpointIdx in 0 to batch.checkpointIds->Array.length - 1 { + let checkpointId = batch.checkpointIds->Js.Array2.unsafe_get(checkpointIdx) + let checkpointEventsProcessed = + batch.checkpointEventsProcessed->Js.Array2.unsafe_get(checkpointIdx) + + for idx in 0 to checkpointEventsProcessed - 1 { + let item = batch.items->Js.Array2.unsafe_get(itemIdx.contents + idx) + switch item { + | Event({eventConfig: {handler}, event}) => + switch handler { + | None => () + | Some(handler) => + try { + promises->Array.push( + handler({ + event, + context: UserContext.getHandlerContext({ + item, + inMemoryStore, + loadManager, + persistence, + checkpointId, + isPreload: true, + shouldSaveHistory: false, + chains, + isResolved: false, + }), + })->Promise.silentCatch, + // Must have Promise.catch as well as normal catch, + // because if user throws an error before await in the handler, + // it won't create a rejected promise + ) + } catch { + | _ => () + } + } + | Block({onBlockConfig: {handler, chainId}, blockNumber}) => + try { + promises->Array.push( 
+ handler({ + block: { + number: blockNumber, + chainId, + }, + context: UserContext.getHandlerContext({ + item, + inMemoryStore, + loadManager, + persistence, + checkpointId, + isPreload: true, + shouldSaveHistory: false, + chains, + isResolved: false, + }), + })->Promise.silentCatch, + ) + } catch { + | _ => () + } + } + } + + itemIdx := itemIdx.contents + checkpointEventsProcessed + } + + let _ = await Promise.all(promises) +} + +let runBatchHandlersOrThrow = async ( + batch: Batch.t, + ~inMemoryStore, + ~loadManager, + ~indexer, + ~shouldSaveHistory, + ~shouldBenchmark, + ~chains: Internal.chains, +) => { + let itemIdx = ref(0) + + for checkpointIdx in 0 to batch.checkpointIds->Array.length - 1 { + let checkpointId = batch.checkpointIds->Js.Array2.unsafe_get(checkpointIdx) + let checkpointEventsProcessed = + batch.checkpointEventsProcessed->Js.Array2.unsafe_get(checkpointIdx) + + for idx in 0 to checkpointEventsProcessed - 1 { + let item = batch.items->Js.Array2.unsafe_get(itemIdx.contents + idx) + + await runHandlerOrThrow( + item, + ~checkpointId, + ~inMemoryStore, + ~loadManager, + ~indexer, + ~shouldSaveHistory, + ~shouldBenchmark, + ~chains, + ) + } + itemIdx := itemIdx.contents + checkpointEventsProcessed + } +} + +let registerProcessEventBatchMetrics = ( + ~logger, + ~loadDuration, + ~handlerDuration, + ~dbWriteDuration, +) => { + logger->Logging.childTrace({ + "msg": "Finished processing batch", + "loader_time_elapsed": loadDuration, + "handlers_time_elapsed": handlerDuration, + "write_time_elapsed": dbWriteDuration, + }) + + Prometheus.incrementLoadEntityDurationCounter(~duration=loadDuration) + Prometheus.incrementEventRouterDurationCounter(~duration=handlerDuration) + Prometheus.incrementExecuteBatchDurationCounter(~duration=dbWriteDuration) +} + +type logPartitionInfo = { + batchSize: int, + firstItemTimestamp: option, + firstItemBlockNumber?: int, + lastItemBlockNumber?: int, +} + +let processEventBatch = async ( + ~batch: Batch.t, + 
~inMemoryStore: InMemoryStore.t, + ~isInReorgThreshold, + ~loadManager, + ~indexer: Indexer.t, + ~chainFetchers: ChainMap.t, +) => { + let totalBatchSize = batch.totalBatchSize + // Compute chains state for this batch + let chains: Internal.chains = chainFetchers->computeChainsState + + let logger = Logging.getLogger() + logger->Logging.childTrace({ + "msg": "Started processing batch", + "totalBatchSize": totalBatchSize, + "chains": batch.progressedChainsById->Utils.Dict.mapValues(chainAfterBatch => { + { + "batchSize": chainAfterBatch.batchSize, + "progress": chainAfterBatch.progressBlockNumber, + } + }), + }) + + try { + let timeRef = Hrtime.makeTimer() + + if batch.items->Utils.Array.notEmpty { + await batch->preloadBatchOrThrow( + ~loadManager, + ~persistence=indexer.persistence, + ~inMemoryStore, + ~chains, + ) + } + + let elapsedTimeAfterLoaders = timeRef->Hrtime.timeSince->Hrtime.toMillis->Hrtime.intFromMillis + + if batch.items->Utils.Array.notEmpty { + await batch->runBatchHandlersOrThrow( + ~inMemoryStore, + ~loadManager, + ~indexer, + ~shouldSaveHistory=indexer.config->Config.shouldSaveHistory(~isInReorgThreshold), + ~shouldBenchmark=Env.Benchmark.shouldSaveData, + ~chains, + ) + } + + let elapsedTimeAfterProcessing = + timeRef->Hrtime.timeSince->Hrtime.toMillis->Hrtime.intFromMillis + + let rec executeBatch = async (~escapeTables=?) => { + switch await indexer.persistence.sql->IO.executeBatch( + ~batch, + ~inMemoryStore, + ~isInReorgThreshold, + ~indexer, + ~escapeTables?, + ) { + | exception Persistence.StorageError({message, reason}) => + reason->ErrorHandling.make(~msg=message, ~logger)->Error + + | exception PgStorage.PgEncodingError({table}) => + let escapeTables = switch escapeTables { + | Some(set) => set + | None => Utils.Set.make() + } + let _ = escapeTables->Utils.Set.add(table) + // Retry with specifying which tables to escape. 
+ await executeBatch(~escapeTables) + | exception exn => + exn->ErrorHandling.make(~msg="Failed writing batch to database", ~logger)->Error + | () => { + let elapsedTimeAfterDbWrite = + timeRef->Hrtime.timeSince->Hrtime.toMillis->Hrtime.intFromMillis + let loaderDuration = elapsedTimeAfterLoaders + let handlerDuration = elapsedTimeAfterProcessing - loaderDuration + let dbWriteDuration = elapsedTimeAfterDbWrite - elapsedTimeAfterProcessing + registerProcessEventBatchMetrics( + ~logger, + ~loadDuration=loaderDuration, + ~handlerDuration, + ~dbWriteDuration, + ) + if Env.Benchmark.shouldSaveData { + Benchmark.addEventProcessing( + ~batchSize=totalBatchSize, + ~loadDuration=loaderDuration, + ~handlerDuration, + ~dbWriteDuration, + ~totalTimeElapsed=elapsedTimeAfterDbWrite, + ) + } + Ok() + } + } + } + + await executeBatch() + } catch { + | ProcessingError({message, exn, item}) => + exn + ->ErrorHandling.make(~msg=message, ~logger=item->Logging.getItemLogger) + ->Error + } +} diff --git a/apps/hypersync-indexer/generated/src/Generated.res b/apps/hypersync-indexer/generated/src/Generated.res new file mode 100644 index 000000000..4d704a8e0 --- /dev/null +++ b/apps/hypersync-indexer/generated/src/Generated.res @@ -0,0 +1,206 @@ +@val external require: string => unit = "require" + +let registerContractHandlers = ( + ~contractName, + ~handlerPathRelativeToRoot, + ~handlerPathRelativeToConfig, +) => { + try { + require(`../${Path.relativePathToRootFromGenerated}/${handlerPathRelativeToRoot}`) + } catch { + | exn => + let params = { + "Contract Name": contractName, + "Expected Handler Path": handlerPathRelativeToConfig, + "Code": "EE500", + } + let logger = Logging.createChild(~params) + + let errHandler = exn->ErrorHandling.make(~msg="Failed to import handler file", ~logger) + errHandler->ErrorHandling.log + errHandler->ErrorHandling.raiseExn + } +} + +let makeGeneratedConfig = () => { + let chains = [ + { + let contracts = [ + { + Config.name: "ENSToken", + abi: 
Types.ENSToken.abi, + addresses: [ + "0xC18360217D8F7Ab5e7c516566761Ea12Ce7F9D72"->Address.Evm.fromStringOrThrow +, + ], + events: [ + (Types.ENSToken.Transfer.register() :> Internal.eventConfig), + (Types.ENSToken.DelegateChanged.register() :> Internal.eventConfig), + (Types.ENSToken.DelegateVotesChanged.register() :> Internal.eventConfig), + ], + startBlock: None, + }, + { + Config.name: "ENSGovernor", + abi: Types.ENSGovernor.abi, + addresses: [ + "0x323a76393544d5ecca80cd6ef2a560c6a395b7e3"->Address.Evm.fromStringOrThrow +, + ], + events: [ + (Types.ENSGovernor.ProposalCreated.register() :> Internal.eventConfig), + (Types.ENSGovernor.VoteCast.register() :> Internal.eventConfig), + (Types.ENSGovernor.ProposalCanceled.register() :> Internal.eventConfig), + (Types.ENSGovernor.ProposalExecuted.register() :> Internal.eventConfig), + (Types.ENSGovernor.ProposalQueued.register() :> Internal.eventConfig), + ], + startBlock: Some(13533772), + }, + ] + let chain = ChainMap.Chain.makeUnsafe(~chainId=1) + { + Config.maxReorgDepth: 200, + startBlock: 9380410, + id: 1, + contracts, + sources: NetworkSources.evm(~chain, ~contracts=[{name: "ENSToken",events: [Types.ENSToken.Transfer.register(), Types.ENSToken.DelegateChanged.register(), Types.ENSToken.DelegateVotesChanged.register()],abi: Types.ENSToken.abi}, {name: "ENSGovernor",events: [Types.ENSGovernor.ProposalCreated.register(), Types.ENSGovernor.VoteCast.register(), Types.ENSGovernor.ProposalCanceled.register(), Types.ENSGovernor.ProposalExecuted.register(), Types.ENSGovernor.ProposalQueued.register()],abi: Types.ENSGovernor.abi}], ~hyperSync=Some("https://eth.hypersync.xyz"), ~allEventSignatures=[Types.ENSToken.eventSignatures, Types.ENSGovernor.eventSignatures]->Belt.Array.concatMany, ~shouldUseHypersyncClientDecoder=true, ~rpcs=[], ~lowercaseAddresses=false) + } + }, + ] + + Config.make( + ~shouldRollbackOnReorg=true, + ~shouldSaveFullHistory=false, + ~multichain=if ( + 
Env.Configurable.isUnorderedMultichainMode->Belt.Option.getWithDefault( + Env.Configurable.unstable__temp_unordered_head_mode->Belt.Option.getWithDefault( + false, + ), + ) + ) { + Unordered + } else { + Ordered + }, + ~chains, + ~enableRawEvents=false, + ~batchSize=?Env.batchSize, + ~preloadHandlers=false, + ~lowercaseAddresses=false, + ~shouldUseHypersyncClientDecoder=true, + ) +} + +let configWithoutRegistrations = makeGeneratedConfig() + +let registerAllHandlers = () => { + EventRegister.startRegistration( + ~ecosystem=configWithoutRegistrations.ecosystem, + ~multichain=configWithoutRegistrations.multichain, + ~preloadHandlers=configWithoutRegistrations.preloadHandlers, + ) + + registerContractHandlers( + ~contractName="ENSGovernor", + ~handlerPathRelativeToRoot="src/eventHandlers/ENSGovernor.ts", + ~handlerPathRelativeToConfig="src/eventHandlers/ENSGovernor.ts", + ) + registerContractHandlers( + ~contractName="ENSToken", + ~handlerPathRelativeToRoot="src/eventHandlers/ENSToken.ts", + ~handlerPathRelativeToConfig="src/eventHandlers/ENSToken.ts", + ) + + EventRegister.finishRegistration() +} + +let initialSql = Db.makeClient() +let storagePgSchema = Env.Db.publicSchema +let makeStorage = (~sql, ~pgSchema=storagePgSchema, ~isHasuraEnabled=Env.Hasura.enabled) => { + PgStorage.make( + ~sql, + ~pgSchema, + ~pgHost=Env.Db.host, + ~pgUser=Env.Db.user, + ~pgPort=Env.Db.port, + ~pgDatabase=Env.Db.database, + ~pgPassword=Env.Db.password, + ~onInitialize=?{ + if isHasuraEnabled { + Some( + () => { + Hasura.trackDatabase( + ~endpoint=Env.Hasura.graphqlEndpoint, + ~auth={ + role: Env.Hasura.role, + secret: Env.Hasura.secret, + }, + ~pgSchema=storagePgSchema, + ~userEntities=Entities.userEntities, + ~responseLimit=Env.Hasura.responseLimit, + ~schema=Db.schema, + ~aggregateEntities=Env.Hasura.aggregateEntities, + )->Promise.catch(err => { + Logging.errorWithExn( + err->Utils.prettifyExn, + `EE803: Error tracking tables`, + )->Promise.resolve + }) + }, + ) + } else { + None + 
} + }, + ~onNewTables=?{ + if isHasuraEnabled { + Some( + (~tableNames) => { + Hasura.trackTables( + ~endpoint=Env.Hasura.graphqlEndpoint, + ~auth={ + role: Env.Hasura.role, + secret: Env.Hasura.secret, + }, + ~pgSchema=storagePgSchema, + ~tableNames, + )->Promise.catch(err => { + Logging.errorWithExn( + err->Utils.prettifyExn, + `EE804: Error tracking new tables`, + )->Promise.resolve + }) + }, + ) + } else { + None + } + }, + ~isHasuraEnabled, + ) +} + +let codegenPersistence = Persistence.make( + ~userEntities=Entities.userEntities, + ~allEnums=Enums.allEnums, + ~storage=makeStorage(~sql=initialSql), + ~sql=initialSql, +) + +%%private(let indexer: ref> = ref(None)) +let getIndexer = () => { + switch indexer.contents { + | Some(indexer) => indexer + | None => + let i = { + Indexer.registrations: registerAllHandlers(), + // Need to recreate initial config one more time, + // since configWithoutRegistrations called register for event + // before they were ready + config: makeGeneratedConfig(), + persistence: codegenPersistence, + } + indexer := Some(i) + i + } +} diff --git a/apps/hypersync-indexer/generated/src/GqlDbCustomTypes.gen.ts b/apps/hypersync-indexer/generated/src/GqlDbCustomTypes.gen.ts new file mode 100644 index 000000000..f9f2cf2c7 --- /dev/null +++ b/apps/hypersync-indexer/generated/src/GqlDbCustomTypes.gen.ts @@ -0,0 +1,8 @@ +/* TypeScript file generated from GqlDbCustomTypes.res by genType. 
*/ + +/* eslint-disable */ +/* tslint:disable */ + +export type Float_t = number; + +export type Int_t = number; diff --git a/apps/hypersync-indexer/generated/src/GqlDbCustomTypes.res b/apps/hypersync-indexer/generated/src/GqlDbCustomTypes.res new file mode 100644 index 000000000..8915358d6 --- /dev/null +++ b/apps/hypersync-indexer/generated/src/GqlDbCustomTypes.res @@ -0,0 +1,11 @@ +// Can be deleted on a breaking release (V3) + +module Float = { + @genType + type t = float +} + +module Int = { + @genType + type t = int +} diff --git a/apps/hypersync-indexer/generated/src/Handlers.gen.ts b/apps/hypersync-indexer/generated/src/Handlers.gen.ts new file mode 100644 index 000000000..5e57e88fe --- /dev/null +++ b/apps/hypersync-indexer/generated/src/Handlers.gen.ts @@ -0,0 +1,165 @@ +/* TypeScript file generated from Handlers.res by genType. */ + +/* eslint-disable */ +/* tslint:disable */ + +const HandlersJS = require('./Handlers.res.js'); + +import type {ENSGovernor_ProposalCanceled_eventFilters as Types_ENSGovernor_ProposalCanceled_eventFilters} from './Types.gen'; + +import type {ENSGovernor_ProposalCanceled_event as Types_ENSGovernor_ProposalCanceled_event} from './Types.gen'; + +import type {ENSGovernor_ProposalCreated_eventFilters as Types_ENSGovernor_ProposalCreated_eventFilters} from './Types.gen'; + +import type {ENSGovernor_ProposalCreated_event as Types_ENSGovernor_ProposalCreated_event} from './Types.gen'; + +import type {ENSGovernor_ProposalExecuted_eventFilters as Types_ENSGovernor_ProposalExecuted_eventFilters} from './Types.gen'; + +import type {ENSGovernor_ProposalExecuted_event as Types_ENSGovernor_ProposalExecuted_event} from './Types.gen'; + +import type {ENSGovernor_ProposalQueued_eventFilters as Types_ENSGovernor_ProposalQueued_eventFilters} from './Types.gen'; + +import type {ENSGovernor_ProposalQueued_event as Types_ENSGovernor_ProposalQueued_event} from './Types.gen'; + +import type {ENSGovernor_VoteCast_eventFilters as 
Types_ENSGovernor_VoteCast_eventFilters} from './Types.gen'; + +import type {ENSGovernor_VoteCast_event as Types_ENSGovernor_VoteCast_event} from './Types.gen'; + +import type {ENSToken_DelegateChanged_eventFilters as Types_ENSToken_DelegateChanged_eventFilters} from './Types.gen'; + +import type {ENSToken_DelegateChanged_event as Types_ENSToken_DelegateChanged_event} from './Types.gen'; + +import type {ENSToken_DelegateVotesChanged_eventFilters as Types_ENSToken_DelegateVotesChanged_eventFilters} from './Types.gen'; + +import type {ENSToken_DelegateVotesChanged_event as Types_ENSToken_DelegateVotesChanged_event} from './Types.gen'; + +import type {ENSToken_Transfer_eventFilters as Types_ENSToken_Transfer_eventFilters} from './Types.gen'; + +import type {ENSToken_Transfer_event as Types_ENSToken_Transfer_event} from './Types.gen'; + +import type {HandlerTypes_eventConfig as Types_HandlerTypes_eventConfig} from './Types.gen'; + +import type {chain as Types_chain} from './Types.gen'; + +import type {contractRegistrations as Types_contractRegistrations} from './Types.gen'; + +import type {fnWithEventConfig as Types_fnWithEventConfig} from './Types.gen'; + +import type {genericContractRegisterArgs as Internal_genericContractRegisterArgs} from 'envio/src/Internal.gen'; + +import type {genericContractRegister as Internal_genericContractRegister} from 'envio/src/Internal.gen'; + +import type {genericHandlerArgs as Internal_genericHandlerArgs} from 'envio/src/Internal.gen'; + +import type {genericHandlerWithLoader as Internal_genericHandlerWithLoader} from 'envio/src/Internal.gen'; + +import type {genericHandler as Internal_genericHandler} from 'envio/src/Internal.gen'; + +import type {genericLoaderArgs as Internal_genericLoaderArgs} from 'envio/src/Internal.gen'; + +import type {genericLoader as Internal_genericLoader} from 'envio/src/Internal.gen'; + +import type {handlerContext as Types_handlerContext} from './Types.gen'; + +import type {loaderContext as 
Types_loaderContext} from './Types.gen'; + +import type {onBlockArgs as Envio_onBlockArgs} from 'envio/src/Envio.gen'; + +import type {onBlockOptions as Envio_onBlockOptions} from 'envio/src/Envio.gen'; + +export const ENSGovernor_ProposalCreated_contractRegister: Types_fnWithEventConfig>,Types_HandlerTypes_eventConfig> = HandlersJS.ENSGovernor.ProposalCreated.contractRegister as any; + +export const ENSGovernor_ProposalCreated_handler: Types_fnWithEventConfig>,Types_HandlerTypes_eventConfig> = HandlersJS.ENSGovernor.ProposalCreated.handler as any; + +export const ENSGovernor_ProposalCreated_handlerWithLoader: (_1:Internal_genericHandlerWithLoader,loaderReturn>,Internal_genericHandler>,Types_ENSGovernor_ProposalCreated_eventFilters>) => void = HandlersJS.ENSGovernor.ProposalCreated.handlerWithLoader as any; + +export const ENSGovernor_VoteCast_contractRegister: Types_fnWithEventConfig>,Types_HandlerTypes_eventConfig> = HandlersJS.ENSGovernor.VoteCast.contractRegister as any; + +export const ENSGovernor_VoteCast_handler: Types_fnWithEventConfig>,Types_HandlerTypes_eventConfig> = HandlersJS.ENSGovernor.VoteCast.handler as any; + +export const ENSGovernor_VoteCast_handlerWithLoader: (_1:Internal_genericHandlerWithLoader,loaderReturn>,Internal_genericHandler>,Types_ENSGovernor_VoteCast_eventFilters>) => void = HandlersJS.ENSGovernor.VoteCast.handlerWithLoader as any; + +export const ENSGovernor_ProposalCanceled_contractRegister: Types_fnWithEventConfig>,Types_HandlerTypes_eventConfig> = HandlersJS.ENSGovernor.ProposalCanceled.contractRegister as any; + +export const ENSGovernor_ProposalCanceled_handler: Types_fnWithEventConfig>,Types_HandlerTypes_eventConfig> = HandlersJS.ENSGovernor.ProposalCanceled.handler as any; + +export const ENSGovernor_ProposalCanceled_handlerWithLoader: (_1:Internal_genericHandlerWithLoader,loaderReturn>,Internal_genericHandler>,Types_ENSGovernor_ProposalCanceled_eventFilters>) => void = 
HandlersJS.ENSGovernor.ProposalCanceled.handlerWithLoader as any; + +export const ENSGovernor_ProposalExecuted_contractRegister: Types_fnWithEventConfig>,Types_HandlerTypes_eventConfig> = HandlersJS.ENSGovernor.ProposalExecuted.contractRegister as any; + +export const ENSGovernor_ProposalExecuted_handler: Types_fnWithEventConfig>,Types_HandlerTypes_eventConfig> = HandlersJS.ENSGovernor.ProposalExecuted.handler as any; + +export const ENSGovernor_ProposalExecuted_handlerWithLoader: (_1:Internal_genericHandlerWithLoader,loaderReturn>,Internal_genericHandler>,Types_ENSGovernor_ProposalExecuted_eventFilters>) => void = HandlersJS.ENSGovernor.ProposalExecuted.handlerWithLoader as any; + +export const ENSGovernor_ProposalQueued_contractRegister: Types_fnWithEventConfig>,Types_HandlerTypes_eventConfig> = HandlersJS.ENSGovernor.ProposalQueued.contractRegister as any; + +export const ENSGovernor_ProposalQueued_handler: Types_fnWithEventConfig>,Types_HandlerTypes_eventConfig> = HandlersJS.ENSGovernor.ProposalQueued.handler as any; + +export const ENSGovernor_ProposalQueued_handlerWithLoader: (_1:Internal_genericHandlerWithLoader,loaderReturn>,Internal_genericHandler>,Types_ENSGovernor_ProposalQueued_eventFilters>) => void = HandlersJS.ENSGovernor.ProposalQueued.handlerWithLoader as any; + +export const ENSToken_Transfer_contractRegister: Types_fnWithEventConfig>,Types_HandlerTypes_eventConfig> = HandlersJS.ENSToken.Transfer.contractRegister as any; + +export const ENSToken_Transfer_handler: Types_fnWithEventConfig>,Types_HandlerTypes_eventConfig> = HandlersJS.ENSToken.Transfer.handler as any; + +export const ENSToken_Transfer_handlerWithLoader: (_1:Internal_genericHandlerWithLoader,loaderReturn>,Internal_genericHandler>,Types_ENSToken_Transfer_eventFilters>) => void = HandlersJS.ENSToken.Transfer.handlerWithLoader as any; + +export const ENSToken_DelegateChanged_contractRegister: Types_fnWithEventConfig>,Types_HandlerTypes_eventConfig> = 
HandlersJS.ENSToken.DelegateChanged.contractRegister as any; + +export const ENSToken_DelegateChanged_handler: Types_fnWithEventConfig>,Types_HandlerTypes_eventConfig> = HandlersJS.ENSToken.DelegateChanged.handler as any; + +export const ENSToken_DelegateChanged_handlerWithLoader: (_1:Internal_genericHandlerWithLoader,loaderReturn>,Internal_genericHandler>,Types_ENSToken_DelegateChanged_eventFilters>) => void = HandlersJS.ENSToken.DelegateChanged.handlerWithLoader as any; + +export const ENSToken_DelegateVotesChanged_contractRegister: Types_fnWithEventConfig>,Types_HandlerTypes_eventConfig> = HandlersJS.ENSToken.DelegateVotesChanged.contractRegister as any; + +export const ENSToken_DelegateVotesChanged_handler: Types_fnWithEventConfig>,Types_HandlerTypes_eventConfig> = HandlersJS.ENSToken.DelegateVotesChanged.handler as any; + +export const ENSToken_DelegateVotesChanged_handlerWithLoader: (_1:Internal_genericHandlerWithLoader,loaderReturn>,Internal_genericHandler>,Types_ENSToken_DelegateVotesChanged_eventFilters>) => void = HandlersJS.ENSToken.DelegateVotesChanged.handlerWithLoader as any; + +/** Register a Block Handler. It'll be called for every block by default. 
*/ +export const onBlock: (_1:Envio_onBlockOptions, _2:((_1:Envio_onBlockArgs) => Promise)) => void = HandlersJS.onBlock as any; + +export const ENSGovernor: { + VoteCast: { + handlerWithLoader: (_1:Internal_genericHandlerWithLoader,loaderReturn>,Internal_genericHandler>,Types_ENSGovernor_VoteCast_eventFilters>) => void; + handler: Types_fnWithEventConfig>,Types_HandlerTypes_eventConfig>; + contractRegister: Types_fnWithEventConfig>,Types_HandlerTypes_eventConfig> + }; + ProposalQueued: { + handlerWithLoader: (_1:Internal_genericHandlerWithLoader,loaderReturn>,Internal_genericHandler>,Types_ENSGovernor_ProposalQueued_eventFilters>) => void; + handler: Types_fnWithEventConfig>,Types_HandlerTypes_eventConfig>; + contractRegister: Types_fnWithEventConfig>,Types_HandlerTypes_eventConfig> + }; + ProposalCreated: { + handlerWithLoader: (_1:Internal_genericHandlerWithLoader,loaderReturn>,Internal_genericHandler>,Types_ENSGovernor_ProposalCreated_eventFilters>) => void; + handler: Types_fnWithEventConfig>,Types_HandlerTypes_eventConfig>; + contractRegister: Types_fnWithEventConfig>,Types_HandlerTypes_eventConfig> + }; + ProposalCanceled: { + handlerWithLoader: (_1:Internal_genericHandlerWithLoader,loaderReturn>,Internal_genericHandler>,Types_ENSGovernor_ProposalCanceled_eventFilters>) => void; + handler: Types_fnWithEventConfig>,Types_HandlerTypes_eventConfig>; + contractRegister: Types_fnWithEventConfig>,Types_HandlerTypes_eventConfig> + }; + ProposalExecuted: { + handlerWithLoader: (_1:Internal_genericHandlerWithLoader,loaderReturn>,Internal_genericHandler>,Types_ENSGovernor_ProposalExecuted_eventFilters>) => void; + handler: Types_fnWithEventConfig>,Types_HandlerTypes_eventConfig>; + contractRegister: Types_fnWithEventConfig>,Types_HandlerTypes_eventConfig> + } +} = HandlersJS.ENSGovernor as any; + +export const ENSToken: { + Transfer: { + handlerWithLoader: 
(_1:Internal_genericHandlerWithLoader,loaderReturn>,Internal_genericHandler>,Types_ENSToken_Transfer_eventFilters>) => void; + handler: Types_fnWithEventConfig>,Types_HandlerTypes_eventConfig>; + contractRegister: Types_fnWithEventConfig>,Types_HandlerTypes_eventConfig> + }; + DelegateChanged: { + handlerWithLoader: (_1:Internal_genericHandlerWithLoader,loaderReturn>,Internal_genericHandler>,Types_ENSToken_DelegateChanged_eventFilters>) => void; + handler: Types_fnWithEventConfig>,Types_HandlerTypes_eventConfig>; + contractRegister: Types_fnWithEventConfig>,Types_HandlerTypes_eventConfig> + }; + DelegateVotesChanged: { + handlerWithLoader: (_1:Internal_genericHandlerWithLoader,loaderReturn>,Internal_genericHandler>,Types_ENSToken_DelegateVotesChanged_eventFilters>) => void; + handler: Types_fnWithEventConfig>,Types_HandlerTypes_eventConfig>; + contractRegister: Types_fnWithEventConfig>,Types_HandlerTypes_eventConfig> + } +} = HandlersJS.ENSToken as any; diff --git a/apps/hypersync-indexer/generated/src/Handlers.res b/apps/hypersync-indexer/generated/src/Handlers.res new file mode 100644 index 000000000..b6de3af00 --- /dev/null +++ b/apps/hypersync-indexer/generated/src/Handlers.res @@ -0,0 +1,23 @@ + @genType +module ENSGovernor = { + module ProposalCreated = Types.MakeRegister(Types.ENSGovernor.ProposalCreated) + module VoteCast = Types.MakeRegister(Types.ENSGovernor.VoteCast) + module ProposalCanceled = Types.MakeRegister(Types.ENSGovernor.ProposalCanceled) + module ProposalExecuted = Types.MakeRegister(Types.ENSGovernor.ProposalExecuted) + module ProposalQueued = Types.MakeRegister(Types.ENSGovernor.ProposalQueued) +} + + @genType +module ENSToken = { + module Transfer = Types.MakeRegister(Types.ENSToken.Transfer) + module DelegateChanged = Types.MakeRegister(Types.ENSToken.DelegateChanged) + module DelegateVotesChanged = Types.MakeRegister(Types.ENSToken.DelegateVotesChanged) +} + +@genType /** Register a Block Handler. 
It'll be called for every block by default. */ +let onBlock: ( + Envio.onBlockOptions, + Envio.onBlockArgs => promise, +) => unit = ( + EventRegister.onBlock: (unknown, Internal.onBlockArgs => promise) => unit +)->Utils.magic diff --git a/apps/hypersync-indexer/generated/src/IO.res b/apps/hypersync-indexer/generated/src/IO.res new file mode 100644 index 000000000..3b69a3313 --- /dev/null +++ b/apps/hypersync-indexer/generated/src/IO.res @@ -0,0 +1,396 @@ +open Belt + +let executeSet = ( + sql: Postgres.sql, + ~items: array<'a>, + ~dbFunction: (Postgres.sql, array<'a>) => promise, +) => { + if items->Array.length > 0 { + sql->dbFunction(items) + } else { + Promise.resolve() + } +} + +let executeBatch = async ( + sql, + ~batch: Batch.t, + ~inMemoryStore: InMemoryStore.t, + ~isInReorgThreshold, + ~indexer: Indexer.t, + ~escapeTables=?, +) => { + let shouldSaveHistory = indexer.config->Config.shouldSaveHistory(~isInReorgThreshold) + + let specificError = ref(None) + + let setRawEvents = executeSet( + _, + ~dbFunction=(sql, items) => { + sql->PgStorage.setOrThrow( + ~items, + ~table=InternalTable.RawEvents.table, + ~itemSchema=InternalTable.RawEvents.schema, + ~pgSchema=Generated.storagePgSchema, + ) + }, + ~items=inMemoryStore.rawEvents->InMemoryTable.values, + ) + + let setEntities = Entities.allEntities->Belt.Array.map(entityConfig => { + let entitiesToSet = [] + let idsToDelete = [] + + let rows = + inMemoryStore + ->InMemoryStore.getInMemTable(~entityConfig) + ->InMemoryTable.Entity.rows + + rows->Js.Array2.forEach(row => { + switch row { + | Updated({latest: {entityUpdateAction: Set(entity)}}) => entitiesToSet->Array.push(entity) + | Updated({latest: {entityUpdateAction: Delete, entityId}}) => + idsToDelete->Array.push(entityId) + | _ => () + } + }) + + let shouldRemoveInvalidUtf8 = switch escapeTables { + | Some(tables) if tables->Utils.Set.has(entityConfig.table) => true + | _ => false + } + + async sql => { + try { + let promises = [] + + if shouldSaveHistory { 
+ let backfillHistoryIds = Utils.Set.make() + let batchSetUpdates = [] + // Use unnest approach + let batchDeleteCheckpointIds = [] + let batchDeleteEntityIds = [] + + rows->Js.Array2.forEach(row => { + switch row { + | Updated({history, containsRollbackDiffChange}) => + history->Js.Array2.forEach( + (entityUpdate: EntityHistory.entityUpdate<'a>) => { + if !containsRollbackDiffChange { + // For every update we want to make sure that there's an existing history item + // with the current entity state. So we backfill history with checkpoint id 0, + // before writing updates. Don't do this if the update has a rollback diff change. + backfillHistoryIds->Utils.Set.add(entityUpdate.entityId)->ignore + } + switch entityUpdate.entityUpdateAction { + | Delete => { + batchDeleteEntityIds->Array.push(entityUpdate.entityId)->ignore + batchDeleteCheckpointIds->Array.push(entityUpdate.checkpointId)->ignore + } + | Set(_) => batchSetUpdates->Js.Array2.push(entityUpdate)->ignore + } + }, + ) + | _ => () + } + }) + + if backfillHistoryIds->Utils.Set.size !== 0 { + // This must run before updating entity or entity history tables + await EntityHistory.backfillHistory( + sql, + ~pgSchema=Db.publicSchema, + ~entityName=entityConfig.name, + ~entityIndex=entityConfig.index, + ~ids=backfillHistoryIds->Utils.Set.toArray, + ) + } + + if batchDeleteCheckpointIds->Utils.Array.notEmpty { + promises->Array.push( + sql->EntityHistory.insertDeleteUpdates( + ~pgSchema=Db.publicSchema, + ~entityHistory=entityConfig.entityHistory, + ~batchDeleteEntityIds, + ~batchDeleteCheckpointIds, + ), + ) + } + + if batchSetUpdates->Utils.Array.notEmpty { + if shouldRemoveInvalidUtf8 { + let entities = batchSetUpdates->Js.Array2.map(batchSetUpdate => { + switch batchSetUpdate.entityUpdateAction { + | Set(entity) => entity + | _ => Js.Exn.raiseError("Expected Set action") + } + }) + entities->PgStorage.removeInvalidUtf8InPlace + } + + promises + ->Js.Array2.push( + sql->PgStorage.setOrThrow( + 
~items=batchSetUpdates, + ~itemSchema=entityConfig.entityHistory.setUpdateSchema, + ~table=entityConfig.entityHistory.table, + ~pgSchema=Db.publicSchema, + ), + ) + ->ignore + } + } + + if entitiesToSet->Utils.Array.notEmpty { + if shouldRemoveInvalidUtf8 { + entitiesToSet->PgStorage.removeInvalidUtf8InPlace + } + promises->Array.push( + sql->PgStorage.setOrThrow( + ~items=entitiesToSet, + ~table=entityConfig.table, + ~itemSchema=entityConfig.schema, + ~pgSchema=Generated.storagePgSchema, + ), + ) + } + if idsToDelete->Utils.Array.notEmpty { + promises->Array.push(sql->DbFunctionsEntities.batchDelete(~entityConfig)(idsToDelete)) + } + + let _ = await promises->Promise.all + } catch { + // There's a race condition that sql->Postgres.beginSql + // might throw PG error, earlier, than the handled error + // from setOrThrow will be passed through. + // This is needed for the utf8 encoding fix. + | exn => { + /* Note: Entity History doesn't return StorageError yet, and directly throws JsError */ + let normalizedExn = switch exn { + | JsError(_) => exn + | Persistence.StorageError({reason: exn}) => exn + | _ => exn + }->Js.Exn.anyToExnInternal + + switch normalizedExn { + | JsError(error) => + // Workaround for https://github.com/enviodev/hyperindex/issues/446 + // We do escaping only when we actually got an error writing for the first time. + // This is not perfect, but an optimization to avoid escaping for every single item. + + switch error->S.parseOrThrow(PgStorage.pgErrorMessageSchema) { + | `current transaction is aborted, commands ignored until end of transaction block` => () + | `invalid byte sequence for encoding "UTF8": 0x00` => + // Since the transaction is aborted at this point, + // we can't simply retry the function with escaped items, + // so propagate the error, to restart the whole batch write. + // Also, pass the failing table, to escape only its items. 
+ // TODO: Ideally all this should be done in the file, + // so it'll be easier to work on PG specific logic. + specificError.contents = Some(PgStorage.PgEncodingError({table: entityConfig.table})) + | _ => specificError.contents = Some(exn->Utils.prettifyExn) + | exception _ => () + } + | S.Raised(_) => raise(normalizedExn) // But rethrow this one, since it's not a PG error + | _ => () + } + + // Improtant: Don't rethrow here, since it'll result in + // an unhandled rejected promise error. + // That's fine not to throw, since sql->Postgres.beginSql + // will fail anyways. + } + } + } + }) + + //In the event of a rollback, rollback all meta tables based on the given + //valid event identifier, where all rows created after this eventIdentifier should + //be deleted + let rollbackTables = switch inMemoryStore { + | {rollbackTargetCheckpointId: Some(rollbackTargetCheckpointId)} => + Some( + sql => { + let promises = Entities.allEntities->Js.Array2.map(entityConfig => { + sql->EntityHistory.rollback( + ~pgSchema=Db.publicSchema, + ~entityName=entityConfig.name, + ~entityIndex=entityConfig.index, + ~rollbackTargetCheckpointId, + ) + }) + promises + ->Js.Array2.push( + sql->InternalTable.Checkpoints.rollback( + ~pgSchema=Db.publicSchema, + ~rollbackTargetCheckpointId, + ), + ) + ->ignore + Promise.all(promises) + }, + ) + | _ => None + } + + try { + let _ = await Promise.all2(( + sql->Postgres.beginSql(async sql => { + //Rollback tables need to happen first in the traction + switch rollbackTables { + | Some(rollbackTables) => + let _ = await rollbackTables(sql) + | None => () + } + + let setOperations = [ + sql => + sql->InternalTable.Chains.setProgressedChains( + ~pgSchema=Db.publicSchema, + ~progressedChains=batch.progressedChainsById->Utils.Dict.mapValuesToArray(( + chainAfterBatch + ): InternalTable.Chains.progressedChain => { + chainId: chainAfterBatch.fetchState.chainId, + progressBlockNumber: chainAfterBatch.progressBlockNumber, + totalEventsProcessed: 
chainAfterBatch.totalEventsProcessed, + }), + ), + setRawEvents, + ]->Belt.Array.concat(setEntities) + + if shouldSaveHistory { + setOperations->Array.push(sql => + sql->InternalTable.Checkpoints.insert( + ~pgSchema=Db.publicSchema, + ~checkpointIds=batch.checkpointIds, + ~checkpointChainIds=batch.checkpointChainIds, + ~checkpointBlockNumbers=batch.checkpointBlockNumbers, + ~checkpointBlockHashes=batch.checkpointBlockHashes, + ~checkpointEventsProcessed=batch.checkpointEventsProcessed, + ) + ) + } + + await setOperations + ->Belt.Array.map(dbFunc => sql->dbFunc) + ->Promise.all + }), + // Since effect cache currently doesn't support rollback, + // we can run it outside of the transaction for simplicity. + inMemoryStore.effects + ->Js.Dict.keys + ->Belt.Array.keepMapU(effectName => { + let inMemTable = inMemoryStore.effects->Js.Dict.unsafeGet(effectName) + let {idsToStore, dict, effect, invalidationsCount} = inMemTable + switch idsToStore { + | [] => None + | ids => { + let items = Belt.Array.makeUninitializedUnsafe(ids->Belt.Array.length) + ids->Belt.Array.forEachWithIndex((index, id) => { + items->Js.Array2.unsafe_set( + index, + ( + { + id, + output: dict->Js.Dict.unsafeGet(id), + }: Internal.effectCacheItem + ), + ) + }) + Some( + indexer.persistence->Persistence.setEffectCacheOrThrow( + ~effect, + ~items, + ~invalidationsCount, + ), + ) + } + } + }) + ->Promise.all, + )) + + // Just in case, if there's a not PG-specific error. 
+ switch specificError.contents { + | Some(specificError) => raise(specificError) + | None => () + } + } catch { + | exn => + raise( + switch specificError.contents { + | Some(specificError) => specificError + | None => exn + }, + ) + } +} + +let prepareRollbackDiff = async (~persistence: Persistence.t, ~rollbackTargetCheckpointId) => { + let inMemStore = InMemoryStore.make(~entities=Entities.allEntities, ~rollbackTargetCheckpointId) + + let deletedEntities = Js.Dict.empty() + let setEntities = Js.Dict.empty() + + let _ = + await Entities.allEntities + ->Belt.Array.map(async entityConfig => { + let entityTable = inMemStore->InMemoryStore.getInMemTable(~entityConfig) + + let (removedIdsResult, restoredEntitiesResult) = await Promise.all2(( + // Get IDs of entities that should be deleted (created after rollback target with no prior history) + persistence.sql + ->Postgres.preparedUnsafe( + entityConfig.entityHistory.makeGetRollbackRemovedIdsQuery(~pgSchema=Db.publicSchema), + [rollbackTargetCheckpointId]->Utils.magic, + ) + ->(Utils.magic: promise => promise>), + // Get entities that should be restored to their state at or before rollback target + persistence.sql + ->Postgres.preparedUnsafe( + entityConfig.entityHistory.makeGetRollbackRestoredEntitiesQuery( + ~pgSchema=Db.publicSchema, + ), + [rollbackTargetCheckpointId]->Utils.magic, + ) + ->(Utils.magic: promise => promise>), + )) + + // Process removed IDs + removedIdsResult->Js.Array2.forEach(data => { + deletedEntities->Utils.Dict.push(entityConfig.name, data["id"]) + entityTable->InMemoryTable.Entity.set( + { + entityId: data["id"], + checkpointId: 0, + entityUpdateAction: Delete, + }, + ~shouldSaveHistory=false, + ~containsRollbackDiffChange=true, + ) + }) + + let restoredEntities = restoredEntitiesResult->S.parseOrThrow(entityConfig.rowsSchema) + + // Process restored entities + restoredEntities->Belt.Array.forEach((entity: Entities.internalEntity) => { + setEntities->Utils.Dict.push(entityConfig.name, 
entity.id) + entityTable->InMemoryTable.Entity.set( + { + entityId: entity.id, + checkpointId: 0, + entityUpdateAction: Set(entity), + }, + ~shouldSaveHistory=false, + ~containsRollbackDiffChange=true, + ) + }) + }) + ->Promise.all + + { + "inMemStore": inMemStore, + "deletedEntities": deletedEntities, + "setEntities": setEntities, + } +} diff --git a/apps/hypersync-indexer/generated/src/Index.bs.js b/apps/hypersync-indexer/generated/src/Index.bs.js new file mode 100644 index 000000000..598439233 --- /dev/null +++ b/apps/hypersync-indexer/generated/src/Index.bs.js @@ -0,0 +1,209 @@ +#!/usr/bin/env node + +/* + * Migration script for Envio v2.22.0+ + * + * In version 2.22.0 we introduced a breaking change by changing ReScript generated + * files suffix from .bs.js to .res.js. This script helps users smoothly upgrade + * their indexers to the new version. + * + * Previously, users needed to run ReScript-generated files directly in their + * package.json scripts (e.g., "node generated/src/Index.bs.js"). Now, Envio + * provides a unified `envio start` command that handles this internally. + * + * This script: + * 1. Updates package.json scripts to use `envio start` instead of direct file execution + * 2. 
Runs the new envio start command + */ + +const fs = require("fs"); +const path = require("path"); +const readline = require("readline"); +const { spawn } = require("child_process"); + +// Function to update package.json +function updatePackageJson() { + try { + // Look for package.json in current directory and parent directories + let packageJsonPath = null; + let currentDir = process.cwd(); + + // Search up the directory tree for package.json + while (currentDir !== path.dirname(currentDir)) { + const potentialPath = path.join(currentDir, "package.json"); + if (fs.existsSync(potentialPath)) { + packageJsonPath = potentialPath; + break; + } + currentDir = path.dirname(currentDir); + } + + if (!packageJsonPath) { + console.log("❌ Could not find package.json file"); + return false; + } + + console.log(`📦 Found package.json at: ${packageJsonPath}`); + + // Read and parse package.json + const packageJsonContent = fs.readFileSync(packageJsonPath, "utf8"); + const packageJson = JSON.parse(packageJsonContent); + + // Check if scripts section exists + if (!packageJson.scripts) { + console.log("⚠️ No scripts section found in package.json"); + return false; + } + + // Update the start script + let updated = false; + if (packageJson.scripts.start) { + let originalScript = packageJson.scripts.start; + let newScript = originalScript; + + // Replace ts-node generated/src/Index.bs.js with envio start + newScript = newScript.replace( + /ts-node\s+generated\/src\/Index\.bs\.js/g, + "envio start" + ); + + // Replace node generated/src/Index.bs.js with envio start + newScript = newScript.replace( + /node\s+generated\/src\/Index\.bs\.js/g, + "envio start" + ); + + if (newScript !== originalScript) { + console.log("🔧 Updating start script..."); + console.log(` From: ${originalScript}`); + console.log(` To: ${newScript}`); + packageJson.scripts.start = newScript; + updated = true; + } + } + + if (updated) { + // Write back the updated package.json + fs.writeFileSync( + packageJsonPath, 
+ JSON.stringify(packageJson, null, 2) + "\n" + ); + console.log("✅ Package.json updated successfully!"); + return true; + } else { + console.log("ℹ️ No scripts found that need updating"); + return false; + } + } catch (error) { + console.error("❌ Error updating package.json:", error.message); + return false; + } +} + +// Function to prompt user for migration +function promptUserForMigration() { + return new Promise((resolve) => { + const rl = readline.createInterface({ + input: process.stdin, + output: process.stdout, + }); + + // Set up timeout to automatically skip after 30 seconds + const timeout = setTimeout(() => { + rl.close(); + console.log( + "\n⏱️ No response received in 30 seconds, skipping package.json migration." + ); + resolve(false); + }, 30000); + + rl.question( + "🤔 Would you like to automatically update your package.json scripts? (y/N): ", + (answer) => { + clearTimeout(timeout); + rl.close(); + const shouldMigrate = + answer.toLowerCase() === "y" || + answer.toLowerCase() === "yes" || + answer.trim() === ""; + resolve(shouldMigrate); + } + ); + }); +} + +// Function to run envio start +function runEnvioStart() { + console.log("🚀 Starting Envio..."); + + const envioProcess = spawn("envio", ["start"], { + stdio: "inherit", + cwd: process.cwd(), + }); + + envioProcess.on("error", (error) => { + if (error.code === "ENOENT") { + console.error( + '❌ Error: "envio" command not found. Please make sure Envio CLI is installed.' + ); + } else { + console.error("❌ Error running envio start:", error.message); + } + process.exit(1); + }); + + envioProcess.on("close", (code) => { + if (code !== 0) { + console.log(`❌ Envio start exited with code ${code}`); + process.exit(code); + } + }); +} + +// Main execution +async function main() { + console.log("🔄 Migrating to envio@2.22.0 or later..."); + console.log("📋 Migration steps:"); + console.log(" 1. Update package.json scripts (optional)"); + console.log(" 2. 
Run envio start"); + console.log(""); + + console.log( + "ℹ️ Note: In the new version, pnpm-workspaces.yaml and .npmrc files are no longer required." + ); + console.log( + " You can safely remove them if you don't need them for other purposes." + ); + console.log(""); + + // Prompt user for package.json migration + const shouldMigrate = await promptUserForMigration(); + + if (shouldMigrate) { + console.log(""); + const packageUpdated = updatePackageJson(); + + if (packageUpdated) { + console.log(""); + console.log( + "🎉 Migration completed! Your package.json has been updated." + ); + console.log( + ' From now on, you can use "npm start" or "envio start" directly.' + ); + console.log(""); + } + } else { + console.log("⏭️ Skipping package.json migration."); + console.log(""); + } + + // Run envio start + runEnvioStart(); +} + +// Start the main function +main().catch((error) => { + console.error("❌ Error during migration:", error.message); + process.exit(1); +}); diff --git a/apps/hypersync-indexer/generated/src/Index.res b/apps/hypersync-indexer/generated/src/Index.res new file mode 100644 index 000000000..daba19f7a --- /dev/null +++ b/apps/hypersync-indexer/generated/src/Index.res @@ -0,0 +1,332 @@ +open Belt + +type chainData = { + chainId: float, + poweredByHyperSync: bool, + firstEventBlockNumber: option, + latestProcessedBlock: option, + timestampCaughtUpToHeadOrEndblock: option, + numEventsProcessed: int, + latestFetchedBlockNumber: int, + currentBlockHeight: int, + numBatchesFetched: int, + endBlock: option, + numAddresses: int, +} +@tag("status") +type state = + | @as("disabled") Disabled({}) + | @as("initializing") Initializing({}) + | @as("active") + Active({ + envioVersion: string, + chains: array, + indexerStartTime: Js.Date.t, + isPreRegisteringDynamicContracts: bool, + isUnorderedMultichainMode: bool, + rollbackOnReorg: bool, + }) + +let chainDataSchema = S.schema((s): chainData => { + chainId: s.matches(S.float), + poweredByHyperSync: 
s.matches(S.bool), + firstEventBlockNumber: s.matches(S.option(S.int)), + latestProcessedBlock: s.matches(S.option(S.int)), + timestampCaughtUpToHeadOrEndblock: s.matches(S.option(S.datetime(S.string))), + numEventsProcessed: s.matches(S.int), + latestFetchedBlockNumber: s.matches(S.int), + currentBlockHeight: s.matches(S.int), + numBatchesFetched: s.matches(S.int), + endBlock: s.matches(S.option(S.int)), + numAddresses: s.matches(S.int), +}) +let stateSchema = S.union([ + S.literal(Disabled({})), + S.literal(Initializing({})), + S.schema(s => Active({ + envioVersion: s.matches(S.string), + chains: s.matches(S.array(chainDataSchema)), + indexerStartTime: s.matches(S.datetime(S.string)), + // Keep the field, since Dev Console expects it to be present + isPreRegisteringDynamicContracts: false, + isUnorderedMultichainMode: s.matches(S.bool), + rollbackOnReorg: s.matches(S.bool), + })), +]) + +let startServer = (~getState, ~indexer: Indexer.t, ~isDevelopmentMode: bool) => { + open Express + + let app = makeCjs() + + let consoleCorsMiddleware = (req, res, next) => { + switch req.headers->Js.Dict.get("origin") { + | Some(origin) if origin === Env.prodEnvioAppUrl || origin === Env.envioAppUrl => + res->setHeader("Access-Control-Allow-Origin", origin) + | _ => () + } + + res->setHeader("Access-Control-Allow-Methods", "GET, POST, PUT, DELETE, OPTIONS") + res->setHeader( + "Access-Control-Allow-Headers", + "Origin, X-Requested-With, Content-Type, Accept", + ) + + if req.method === Options { + res->sendStatus(200) + } else { + next() + } + } + app->useFor("/console", consoleCorsMiddleware) + app->useFor("/metrics", consoleCorsMiddleware) + + app->get("/healthz", (_req, res) => { + // this is the machine readable port used in kubernetes to check the health of this service. + // aditional health information could be added in the future (info about errors, back-offs, etc). 
+ res->sendStatus(200) + }) + + app->get("/console/state", (_req, res) => { + let state = if isDevelopmentMode { + getState() + } else { + Disabled({}) + } + + res->json(state->S.reverseConvertToJsonOrThrow(stateSchema)) + }) + + app->post("/console/syncCache", (_req, res) => { + if isDevelopmentMode { + (indexer.persistence->Persistence.getInitializedStorageOrThrow).dumpEffectCache() + ->Promise.thenResolve(_ => res->json(Boolean(true))) + ->Promise.done + } else { + res->json(Boolean(false)) + } + }) + + PromClient.collectDefaultMetrics() + + app->get("/metrics", (_req, res) => { + res->set("Content-Type", PromClient.defaultRegister->PromClient.getContentType) + let _ = + PromClient.defaultRegister + ->PromClient.metrics + ->Promise.thenResolve(metrics => res->endWithData(metrics)) + }) + + let _ = app->listen(Env.serverPort) +} + +type args = {@as("tui-off") tuiOff?: bool} + +type process +@val external process: process = "process" +@get external argv: process => 'a = "argv" + +type mainArgs = Yargs.parsedArgs + +let makeAppState = (globalState: GlobalState.t): EnvioInkApp.appState => { + let chains = + globalState.chainManager.chainFetchers + ->ChainMap.values + ->Array.map(cf => { + let {numEventsProcessed, fetchState, numBatchesFetched} = cf + let latestFetchedBlockNumber = Pervasives.max(fetchState->FetchState.bufferBlockNumber, 0) + let hasProcessedToEndblock = cf->ChainFetcher.hasProcessedToEndblock + let currentBlockHeight = + cf->ChainFetcher.hasProcessedToEndblock + ? cf.fetchState.endBlock->Option.getWithDefault(cf.currentBlockHeight) + : cf.currentBlockHeight + + let progress: ChainData.progress = if hasProcessedToEndblock { + // If the endblock has been reached then set the progress to synced. 
+ // if there's chains that have no events in the block range start->end, + // it's possible there are no events in that block range (ie firstEventBlockNumber = None) + // This ensures TUI still displays synced in this case + let { + committedProgressBlockNumber, + timestampCaughtUpToHeadOrEndblock, + numEventsProcessed, + firstEventBlockNumber, + } = cf + + Synced({ + firstEventBlockNumber: firstEventBlockNumber->Option.getWithDefault(0), + latestProcessedBlock: committedProgressBlockNumber, + timestampCaughtUpToHeadOrEndblock: timestampCaughtUpToHeadOrEndblock->Option.getWithDefault( + Js.Date.now()->Js.Date.fromFloat, + ), + numEventsProcessed, + }) + } else { + switch cf { + | { + committedProgressBlockNumber, + timestampCaughtUpToHeadOrEndblock: Some(timestampCaughtUpToHeadOrEndblock), + firstEventBlockNumber: Some(firstEventBlockNumber), + } => + Synced({ + firstEventBlockNumber, + latestProcessedBlock: committedProgressBlockNumber, + timestampCaughtUpToHeadOrEndblock, + numEventsProcessed, + }) + | { + committedProgressBlockNumber, + timestampCaughtUpToHeadOrEndblock: None, + firstEventBlockNumber: Some(firstEventBlockNumber), + } => + Syncing({ + firstEventBlockNumber, + latestProcessedBlock: committedProgressBlockNumber, + numEventsProcessed, + }) + | {firstEventBlockNumber: None} => SearchingForEvents + } + } + + ( + { + progress, + currentBlockHeight, + latestFetchedBlockNumber, + numBatchesFetched, + chain: ChainMap.Chain.makeUnsafe(~chainId=cf.chainConfig.id), + endBlock: cf.fetchState.endBlock, + poweredByHyperSync: (cf.sourceManager->SourceManager.getActiveSource).poweredByHyperSync, + }: EnvioInkApp.chainData + ) + }) + { + config: globalState.indexer.config, + indexerStartTime: globalState.indexerStartTime, + chains, + } +} + +// Function to open the URL in the browser +// @module("child_process") +// external exec: (string, (Js.Nullable.t, 'a, 'b) => unit) => unit = "exec" +// @module("process") external platform: string = "platform" +// let 
openConsole = () => { +// let host = "https://envio.dev" +// let command = switch platform { +// | "win32" => "start" +// | "darwin" => "open" +// | _ => "xdg-open" +// } +// exec(`${command} ${host}/console`, (_, _, _) => ()) +// } + +let main = async () => { + try { + let mainArgs: mainArgs = process->argv->Yargs.hideBin->Yargs.yargs->Yargs.argv + let shouldUseTui = !(mainArgs.tuiOff->Belt.Option.getWithDefault(Env.tuiOffEnvVar)) + // The most simple check to verify whether we are running in development mode + // and prevent exposing the console to public, when creating a real deployment. + let isDevelopmentMode = Env.Db.password === "testing" + + let indexer = Generated.getIndexer() + + let gsManagerRef = ref(None) + + let envioVersion = Utils.EnvioPackage.json.version + Prometheus.Info.set(~version=envioVersion) + Prometheus.RollbackEnabled.set(~enabled=indexer.config.shouldRollbackOnReorg) + + startServer( + ~indexer, + ~isDevelopmentMode, + ~getState=() => + switch gsManagerRef.contents { + | None => Initializing({}) + | Some(gsManager) => { + let state = gsManager->GlobalStateManager.getState + let appState = state->makeAppState + Active({ + envioVersion, + chains: appState.chains->Js.Array2.map(c => { + let cf = state.chainManager.chainFetchers->ChainMap.get(c.chain) + { + chainId: c.chain->ChainMap.Chain.toChainId->Js.Int.toFloat, + poweredByHyperSync: c.poweredByHyperSync, + latestFetchedBlockNumber: c.latestFetchedBlockNumber, + currentBlockHeight: c.currentBlockHeight, + numBatchesFetched: c.numBatchesFetched, + endBlock: c.endBlock, + firstEventBlockNumber: switch c.progress { + | SearchingForEvents => None + | Syncing({firstEventBlockNumber}) | Synced({firstEventBlockNumber}) => + Some(firstEventBlockNumber) + }, + latestProcessedBlock: switch c.progress { + | SearchingForEvents => None + | Syncing({latestProcessedBlock}) | Synced({latestProcessedBlock}) => + Some(latestProcessedBlock) + }, + timestampCaughtUpToHeadOrEndblock: switch c.progress { + | 
SearchingForEvents + | Syncing(_) => + None + | Synced({timestampCaughtUpToHeadOrEndblock}) => + Some(timestampCaughtUpToHeadOrEndblock) + }, + numEventsProcessed: switch c.progress { + | SearchingForEvents => 0 + | Syncing({numEventsProcessed}) + | Synced({numEventsProcessed}) => numEventsProcessed + }, + numAddresses: cf.fetchState->FetchState.numAddresses, + } + }), + indexerStartTime: appState.indexerStartTime, + isPreRegisteringDynamicContracts: false, + rollbackOnReorg: indexer.config.shouldRollbackOnReorg, + isUnorderedMultichainMode: switch indexer.config.multichain { + | Unordered => true + | Ordered => false + }, + }) + } + }, + ) + + await indexer.persistence->Persistence.init( + ~chainConfigs=indexer.config.chainMap->ChainMap.values, + ) + + let chainManager = await ChainManager.makeFromDbState( + ~initialState=indexer.persistence->Persistence.getInitializedState, + ~config=indexer.config, + ~registrations=indexer.registrations, + ~persistence=indexer.persistence, + ) + let globalState = GlobalState.make(~indexer, ~chainManager, ~isDevelopmentMode, ~shouldUseTui) + let stateUpdatedHook = if shouldUseTui { + let rerender = EnvioInkApp.startApp(makeAppState(globalState)) + Some(globalState => globalState->makeAppState->rerender) + } else { + None + } + let gsManager = globalState->GlobalStateManager.make(~stateUpdatedHook?) + gsManagerRef := Some(gsManager) + gsManager->GlobalStateManager.dispatchTask(NextQuery(CheckAllChains)) + /* + NOTE: + This `ProcessEventBatch` dispatch shouldn't be necessary but we are adding for safety, it should immediately return doing + nothing since there is no events on the queues. 
+ */ + + gsManager->GlobalStateManager.dispatchTask(ProcessEventBatch) + } catch { + | e => { + e->ErrorHandling.make(~msg="Failed at initialization")->ErrorHandling.log + NodeJs.process->NodeJs.exitWithCode(Failure) + } + } +} + +main()->ignore diff --git a/apps/hypersync-indexer/generated/src/Js.shim.ts b/apps/hypersync-indexer/generated/src/Js.shim.ts new file mode 100644 index 000000000..51f44c5fd --- /dev/null +++ b/apps/hypersync-indexer/generated/src/Js.shim.ts @@ -0,0 +1,11 @@ +export type Json_t = + | string + | boolean + | number + | null + | { [key: string]: Json_t } + | Json_t[]; + +export type t = unknown; + +export type Exn_t = Error; diff --git a/apps/hypersync-indexer/generated/src/LoadLayer.res b/apps/hypersync-indexer/generated/src/LoadLayer.res new file mode 100644 index 000000000..5dab1f3b8 --- /dev/null +++ b/apps/hypersync-indexer/generated/src/LoadLayer.res @@ -0,0 +1,444 @@ +open Belt + +let loadById = ( + ~loadManager, + ~persistence: Persistence.t, + ~entityConfig: Internal.entityConfig, + ~inMemoryStore, + ~shouldGroup, + ~item, + ~entityId, +) => { + let key = `${entityConfig.name}.get` + let inMemTable = inMemoryStore->InMemoryStore.getInMemTable(~entityConfig) + + let load = async (idsToLoad, ~onError as _) => { + let timerRef = Prometheus.StorageLoad.startOperation(~operation=key) + + // Since LoadManager.call prevents registerign entities already existing in the inMemoryStore, + // we can be sure that we load only the new ones. 
+ let dbEntities = try { + await (persistence->Persistence.getInitializedStorageOrThrow).loadByIdsOrThrow( + ~table=entityConfig.table, + ~rowsSchema=entityConfig.rowsSchema, + ~ids=idsToLoad, + ) + } catch { + | Persistence.StorageError({message, reason}) => + reason->ErrorHandling.mkLogAndRaise(~logger=item->Logging.getItemLogger, ~msg=message) + } + + let entitiesMap = Js.Dict.empty() + for idx in 0 to dbEntities->Array.length - 1 { + let entity = dbEntities->Js.Array2.unsafe_get(idx) + entitiesMap->Js.Dict.set(entity.id, entity) + } + idsToLoad->Js.Array2.forEach(entityId => { + // Set the entity in the in memory store + // without overwriting existing values + // which might be newer than what we got from db + inMemTable->InMemoryTable.Entity.initValue( + ~allowOverWriteEntity=false, + ~key=entityId, + ~entity=entitiesMap->Utils.Dict.dangerouslyGetNonOption(entityId), + ) + }) + + timerRef->Prometheus.StorageLoad.endOperation( + ~operation=key, + ~whereSize=idsToLoad->Array.length, + ~size=dbEntities->Array.length, + ) + } + + loadManager->LoadManager.call( + ~key, + ~load, + ~shouldGroup, + ~hasher=LoadManager.noopHasher, + ~getUnsafeInMemory=inMemTable->InMemoryTable.Entity.getUnsafe, + ~hasInMemory=hash => inMemTable.table->InMemoryTable.hasByHash(hash), + ~input=entityId, + ) +} + +let callEffect = ( + ~effect: Internal.effect, + ~arg: Internal.effectArgs, + ~inMemTable: InMemoryStore.effectCacheInMemTable, + ~timerRef, + ~onError, +) => { + let effectName = effect.name + let hadActiveCalls = effect.activeCallsCount > 0 + effect.activeCallsCount = effect.activeCallsCount + 1 + Prometheus.EffectCalls.activeCallsCount->Prometheus.SafeGauge.handleInt( + ~labels=effectName, + ~value=effect.activeCallsCount, + ) + + if hadActiveCalls { + let elapsed = Hrtime.millisBetween(~from=effect.prevCallStartTimerRef, ~to=timerRef) + if elapsed > 0 { + Prometheus.EffectCalls.timeCounter->Prometheus.SafeCounter.incrementMany( + ~labels=effectName, + 
~value=Hrtime.millisBetween(~from=effect.prevCallStartTimerRef, ~to=timerRef), + ) + } + } + effect.prevCallStartTimerRef = timerRef + + effect.handler(arg) + ->Promise.thenResolve(output => { + inMemTable.dict->Js.Dict.set(arg.cacheKey, output) + if arg.context.cache { + inMemTable.idsToStore->Array.push(arg.cacheKey)->ignore + } + }) + ->Promise.catchResolve(exn => { + onError(~inputKey=arg.cacheKey, ~exn) + }) + ->Promise.finally(() => { + effect.activeCallsCount = effect.activeCallsCount - 1 + Prometheus.EffectCalls.activeCallsCount->Prometheus.SafeGauge.handleInt( + ~labels=effectName, + ~value=effect.activeCallsCount, + ) + let newTimer = Hrtime.makeTimer() + Prometheus.EffectCalls.timeCounter->Prometheus.SafeCounter.incrementMany( + ~labels=effectName, + ~value=Hrtime.millisBetween(~from=effect.prevCallStartTimerRef, ~to=newTimer), + ) + effect.prevCallStartTimerRef = newTimer + + Prometheus.EffectCalls.totalCallsCount->Prometheus.SafeCounter.increment(~labels=effectName) + Prometheus.EffectCalls.sumTimeCounter->Prometheus.SafeCounter.incrementMany( + ~labels=effectName, + ~value=timerRef->Hrtime.timeSince->Hrtime.toMillis->Hrtime.intFromMillis, + ) + }) +} + +let rec executeWithRateLimit = ( + ~effect: Internal.effect, + ~effectArgs: array, + ~inMemTable, + ~onError, + ~isFromQueue: bool, +) => { + let effectName = effect.name + + let timerRef = Hrtime.makeTimer() + let promises = [] + + switch effect.rateLimit { + | None => + // No rate limiting - execute all immediately + for idx in 0 to effectArgs->Array.length - 1 { + promises + ->Array.push( + callEffect( + ~effect, + ~arg=effectArgs->Array.getUnsafe(idx), + ~inMemTable, + ~timerRef, + ~onError, + )->Promise.ignoreValue, + ) + ->ignore + } + + | Some(state) => + let now = Js.Date.now() + + // Check if we need to reset the window + if now >= state.windowStartTime +. 
state.durationMs->Int.toFloat { + state.availableCalls = state.callsPerDuration + state.windowStartTime = now + state.nextWindowPromise = None + } + + // Split into immediate and queued + let immediateCount = Js.Math.min_int(state.availableCalls, effectArgs->Array.length) + let immediateArgs = effectArgs->Array.slice(~offset=0, ~len=immediateCount) + let queuedArgs = effectArgs->Array.sliceToEnd(immediateCount) + + // Update available calls + state.availableCalls = state.availableCalls - immediateCount + + // Call immediate effects + for idx in 0 to immediateArgs->Array.length - 1 { + promises + ->Array.push( + callEffect( + ~effect, + ~arg=immediateArgs->Array.getUnsafe(idx), + ~inMemTable, + ~timerRef, + ~onError, + )->Promise.ignoreValue, + ) + ->ignore + } + + if immediateCount > 0 && isFromQueue { + // Update queue count metric + state.queueCount = state.queueCount - immediateCount + Prometheus.EffectQueueCount.set(~count=state.queueCount, ~effectName) + } + + // Handle queued items + if queuedArgs->Utils.Array.notEmpty { + if !isFromQueue { + // Update queue count metric + state.queueCount = state.queueCount + queuedArgs->Array.length + Prometheus.EffectQueueCount.set(~count=state.queueCount, ~effectName) + } + + let millisUntilReset = ref(0) + let nextWindowPromise = switch state.nextWindowPromise { + | Some(p) => p + | None => + millisUntilReset := + (state.windowStartTime +. state.durationMs->Int.toFloat -. 
now)->Float.toInt + let p = Utils.delay(millisUntilReset.contents) + state.nextWindowPromise = Some(p) + p + } + + // Wait for next window and recursively process queue + promises + ->Array.push( + nextWindowPromise + ->Promise.then(() => { + if millisUntilReset.contents > 0 { + Prometheus.EffectQueueCount.timeCounter->Prometheus.SafeCounter.incrementMany( + ~labels=effectName, + ~value=millisUntilReset.contents, + ) + } + executeWithRateLimit( + ~effect, + ~effectArgs=queuedArgs, + ~inMemTable, + ~onError, + ~isFromQueue=true, + ) + }) + ->Promise.ignoreValue, + ) + ->ignore + } + } + + // Wait for all to complete + promises->Promise.all +} + +let loadEffect = ( + ~loadManager, + ~persistence: Persistence.t, + ~effect: Internal.effect, + ~effectArgs, + ~inMemoryStore, + ~shouldGroup, + ~item, +) => { + let effectName = effect.name + let key = `${effectName}.effect` + let inMemTable = inMemoryStore->InMemoryStore.getEffectInMemTable(~effect) + + let load = async (args, ~onError) => { + let idsToLoad = args->Js.Array2.map((arg: Internal.effectArgs) => arg.cacheKey) + let idsFromCache = Utils.Set.make() + + if ( + switch persistence.storageStatus { + | Ready({cache}) => cache->Utils.Dict.has(effectName) + | _ => false + } + ) { + let timerRef = Prometheus.StorageLoad.startOperation(~operation=key) + let {table, outputSchema} = effect.storageMeta + + let dbEntities = try { + await (persistence->Persistence.getInitializedStorageOrThrow).loadByIdsOrThrow( + ~table, + ~rowsSchema=Internal.effectCacheItemRowsSchema, + ~ids=idsToLoad, + ) + } catch { + | exn => + item + ->Logging.getItemLogger + ->Logging.childWarn({ + "msg": `Failed to load cache effect cache. 
The indexer will continue working, but the effect will not be able to use the cache.`, + "err": exn->Utils.prettifyExn, + "effect": effectName, + }) + [] + } + + dbEntities->Js.Array2.forEach(dbEntity => { + try { + let output = dbEntity.output->S.parseOrThrow(outputSchema) + idsFromCache->Utils.Set.add(dbEntity.id)->ignore + inMemTable.dict->Js.Dict.set(dbEntity.id, output) + } catch { + | S.Raised(error) => + inMemTable.invalidationsCount = inMemTable.invalidationsCount + 1 + Prometheus.EffectCacheInvalidationsCount.increment(~effectName) + item + ->Logging.getItemLogger + ->Logging.childTrace({ + "msg": "Invalidated effect cache", + "input": dbEntity.id, + "effect": effectName, + "err": error->S.Error.message, + }) + } + }) + + timerRef->Prometheus.StorageLoad.endOperation( + ~operation=key, + ~whereSize=idsToLoad->Array.length, + ~size=dbEntities->Array.length, + ) + } + + let remainingCallsCount = idsToLoad->Array.length - idsFromCache->Utils.Set.size + if remainingCallsCount > 0 { + let argsToCall = [] + for idx in 0 to args->Array.length - 1 { + let arg = args->Array.getUnsafe(idx) + if !(idsFromCache->Utils.Set.has(arg.cacheKey)) { + argsToCall->Array.push(arg)->ignore + } + } + + if argsToCall->Utils.Array.notEmpty { + await executeWithRateLimit( + ~effect, + ~effectArgs=argsToCall, + ~inMemTable, + ~onError, + ~isFromQueue=false, + )->Promise.ignoreValue + } + } + } + + loadManager->LoadManager.call( + ~key, + ~load, + ~shouldGroup, + ~hasher=args => args.cacheKey, + ~getUnsafeInMemory=hash => inMemTable.dict->Js.Dict.unsafeGet(hash), + ~hasInMemory=hash => inMemTable.dict->Utils.Dict.has(hash), + ~input=effectArgs, + ) +} + +let loadByField = ( + ~loadManager, + ~persistence: Persistence.t, + ~operator: TableIndices.Operator.t, + ~entityConfig: Internal.entityConfig, + ~inMemoryStore, + ~fieldName, + ~fieldValueSchema, + ~shouldGroup, + ~item, + ~fieldValue, +) => { + let operatorCallName = switch operator { + | Eq => "eq" + | Gt => "gt" + | Lt => "lt" + 
} + let key = `${entityConfig.name}.getWhere.${fieldName}.${operatorCallName}` + let inMemTable = inMemoryStore->InMemoryStore.getInMemTable(~entityConfig) + + let load = async (fieldValues: array<'fieldValue>, ~onError as _) => { + let timerRef = Prometheus.StorageLoad.startOperation(~operation=key) + + let size = ref(0) + + let indiciesToLoad = fieldValues->Js.Array2.map((fieldValue): TableIndices.Index.t => { + Single({ + fieldName, + fieldValue: TableIndices.FieldValue.castFrom(fieldValue), + operator, + }) + }) + + let _ = + await indiciesToLoad + ->Js.Array2.map(async index => { + inMemTable->InMemoryTable.Entity.addEmptyIndex(~index) + try { + let entities = await ( + persistence->Persistence.getInitializedStorageOrThrow + ).loadByFieldOrThrow( + ~operator=switch index { + | Single({operator: Gt}) => #">" + | Single({operator: Eq}) => #"=" + | Single({operator: Lt}) => #"<" + }, + ~table=entityConfig.table, + ~rowsSchema=entityConfig.rowsSchema, + ~fieldName=index->TableIndices.Index.getFieldName, + ~fieldValue=switch index { + | Single({fieldValue}) => fieldValue + }, + ~fieldSchema=fieldValueSchema->( + Utils.magic: S.t<'fieldValue> => S.t + ), + ) + + entities->Array.forEach(entity => { + //Set the entity in the in memory store + inMemTable->InMemoryTable.Entity.initValue( + ~allowOverWriteEntity=false, + ~key=Entities.getEntityId(entity), + ~entity=Some(entity), + ) + }) + + size := size.contents + entities->Array.length + } catch { + | Persistence.StorageError({message, reason}) => + reason->ErrorHandling.mkLogAndRaise( + ~logger=Logging.createChildFrom( + ~logger=item->Logging.getItemLogger, + ~params={ + "operator": operatorCallName, + "tableName": entityConfig.table.tableName, + "fieldName": fieldName, + "fieldValue": fieldValue, + }, + ), + ~msg=message, + ) + } + }) + ->Promise.all + + timerRef->Prometheus.StorageLoad.endOperation( + ~operation=key, + ~whereSize=fieldValues->Array.length, + ~size=size.contents, + ) + } + + 
loadManager->LoadManager.call( + ~key, + ~load, + ~input=fieldValue, + ~shouldGroup, + ~hasher=fieldValue => + fieldValue->TableIndices.FieldValue.castFrom->TableIndices.FieldValue.toString, + ~getUnsafeInMemory=inMemTable->InMemoryTable.Entity.getUnsafeOnIndex(~fieldName, ~operator), + ~hasInMemory=inMemTable->InMemoryTable.Entity.hasIndex(~fieldName, ~operator), + ) +} diff --git a/apps/hypersync-indexer/generated/src/LoadLayer.resi b/apps/hypersync-indexer/generated/src/LoadLayer.resi new file mode 100644 index 000000000..b63c53109 --- /dev/null +++ b/apps/hypersync-indexer/generated/src/LoadLayer.resi @@ -0,0 +1,32 @@ +let loadById: ( + ~loadManager: LoadManager.t, + ~persistence: Persistence.t, + ~entityConfig: Internal.entityConfig, + ~inMemoryStore: InMemoryStore.t, + ~shouldGroup: bool, + ~item: Internal.item, + ~entityId: string, +) => promise> + +let loadByField: ( + ~loadManager: LoadManager.t, + ~persistence: Persistence.t, + ~operator: TableIndices.Operator.t, + ~entityConfig: Internal.entityConfig, + ~inMemoryStore: InMemoryStore.t, + ~fieldName: string, + ~fieldValueSchema: RescriptSchema.S.t<'fieldValue>, + ~shouldGroup: bool, + ~item: Internal.item, + ~fieldValue: 'fieldValue, +) => promise> + +let loadEffect: ( + ~loadManager: LoadManager.t, + ~persistence: Persistence.t, + ~effect: Internal.effect, + ~effectArgs: Internal.effectArgs, + ~inMemoryStore: InMemoryStore.t, + ~shouldGroup: bool, + ~item: Internal.item, +) => promise diff --git a/apps/hypersync-indexer/generated/src/Path.res b/apps/hypersync-indexer/generated/src/Path.res new file mode 100644 index 000000000..6f6efb5c7 --- /dev/null +++ b/apps/hypersync-indexer/generated/src/Path.res @@ -0,0 +1 @@ +let relativePathToRootFromGenerated = "../." 
diff --git a/apps/hypersync-indexer/generated/src/PersistedState.res b/apps/hypersync-indexer/generated/src/PersistedState.res new file mode 100644 index 000000000..72673a5b7 --- /dev/null +++ b/apps/hypersync-indexer/generated/src/PersistedState.res @@ -0,0 +1,25 @@ +type t = { + @as("envio_version") envioVersion: string, + @as("config_hash") configHash: string, + @as("schema_hash") schemaHash: string, + @as("handler_files_hash") handlerFilesHash: string, + @as("abi_files_hash") abiFilesHash: string, +} + +let schema = S.schema(s => { + envioVersion: s.matches(S.string), + configHash: s.matches(S.string), + schemaHash: s.matches(S.string), + handlerFilesHash: s.matches(S.string), + abiFilesHash: s.matches(S.string), +}) + +external requireJson: string => Js.Json.t = "require" +let getPersistedState = () => + try { + let json = requireJson("../persisted_state.envio.json") + let parsed = json->S.parseJsonOrThrow(schema) + Ok(parsed) + } catch { + | exn => Error(exn) + } diff --git a/apps/hypersync-indexer/generated/src/TestHelpers.gen.ts b/apps/hypersync-indexer/generated/src/TestHelpers.gen.ts new file mode 100644 index 000000000..869abc857 --- /dev/null +++ b/apps/hypersync-indexer/generated/src/TestHelpers.gen.ts @@ -0,0 +1,188 @@ +/* TypeScript file generated from TestHelpers.res by genType. 
*/ + +/* eslint-disable */ +/* tslint:disable */ + +const TestHelpersJS = require('./TestHelpers.res.js'); + +import type {ENSGovernor_ProposalCanceled_event as Types_ENSGovernor_ProposalCanceled_event} from './Types.gen'; + +import type {ENSGovernor_ProposalCreated_event as Types_ENSGovernor_ProposalCreated_event} from './Types.gen'; + +import type {ENSGovernor_ProposalExecuted_event as Types_ENSGovernor_ProposalExecuted_event} from './Types.gen'; + +import type {ENSGovernor_ProposalQueued_event as Types_ENSGovernor_ProposalQueued_event} from './Types.gen'; + +import type {ENSGovernor_VoteCast_event as Types_ENSGovernor_VoteCast_event} from './Types.gen'; + +import type {ENSToken_DelegateChanged_event as Types_ENSToken_DelegateChanged_event} from './Types.gen'; + +import type {ENSToken_DelegateVotesChanged_event as Types_ENSToken_DelegateVotesChanged_event} from './Types.gen'; + +import type {ENSToken_Transfer_event as Types_ENSToken_Transfer_event} from './Types.gen'; + +import type {t as Address_t} from 'envio/src/Address.gen'; + +import type {t as TestHelpers_MockDb_t} from './TestHelpers_MockDb.gen'; + +/** The arguements that get passed to a "processEvent" helper function */ +export type EventFunctions_eventProcessorArgs = { + readonly event: event; + readonly mockDb: TestHelpers_MockDb_t; + readonly chainId?: number +}; + +export type EventFunctions_eventProcessor = (_1:EventFunctions_eventProcessorArgs) => Promise; + +export type EventFunctions_MockBlock_t = { + readonly hash?: string; + readonly number?: number; + readonly timestamp?: number +}; + +export type EventFunctions_MockTransaction_t = { + readonly from?: (undefined | Address_t); + readonly hash?: string; + readonly to?: (undefined | Address_t) +}; + +export type EventFunctions_mockEventData = { + readonly chainId?: number; + readonly srcAddress?: Address_t; + readonly logIndex?: number; + readonly block?: EventFunctions_MockBlock_t; + readonly transaction?: EventFunctions_MockTransaction_t +}; + 
+export type ENSGovernor_ProposalCreated_createMockArgs = { + readonly proposalId?: bigint; + readonly proposer?: Address_t; + readonly targets?: Address_t[]; + readonly values?: bigint[]; + readonly signatures?: string[]; + readonly calldatas?: string[]; + readonly startBlock?: bigint; + readonly endBlock?: bigint; + readonly description?: string; + readonly mockEventData?: EventFunctions_mockEventData +}; + +export type ENSGovernor_VoteCast_createMockArgs = { + readonly voter?: Address_t; + readonly proposalId?: bigint; + readonly support?: bigint; + readonly weight?: bigint; + readonly reason?: string; + readonly mockEventData?: EventFunctions_mockEventData +}; + +export type ENSGovernor_ProposalCanceled_createMockArgs = { readonly proposalId?: bigint; readonly mockEventData?: EventFunctions_mockEventData }; + +export type ENSGovernor_ProposalExecuted_createMockArgs = { readonly proposalId?: bigint; readonly mockEventData?: EventFunctions_mockEventData }; + +export type ENSGovernor_ProposalQueued_createMockArgs = { + readonly proposalId?: bigint; + readonly eta?: bigint; + readonly mockEventData?: EventFunctions_mockEventData +}; + +export type ENSToken_Transfer_createMockArgs = { + readonly from?: Address_t; + readonly to?: Address_t; + readonly value?: bigint; + readonly mockEventData?: EventFunctions_mockEventData +}; + +export type ENSToken_DelegateChanged_createMockArgs = { + readonly delegator?: Address_t; + readonly fromDelegate?: Address_t; + readonly toDelegate?: Address_t; + readonly mockEventData?: EventFunctions_mockEventData +}; + +export type ENSToken_DelegateVotesChanged_createMockArgs = { + readonly delegate?: Address_t; + readonly previousBalance?: bigint; + readonly newBalance?: bigint; + readonly mockEventData?: EventFunctions_mockEventData +}; + +export const MockDb_createMockDb: () => TestHelpers_MockDb_t = TestHelpersJS.MockDb.createMockDb as any; + +export const Addresses_mockAddresses: Address_t[] = TestHelpersJS.Addresses.mockAddresses 
as any; + +export const Addresses_defaultAddress: Address_t = TestHelpersJS.Addresses.defaultAddress as any; + +export const ENSGovernor_ProposalCreated_processEvent: EventFunctions_eventProcessor = TestHelpersJS.ENSGovernor.ProposalCreated.processEvent as any; + +export const ENSGovernor_ProposalCreated_createMockEvent: (args:ENSGovernor_ProposalCreated_createMockArgs) => Types_ENSGovernor_ProposalCreated_event = TestHelpersJS.ENSGovernor.ProposalCreated.createMockEvent as any; + +export const ENSGovernor_VoteCast_processEvent: EventFunctions_eventProcessor = TestHelpersJS.ENSGovernor.VoteCast.processEvent as any; + +export const ENSGovernor_VoteCast_createMockEvent: (args:ENSGovernor_VoteCast_createMockArgs) => Types_ENSGovernor_VoteCast_event = TestHelpersJS.ENSGovernor.VoteCast.createMockEvent as any; + +export const ENSGovernor_ProposalCanceled_processEvent: EventFunctions_eventProcessor = TestHelpersJS.ENSGovernor.ProposalCanceled.processEvent as any; + +export const ENSGovernor_ProposalCanceled_createMockEvent: (args:ENSGovernor_ProposalCanceled_createMockArgs) => Types_ENSGovernor_ProposalCanceled_event = TestHelpersJS.ENSGovernor.ProposalCanceled.createMockEvent as any; + +export const ENSGovernor_ProposalExecuted_processEvent: EventFunctions_eventProcessor = TestHelpersJS.ENSGovernor.ProposalExecuted.processEvent as any; + +export const ENSGovernor_ProposalExecuted_createMockEvent: (args:ENSGovernor_ProposalExecuted_createMockArgs) => Types_ENSGovernor_ProposalExecuted_event = TestHelpersJS.ENSGovernor.ProposalExecuted.createMockEvent as any; + +export const ENSGovernor_ProposalQueued_processEvent: EventFunctions_eventProcessor = TestHelpersJS.ENSGovernor.ProposalQueued.processEvent as any; + +export const ENSGovernor_ProposalQueued_createMockEvent: (args:ENSGovernor_ProposalQueued_createMockArgs) => Types_ENSGovernor_ProposalQueued_event = TestHelpersJS.ENSGovernor.ProposalQueued.createMockEvent as any; + +export const ENSToken_Transfer_processEvent: 
EventFunctions_eventProcessor = TestHelpersJS.ENSToken.Transfer.processEvent as any; + +export const ENSToken_Transfer_createMockEvent: (args:ENSToken_Transfer_createMockArgs) => Types_ENSToken_Transfer_event = TestHelpersJS.ENSToken.Transfer.createMockEvent as any; + +export const ENSToken_DelegateChanged_processEvent: EventFunctions_eventProcessor = TestHelpersJS.ENSToken.DelegateChanged.processEvent as any; + +export const ENSToken_DelegateChanged_createMockEvent: (args:ENSToken_DelegateChanged_createMockArgs) => Types_ENSToken_DelegateChanged_event = TestHelpersJS.ENSToken.DelegateChanged.createMockEvent as any; + +export const ENSToken_DelegateVotesChanged_processEvent: EventFunctions_eventProcessor = TestHelpersJS.ENSToken.DelegateVotesChanged.processEvent as any; + +export const ENSToken_DelegateVotesChanged_createMockEvent: (args:ENSToken_DelegateVotesChanged_createMockArgs) => Types_ENSToken_DelegateVotesChanged_event = TestHelpersJS.ENSToken.DelegateVotesChanged.createMockEvent as any; + +export const Addresses: { mockAddresses: Address_t[]; defaultAddress: Address_t } = TestHelpersJS.Addresses as any; + +export const ENSGovernor: { + VoteCast: { + processEvent: EventFunctions_eventProcessor; + createMockEvent: (args:ENSGovernor_VoteCast_createMockArgs) => Types_ENSGovernor_VoteCast_event + }; + ProposalQueued: { + processEvent: EventFunctions_eventProcessor; + createMockEvent: (args:ENSGovernor_ProposalQueued_createMockArgs) => Types_ENSGovernor_ProposalQueued_event + }; + ProposalCreated: { + processEvent: EventFunctions_eventProcessor; + createMockEvent: (args:ENSGovernor_ProposalCreated_createMockArgs) => Types_ENSGovernor_ProposalCreated_event + }; + ProposalCanceled: { + processEvent: EventFunctions_eventProcessor; + createMockEvent: (args:ENSGovernor_ProposalCanceled_createMockArgs) => Types_ENSGovernor_ProposalCanceled_event + }; + ProposalExecuted: { + processEvent: EventFunctions_eventProcessor; + createMockEvent: 
(args:ENSGovernor_ProposalExecuted_createMockArgs) => Types_ENSGovernor_ProposalExecuted_event + } +} = TestHelpersJS.ENSGovernor as any; + +export const MockDb: { createMockDb: () => TestHelpers_MockDb_t } = TestHelpersJS.MockDb as any; + +export const ENSToken: { + Transfer: { + processEvent: EventFunctions_eventProcessor; + createMockEvent: (args:ENSToken_Transfer_createMockArgs) => Types_ENSToken_Transfer_event + }; + DelegateChanged: { + processEvent: EventFunctions_eventProcessor; + createMockEvent: (args:ENSToken_DelegateChanged_createMockArgs) => Types_ENSToken_DelegateChanged_event + }; + DelegateVotesChanged: { + processEvent: EventFunctions_eventProcessor; + createMockEvent: (args:ENSToken_DelegateVotesChanged_createMockArgs) => Types_ENSToken_DelegateVotesChanged_event + } +} = TestHelpersJS.ENSToken as any; diff --git a/apps/hypersync-indexer/generated/src/TestHelpers.res b/apps/hypersync-indexer/generated/src/TestHelpers.res new file mode 100644 index 000000000..a54495021 --- /dev/null +++ b/apps/hypersync-indexer/generated/src/TestHelpers.res @@ -0,0 +1,504 @@ +/***** TAKE NOTE ****** +This is a hack to get genType to work! + +In order for genType to produce recursive types, it needs to be at the +root module of a file. If it's defined in a nested module it does not +work. So all the MockDb types and internal functions are defined in TestHelpers_MockDb +and only public functions are recreated and exported from this module. 
+ +the following module: +```rescript +module MyModule = { + @genType + type rec a = {fieldB: b} + @genType and b = {fieldA: a} +} +``` + +produces the following in ts: +```ts +// tslint:disable-next-line:interface-over-type-literal +export type MyModule_a = { readonly fieldB: b }; + +// tslint:disable-next-line:interface-over-type-literal +export type MyModule_b = { readonly fieldA: MyModule_a }; +``` + +fieldB references type b which doesn't exist because it's defined +as MyModule_b +*/ + +module MockDb = { + @genType + let createMockDb = TestHelpers_MockDb.createMockDb +} + +@genType +module Addresses = { + include TestHelpers_MockAddresses +} + +module EventFunctions = { + //Note these are made into a record to make operate in the same way + //for Res, JS and TS. + + /** + The arguements that get passed to a "processEvent" helper function + */ + @genType + type eventProcessorArgs<'event> = { + event: 'event, + mockDb: TestHelpers_MockDb.t, + @deprecated("Set the chainId for the event instead") + chainId?: int, + } + + @genType + type eventProcessor<'event> = eventProcessorArgs<'event> => promise + + /** + A function composer to help create individual processEvent functions + */ + let makeEventProcessor = (~register) => args => { + let {event, mockDb, ?chainId} = + args->(Utils.magic: eventProcessorArgs<'event> => eventProcessorArgs) + + // Have the line here, just in case the function is called with + // a manually created event. We don't want to break the existing tests here. 
+ let _ = + TestHelpers_MockDb.mockEventRegisters->Utils.WeakMap.set(event, register) + TestHelpers_MockDb.makeProcessEvents(mockDb, ~chainId=?chainId)([event->(Utils.magic: Internal.event => Types.eventLog)]) + } + + module MockBlock = { + @genType + type t = { + @as("hash") hash?: string, + @as("number") number?: int, + @as("timestamp") timestamp?: int, + } + + let toBlock = (_mock: t) => { + hash: _mock.hash->Belt.Option.getWithDefault("foo"), + number: _mock.number->Belt.Option.getWithDefault(0), + timestamp: _mock.timestamp->Belt.Option.getWithDefault(0), + }->(Utils.magic: Types.AggregatedBlock.t => Internal.eventBlock) + } + + module MockTransaction = { + @genType + type t = { + @as("from") from?: option, + @as("hash") hash?: string, + @as("to") to?: option, + } + + let toTransaction = (_mock: t) => { + from: _mock.from->Belt.Option.getWithDefault(None), + hash: _mock.hash->Belt.Option.getWithDefault("foo"), + to: _mock.to->Belt.Option.getWithDefault(None), + }->(Utils.magic: Types.AggregatedTransaction.t => Internal.eventTransaction) + } + + @genType + type mockEventData = { + chainId?: int, + srcAddress?: Address.t, + logIndex?: int, + block?: MockBlock.t, + transaction?: MockTransaction.t, + } + + /** + Applies optional paramters with defaults for all common eventLog field + */ + let makeEventMocker = ( + ~params: Internal.eventParams, + ~mockEventData: option, + ~register: unit => Internal.eventConfig, + ): Internal.event => { + let {?block, ?transaction, ?srcAddress, ?chainId, ?logIndex} = + mockEventData->Belt.Option.getWithDefault({}) + let block = block->Belt.Option.getWithDefault({})->MockBlock.toBlock + let transaction = transaction->Belt.Option.getWithDefault({})->MockTransaction.toTransaction + let event: Internal.event = { + params, + transaction, + chainId: switch chainId { + | Some(chainId) => chainId + | None => + switch Generated.configWithoutRegistrations.defaultChain { + | Some(chainConfig) => chainConfig.id + | None => + 
Js.Exn.raiseError( + "No default chain Id found, please add at least 1 chain to your config.yaml", + ) + } + }, + block, + srcAddress: srcAddress->Belt.Option.getWithDefault(Addresses.defaultAddress), + logIndex: logIndex->Belt.Option.getWithDefault(0), + } + // Since currently it's not possible to figure out the event config from the event + // we store a reference to the register function by event in a weak map + let _ = TestHelpers_MockDb.mockEventRegisters->Utils.WeakMap.set(event, register) + event + } +} + + +module ENSGovernor = { + module ProposalCreated = { + @genType + let processEvent: EventFunctions.eventProcessor = EventFunctions.makeEventProcessor( + ~register=(Types.ENSGovernor.ProposalCreated.register :> unit => Internal.eventConfig), + ) + + @genType + type createMockArgs = { + @as("proposalId") + proposalId?: bigint, + @as("proposer") + proposer?: Address.t, + @as("targets") + targets?: array, + @as("values") + values?: array, + @as("signatures") + signatures?: array, + @as("calldatas") + calldatas?: array, + @as("startBlock") + startBlock?: bigint, + @as("endBlock") + endBlock?: bigint, + @as("description") + description?: string, + mockEventData?: EventFunctions.mockEventData, + } + + @genType + let createMockEvent = args => { + let { + ?proposalId, + ?proposer, + ?targets, + ?values, + ?signatures, + ?calldatas, + ?startBlock, + ?endBlock, + ?description, + ?mockEventData, + } = args + + let params = + { + proposalId: proposalId->Belt.Option.getWithDefault(0n), + proposer: proposer->Belt.Option.getWithDefault(TestHelpers_MockAddresses.defaultAddress), + targets: targets->Belt.Option.getWithDefault([]), + values: values->Belt.Option.getWithDefault([]), + signatures: signatures->Belt.Option.getWithDefault([]), + calldatas: calldatas->Belt.Option.getWithDefault([]), + startBlock: startBlock->Belt.Option.getWithDefault(0n), + endBlock: endBlock->Belt.Option.getWithDefault(0n), + description: description->Belt.Option.getWithDefault("foo"), + } 
+->(Utils.magic: Types.ENSGovernor.ProposalCreated.eventArgs => Internal.eventParams) + + EventFunctions.makeEventMocker( + ~params, + ~mockEventData, + ~register=(Types.ENSGovernor.ProposalCreated.register :> unit => Internal.eventConfig), + )->(Utils.magic: Internal.event => Types.ENSGovernor.ProposalCreated.event) + } + } + + module VoteCast = { + @genType + let processEvent: EventFunctions.eventProcessor = EventFunctions.makeEventProcessor( + ~register=(Types.ENSGovernor.VoteCast.register :> unit => Internal.eventConfig), + ) + + @genType + type createMockArgs = { + @as("voter") + voter?: Address.t, + @as("proposalId") + proposalId?: bigint, + @as("support") + support?: bigint, + @as("weight") + weight?: bigint, + @as("reason") + reason?: string, + mockEventData?: EventFunctions.mockEventData, + } + + @genType + let createMockEvent = args => { + let { + ?voter, + ?proposalId, + ?support, + ?weight, + ?reason, + ?mockEventData, + } = args + + let params = + { + voter: voter->Belt.Option.getWithDefault(TestHelpers_MockAddresses.defaultAddress), + proposalId: proposalId->Belt.Option.getWithDefault(0n), + support: support->Belt.Option.getWithDefault(0n), + weight: weight->Belt.Option.getWithDefault(0n), + reason: reason->Belt.Option.getWithDefault("foo"), + } +->(Utils.magic: Types.ENSGovernor.VoteCast.eventArgs => Internal.eventParams) + + EventFunctions.makeEventMocker( + ~params, + ~mockEventData, + ~register=(Types.ENSGovernor.VoteCast.register :> unit => Internal.eventConfig), + )->(Utils.magic: Internal.event => Types.ENSGovernor.VoteCast.event) + } + } + + module ProposalCanceled = { + @genType + let processEvent: EventFunctions.eventProcessor = EventFunctions.makeEventProcessor( + ~register=(Types.ENSGovernor.ProposalCanceled.register :> unit => Internal.eventConfig), + ) + + @genType + type createMockArgs = { + @as("proposalId") + proposalId?: bigint, + mockEventData?: EventFunctions.mockEventData, + } + + @genType + let createMockEvent = args => { + let { 
+ ?proposalId, + ?mockEventData, + } = args + + let params = + { + proposalId: proposalId->Belt.Option.getWithDefault(0n), + } +->(Utils.magic: Types.ENSGovernor.ProposalCanceled.eventArgs => Internal.eventParams) + + EventFunctions.makeEventMocker( + ~params, + ~mockEventData, + ~register=(Types.ENSGovernor.ProposalCanceled.register :> unit => Internal.eventConfig), + )->(Utils.magic: Internal.event => Types.ENSGovernor.ProposalCanceled.event) + } + } + + module ProposalExecuted = { + @genType + let processEvent: EventFunctions.eventProcessor = EventFunctions.makeEventProcessor( + ~register=(Types.ENSGovernor.ProposalExecuted.register :> unit => Internal.eventConfig), + ) + + @genType + type createMockArgs = { + @as("proposalId") + proposalId?: bigint, + mockEventData?: EventFunctions.mockEventData, + } + + @genType + let createMockEvent = args => { + let { + ?proposalId, + ?mockEventData, + } = args + + let params = + { + proposalId: proposalId->Belt.Option.getWithDefault(0n), + } +->(Utils.magic: Types.ENSGovernor.ProposalExecuted.eventArgs => Internal.eventParams) + + EventFunctions.makeEventMocker( + ~params, + ~mockEventData, + ~register=(Types.ENSGovernor.ProposalExecuted.register :> unit => Internal.eventConfig), + )->(Utils.magic: Internal.event => Types.ENSGovernor.ProposalExecuted.event) + } + } + + module ProposalQueued = { + @genType + let processEvent: EventFunctions.eventProcessor = EventFunctions.makeEventProcessor( + ~register=(Types.ENSGovernor.ProposalQueued.register :> unit => Internal.eventConfig), + ) + + @genType + type createMockArgs = { + @as("proposalId") + proposalId?: bigint, + @as("eta") + eta?: bigint, + mockEventData?: EventFunctions.mockEventData, + } + + @genType + let createMockEvent = args => { + let { + ?proposalId, + ?eta, + ?mockEventData, + } = args + + let params = + { + proposalId: proposalId->Belt.Option.getWithDefault(0n), + eta: eta->Belt.Option.getWithDefault(0n), + } +->(Utils.magic: 
Types.ENSGovernor.ProposalQueued.eventArgs => Internal.eventParams) + + EventFunctions.makeEventMocker( + ~params, + ~mockEventData, + ~register=(Types.ENSGovernor.ProposalQueued.register :> unit => Internal.eventConfig), + )->(Utils.magic: Internal.event => Types.ENSGovernor.ProposalQueued.event) + } + } + +} + + +module ENSToken = { + module Transfer = { + @genType + let processEvent: EventFunctions.eventProcessor = EventFunctions.makeEventProcessor( + ~register=(Types.ENSToken.Transfer.register :> unit => Internal.eventConfig), + ) + + @genType + type createMockArgs = { + @as("from") + from?: Address.t, + @as("to") + to?: Address.t, + @as("value") + value?: bigint, + mockEventData?: EventFunctions.mockEventData, + } + + @genType + let createMockEvent = args => { + let { + ?from, + ?to, + ?value, + ?mockEventData, + } = args + + let params = + { + from: from->Belt.Option.getWithDefault(TestHelpers_MockAddresses.defaultAddress), + to: to->Belt.Option.getWithDefault(TestHelpers_MockAddresses.defaultAddress), + value: value->Belt.Option.getWithDefault(0n), + } +->(Utils.magic: Types.ENSToken.Transfer.eventArgs => Internal.eventParams) + + EventFunctions.makeEventMocker( + ~params, + ~mockEventData, + ~register=(Types.ENSToken.Transfer.register :> unit => Internal.eventConfig), + )->(Utils.magic: Internal.event => Types.ENSToken.Transfer.event) + } + } + + module DelegateChanged = { + @genType + let processEvent: EventFunctions.eventProcessor = EventFunctions.makeEventProcessor( + ~register=(Types.ENSToken.DelegateChanged.register :> unit => Internal.eventConfig), + ) + + @genType + type createMockArgs = { + @as("delegator") + delegator?: Address.t, + @as("fromDelegate") + fromDelegate?: Address.t, + @as("toDelegate") + toDelegate?: Address.t, + mockEventData?: EventFunctions.mockEventData, + } + + @genType + let createMockEvent = args => { + let { + ?delegator, + ?fromDelegate, + ?toDelegate, + ?mockEventData, + } = args + + let params = + { + delegator: 
delegator->Belt.Option.getWithDefault(TestHelpers_MockAddresses.defaultAddress), + fromDelegate: fromDelegate->Belt.Option.getWithDefault(TestHelpers_MockAddresses.defaultAddress), + toDelegate: toDelegate->Belt.Option.getWithDefault(TestHelpers_MockAddresses.defaultAddress), + } +->(Utils.magic: Types.ENSToken.DelegateChanged.eventArgs => Internal.eventParams) + + EventFunctions.makeEventMocker( + ~params, + ~mockEventData, + ~register=(Types.ENSToken.DelegateChanged.register :> unit => Internal.eventConfig), + )->(Utils.magic: Internal.event => Types.ENSToken.DelegateChanged.event) + } + } + + module DelegateVotesChanged = { + @genType + let processEvent: EventFunctions.eventProcessor = EventFunctions.makeEventProcessor( + ~register=(Types.ENSToken.DelegateVotesChanged.register :> unit => Internal.eventConfig), + ) + + @genType + type createMockArgs = { + @as("delegate") + delegate?: Address.t, + @as("previousBalance") + previousBalance?: bigint, + @as("newBalance") + newBalance?: bigint, + mockEventData?: EventFunctions.mockEventData, + } + + @genType + let createMockEvent = args => { + let { + ?delegate, + ?previousBalance, + ?newBalance, + ?mockEventData, + } = args + + let params = + { + delegate: delegate->Belt.Option.getWithDefault(TestHelpers_MockAddresses.defaultAddress), + previousBalance: previousBalance->Belt.Option.getWithDefault(0n), + newBalance: newBalance->Belt.Option.getWithDefault(0n), + } +->(Utils.magic: Types.ENSToken.DelegateVotesChanged.eventArgs => Internal.eventParams) + + EventFunctions.makeEventMocker( + ~params, + ~mockEventData, + ~register=(Types.ENSToken.DelegateVotesChanged.register :> unit => Internal.eventConfig), + )->(Utils.magic: Internal.event => Types.ENSToken.DelegateVotesChanged.event) + } + } + +} + diff --git a/apps/hypersync-indexer/generated/src/TestHelpers_MockAddresses.gen.ts b/apps/hypersync-indexer/generated/src/TestHelpers_MockAddresses.gen.ts new file mode 100644 index 000000000..5670668e1 --- /dev/null +++ 
b/apps/hypersync-indexer/generated/src/TestHelpers_MockAddresses.gen.ts @@ -0,0 +1,12 @@ +/* TypeScript file generated from TestHelpers_MockAddresses.res by genType. */ + +/* eslint-disable */ +/* tslint:disable */ + +const TestHelpers_MockAddressesJS = require('./TestHelpers_MockAddresses.res.js'); + +import type {t as Address_t} from 'envio/src/Address.gen'; + +export const mockAddresses: Address_t[] = TestHelpers_MockAddressesJS.mockAddresses as any; + +export const defaultAddress: Address_t = TestHelpers_MockAddressesJS.defaultAddress as any; diff --git a/apps/hypersync-indexer/generated/src/TestHelpers_MockAddresses.res b/apps/hypersync-indexer/generated/src/TestHelpers_MockAddresses.res new file mode 100644 index 000000000..81b041358 --- /dev/null +++ b/apps/hypersync-indexer/generated/src/TestHelpers_MockAddresses.res @@ -0,0 +1,30 @@ +/* +Note this file should remain top level since there are default types +that can point to TestHelpers_MockAddresses.defaultAddress +*/ +@genType +let mockAddresses = [ + "0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266", + "0x70997970C51812dc3A010C7d01b50e0d17dc79C8", + "0x3C44CdDdB6a900fa2b585dd299e03d12FA4293BC", + "0x90F79bf6EB2c4f870365E785982E1f101E93b906", + "0x15d34AAf54267DB7D7c367839AAf71A00a2C6A65", + "0x9965507D1a55bcC2695C58ba16FB37d819B0A4dc", + "0x976EA74026E726554dB657fA54763abd0C3a0aa9", + "0x14dC79964da2C08b23698B3D3cc7Ca32193d9955", + "0x23618e81E3f5cdF7f54C3d65f7FBc0aBf5B21E8f", + "0xa0Ee7A142d267C1f36714E4a8F75612F20a79720", + "0xBcd4042DE499D14e55001CcbB24a551F3b954096", + "0x71bE63f3384f5fb98995898A86B02Fb2426c5788", + "0xFABB0ac9d68B0B445fB7357272Ff202C5651694a", + "0x1CBd3b2770909D4e10f157cABC84C7264073C9Ec", + "0xdF3e18d64BC6A983f673Ab319CCaE4f1a57C7097", + "0xcd3B766CCDd6AE721141F452C550Ca635964ce71", + "0x2546BcD3c84621e976D8185a91A922aE77ECEc30", + "0xbDA5747bFD65F08deb54cb465eB87D40e51B197E", + "0xdD2FD4581271e230360230F9337D5c0430Bf44C0", + "0x8626f6940E2eb28930eFb4CeF49B2d1F2C9C1199", 
+]->Belt.Array.map(Address.Evm.fromStringOrThrow) +@genType +let defaultAddress = + mockAddresses[0] diff --git a/apps/hypersync-indexer/generated/src/TestHelpers_MockDb.gen.ts b/apps/hypersync-indexer/generated/src/TestHelpers_MockDb.gen.ts new file mode 100644 index 000000000..960ca1393 --- /dev/null +++ b/apps/hypersync-indexer/generated/src/TestHelpers_MockDb.gen.ts @@ -0,0 +1,87 @@ +/* TypeScript file generated from TestHelpers_MockDb.res by genType. */ + +/* eslint-disable */ +/* tslint:disable */ + +const TestHelpers_MockDbJS = require('./TestHelpers_MockDb.res.js'); + +import type {AccountBalance_t as Entities_AccountBalance_t} from '../src/db/Entities.gen'; + +import type {AccountPower_t as Entities_AccountPower_t} from '../src/db/Entities.gen'; + +import type {Account_t as Entities_Account_t} from '../src/db/Entities.gen'; + +import type {BalanceHistory_t as Entities_BalanceHistory_t} from '../src/db/Entities.gen'; + +import type {DaoMetricsDayBucket_t as Entities_DaoMetricsDayBucket_t} from '../src/db/Entities.gen'; + +import type {Delegation_t as Entities_Delegation_t} from '../src/db/Entities.gen'; + +import type {DynamicContractRegistry_t as InternalTable_DynamicContractRegistry_t} from 'envio/src/db/InternalTable.gen'; + +import type {FeedEvent_t as Entities_FeedEvent_t} from '../src/db/Entities.gen'; + +import type {ProposalOnchain_t as Entities_ProposalOnchain_t} from '../src/db/Entities.gen'; + +import type {RawEvents_t as InternalTable_RawEvents_t} from 'envio/src/db/InternalTable.gen'; + +import type {TokenPrice_t as Entities_TokenPrice_t} from '../src/db/Entities.gen'; + +import type {Token_t as Entities_Token_t} from '../src/db/Entities.gen'; + +import type {Transaction_t as Entities_Transaction_t} from '../src/db/Entities.gen'; + +import type {Transfer_t as Entities_Transfer_t} from '../src/db/Entities.gen'; + +import type {VoteOnchain_t as Entities_VoteOnchain_t} from '../src/db/Entities.gen'; + +import type {VotingPowerHistory_t as 
Entities_VotingPowerHistory_t} from '../src/db/Entities.gen'; + +import type {eventLog as Types_eventLog} from './Types.gen'; + +import type {rawEventsKey as InMemoryStore_rawEventsKey} from 'envio/src/InMemoryStore.gen'; + +/** The mockDb type is simply an InMemoryStore internally. __dbInternal__ holds a reference +to an inMemoryStore and all the the accessor methods point to the reference of that inMemory +store */ +export abstract class inMemoryStore { protected opaque!: any }; /* simulate opaque types */ + +export type t = { + readonly __dbInternal__: inMemoryStore; + readonly entities: entities; + readonly rawEvents: storeOperations; + readonly dynamicContractRegistry: entityStoreOperations; + readonly processEvents: (_1:Types_eventLog[]) => Promise +}; + +export type entities = { + readonly Account: entityStoreOperations; + readonly AccountBalance: entityStoreOperations; + readonly AccountPower: entityStoreOperations; + readonly BalanceHistory: entityStoreOperations; + readonly DaoMetricsDayBucket: entityStoreOperations; + readonly Delegation: entityStoreOperations; + readonly FeedEvent: entityStoreOperations; + readonly ProposalOnchain: entityStoreOperations; + readonly Token: entityStoreOperations; + readonly TokenPrice: entityStoreOperations; + readonly Transaction: entityStoreOperations; + readonly Transfer: entityStoreOperations; + readonly VoteOnchain: entityStoreOperations; + readonly VotingPowerHistory: entityStoreOperations +}; + +export type entityStoreOperations = storeOperations; + +export type storeOperations = { + readonly getAll: () => entity[]; + readonly get: (_1:entityKey) => (undefined | entity); + readonly set: (_1:entity) => t; + readonly delete: (_1:entityKey) => t +}; + +/** The constructor function for a mockDb. Call it and then set up the inital state by calling +any of the set functions it provides access to. A mockDb will be passed into a processEvent +helper. 
Note, process event helpers will not mutate the mockDb but return a new mockDb with +new state so you can compare states before and after. */ +export const createMockDb: () => t = TestHelpers_MockDbJS.createMockDb as any; diff --git a/apps/hypersync-indexer/generated/src/TestHelpers_MockDb.res b/apps/hypersync-indexer/generated/src/TestHelpers_MockDb.res new file mode 100644 index 000000000..6b0809ae5 --- /dev/null +++ b/apps/hypersync-indexer/generated/src/TestHelpers_MockDb.res @@ -0,0 +1,820 @@ + +/***** TAKE NOTE ****** +This file module is a hack to get genType to work! + +In order for genType to produce recursive types, it needs to be at the +root module of a file. If it's defined in a nested module it does not +work. So all the MockDb types and internal functions are defined here in TestHelpers_MockDb +and only public functions are recreated and exported from TestHelpers.MockDb module. + +the following module: +```rescript +module MyModule = { + @genType + type rec a = {fieldB: b} + @genType and b = {fieldA: a} +} +``` + +produces the following in ts: +```ts +// tslint:disable-next-line:interface-over-type-literal +export type MyModule_a = { readonly fieldB: b }; + +// tslint:disable-next-line:interface-over-type-literal +export type MyModule_b = { readonly fieldA: MyModule_a }; +``` + +fieldB references type b which doesn't exist because it's defined +as MyModule_b +*/ + +open Belt + +let mockEventRegisters = Utils.WeakMap.make() + +/** +A raw js binding to allow deleting from a dict. Used in store delete operation +*/ +let deleteDictKey: (dict<'a>, string) => unit = %raw(` + function(dict, key) { + delete dict[key] + } + `) + +let config = Generated.configWithoutRegistrations +EventRegister.startRegistration( + ~ecosystem=config.ecosystem, + ~multichain=config.multichain, + ~preloadHandlers=config.preloadHandlers, +) + +/** +The mockDb type is simply an InMemoryStore internally. 
__dbInternal__ holds a reference +to an inMemoryStore and all the the accessor methods point to the reference of that inMemory +store +*/ +@genType.opaque +type inMemoryStore = InMemoryStore.t + +@genType +type rec t = { + __dbInternal__: inMemoryStore, + entities: entities, + rawEvents: storeOperations, + dynamicContractRegistry: entityStoreOperations, + processEvents: array> => promise, +} + +// Each user defined entity will be in this record with all the store or "mockdb" operators +@genType +and entities = { + @as("Account") account: entityStoreOperations, + @as("AccountBalance") accountBalance: entityStoreOperations, + @as("AccountPower") accountPower: entityStoreOperations, + @as("BalanceHistory") balanceHistory: entityStoreOperations, + @as("DaoMetricsDayBucket") daoMetricsDayBucket: entityStoreOperations, + @as("Delegation") delegation: entityStoreOperations, + @as("FeedEvent") feedEvent: entityStoreOperations, + @as("ProposalOnchain") proposalOnchain: entityStoreOperations, + @as("Token") token: entityStoreOperations, + @as("TokenPrice") tokenPrice: entityStoreOperations, + @as("Transaction") transaction: entityStoreOperations, + @as("Transfer") transfer: entityStoreOperations, + @as("VoteOnchain") voteOnchain: entityStoreOperations, + @as("VotingPowerHistory") votingPowerHistory: entityStoreOperations, + } +// User defined entities always have a string for an id which is used as the +// key for entity stores +@genType +and entityStoreOperations<'entity> = storeOperations +// all the operator functions a user can access on an entity in the mock db +// stores refer to the the module that MakeStore functor outputs in IO.res +@genType +and storeOperations<'entityKey, 'entity> = { + getAll: unit => array<'entity>, + get: 'entityKey => option<'entity>, + set: 'entity => t, + delete: 'entityKey => t, +} + +/** +a composable function to make the "storeOperations" record to represent all the mock +db operations for each entity. 
+*/ +let makeStoreOperatorEntity = ( + ~inMemoryStore: InMemoryStore.t, + ~makeMockDb, + ~getStore: InMemoryStore.t => InMemoryTable.Entity.t<'entity>, + ~getKey: 'entity => Types.id, +): storeOperations => { + let {getUnsafe, values, set} = module(InMemoryTable.Entity) + + let get = id => { + let store = inMemoryStore->getStore + if store.table->InMemoryTable.hasByHash(id) { + getUnsafe(store)(id) + } else { + None + } + } + + let getAll = () => + inMemoryStore + ->getStore + ->values + + let delete = entityId => { + let cloned = inMemoryStore->InMemoryStore.clone + let table = cloned->getStore + + table->set( + { + entityId, + entityUpdateAction: Delete, + checkpointId: 0, + }, + ~shouldSaveHistory=false, + ) + + cloned->makeMockDb + } + + let set = entity => { + let cloned = inMemoryStore->InMemoryStore.clone + let table = cloned->getStore + let entityId = entity->getKey + + table->set( + { + entityId, + entityUpdateAction: Set(entity), + checkpointId: 0, + }, + ~shouldSaveHistory=false, + ) + + cloned->makeMockDb + } + + { + getAll, + get, + set, + delete, + } +} + +let makeStoreOperatorMeta = ( + ~inMemoryStore: InMemoryStore.t, + ~makeMockDb, + ~getStore: InMemoryStore.t => InMemoryTable.t<'key, 'value>, + ~getKey: 'value => 'key, +): storeOperations<'key, 'value> => { + let {get, values, set} = module(InMemoryTable) + + let get = id => get(inMemoryStore->getStore, id) + + let getAll = () => inMemoryStore->getStore->values->Array.map(row => row) + + let set = metaData => { + let cloned = inMemoryStore->InMemoryStore.clone + cloned->getStore->set(metaData->getKey, metaData) + cloned->makeMockDb + } + + // TODO: Remove. Is delete needed for meta data? 
+ let delete = key => { + let cloned = inMemoryStore->InMemoryStore.clone + let store = cloned->getStore + store.dict->deleteDictKey(key->store.hash) + cloned->makeMockDb + } + + { + getAll, + get, + set, + delete, + } +} + +/** +Accessor function for getting the internal inMemoryStore in the mockDb +*/ +let getInternalDb = (self: t) => self.__dbInternal__ + +let getEntityOperations = (mockDb: t, ~entityName: string): entityStoreOperations< + Internal.entity, +> => { + mockDb.entities + ->Utils.magic + ->Utils.Dict.dangerouslyGetNonOption(entityName) + ->Utils.Option.getExn("Mocked operations for entity " ++ entityName ++ " not found") +} + +/** +A function composer for simulating the writing of an inMemoryStore to the external db with a mockDb. +Runs all set and delete operations currently cached in an inMemory store against the mockDb +*/ +let executeRowsEntity = ( + mockDb: t, + ~inMemoryStore: InMemoryStore.t, + ~entityConfig: Internal.entityConfig, +) => { + let getInMemTable = (inMemoryStore: InMemoryStore.t) => + inMemoryStore->InMemoryStore.getInMemTable(~entityConfig) + + let inMemTable = getInMemTable(inMemoryStore) + + inMemTable.table + ->InMemoryTable.values + ->Array.forEach(row => { + let mockDbTable = mockDb->getInternalDb->getInMemTable + switch row.entityRow { + | Updated({latest: {entityUpdateAction: Set(entity)}}) + | InitialReadFromDb(AlreadySet(entity)) => + let key = (entity: Internal.entity).id + mockDbTable->InMemoryTable.Entity.initValue( + ~allowOverWriteEntity=true, + ~key, + ~entity=Some(entity), + ) + | Updated({latest: {entityUpdateAction: Delete, entityId}}) => + mockDbTable.table.dict->deleteDictKey(entityId) + | InitialReadFromDb(NotSet) => () + } + }) +} + +let executeRowsMeta = ( + mockDb: t, + ~inMemoryStore: InMemoryStore.t, + ~getInMemTable: InMemoryStore.t => InMemoryTable.t<'key, 'entity>, + ~getKey: 'entity => 'key, +) => { + let mockDbTable = mockDb->getInternalDb->getInMemTable + inMemoryStore + ->getInMemTable + 
->InMemoryTable.values + ->Array.forEach(row => { + mockDbTable->InMemoryTable.set(getKey(row), row) + }) +} + +/** +The internal make function which can be passed an in memory store and +instantiate a "MockDb". This is useful for cloning or making a MockDb +out of an existing inMemoryStore +*/ +let rec makeWithInMemoryStore: InMemoryStore.t => t = (inMemoryStore: InMemoryStore.t) => { + let rawEvents = makeStoreOperatorMeta( + ~inMemoryStore, + ~makeMockDb=makeWithInMemoryStore, + ~getStore=db => db.rawEvents, + ~getKey=({chainId, eventId}) => { + chainId, + eventId: eventId->BigInt.toString, + }, + ) + + let dynamicContractRegistry = makeStoreOperatorEntity( + ~inMemoryStore, + ~getStore=db => + db + ->InMemoryStore.getInMemTable( + ~entityConfig=InternalTable.DynamicContractRegistry.config, + ) + ->( + Utils.magic: InMemoryTable.Entity.t => InMemoryTable.Entity.t< + InternalTable.DynamicContractRegistry.t, + > + ), + ~makeMockDb=makeWithInMemoryStore, + ~getKey=({chainId, contractAddress}) => { + InternalTable.DynamicContractRegistry.makeId(~chainId, ~contractAddress) + }, + ) + + let entities = { + account: { + makeStoreOperatorEntity( + ~inMemoryStore, + ~makeMockDb=makeWithInMemoryStore, + ~getStore=db => db->InMemoryStore.getInMemTable( + ~entityConfig=module(Entities.Account)->Entities.entityModToInternal, + )->( + Utils.magic: InMemoryTable.Entity.t => InMemoryTable.Entity.t< + Entities.Account.t, + > + ), + ~getKey=({id}) => id, + ) + }, + accountBalance: { + makeStoreOperatorEntity( + ~inMemoryStore, + ~makeMockDb=makeWithInMemoryStore, + ~getStore=db => db->InMemoryStore.getInMemTable( + ~entityConfig=module(Entities.AccountBalance)->Entities.entityModToInternal, + )->( + Utils.magic: InMemoryTable.Entity.t => InMemoryTable.Entity.t< + Entities.AccountBalance.t, + > + ), + ~getKey=({id}) => id, + ) + }, + accountPower: { + makeStoreOperatorEntity( + ~inMemoryStore, + ~makeMockDb=makeWithInMemoryStore, + ~getStore=db => db->InMemoryStore.getInMemTable( + 
~entityConfig=module(Entities.AccountPower)->Entities.entityModToInternal, + )->( + Utils.magic: InMemoryTable.Entity.t => InMemoryTable.Entity.t< + Entities.AccountPower.t, + > + ), + ~getKey=({id}) => id, + ) + }, + balanceHistory: { + makeStoreOperatorEntity( + ~inMemoryStore, + ~makeMockDb=makeWithInMemoryStore, + ~getStore=db => db->InMemoryStore.getInMemTable( + ~entityConfig=module(Entities.BalanceHistory)->Entities.entityModToInternal, + )->( + Utils.magic: InMemoryTable.Entity.t => InMemoryTable.Entity.t< + Entities.BalanceHistory.t, + > + ), + ~getKey=({id}) => id, + ) + }, + daoMetricsDayBucket: { + makeStoreOperatorEntity( + ~inMemoryStore, + ~makeMockDb=makeWithInMemoryStore, + ~getStore=db => db->InMemoryStore.getInMemTable( + ~entityConfig=module(Entities.DaoMetricsDayBucket)->Entities.entityModToInternal, + )->( + Utils.magic: InMemoryTable.Entity.t => InMemoryTable.Entity.t< + Entities.DaoMetricsDayBucket.t, + > + ), + ~getKey=({id}) => id, + ) + }, + delegation: { + makeStoreOperatorEntity( + ~inMemoryStore, + ~makeMockDb=makeWithInMemoryStore, + ~getStore=db => db->InMemoryStore.getInMemTable( + ~entityConfig=module(Entities.Delegation)->Entities.entityModToInternal, + )->( + Utils.magic: InMemoryTable.Entity.t => InMemoryTable.Entity.t< + Entities.Delegation.t, + > + ), + ~getKey=({id}) => id, + ) + }, + feedEvent: { + makeStoreOperatorEntity( + ~inMemoryStore, + ~makeMockDb=makeWithInMemoryStore, + ~getStore=db => db->InMemoryStore.getInMemTable( + ~entityConfig=module(Entities.FeedEvent)->Entities.entityModToInternal, + )->( + Utils.magic: InMemoryTable.Entity.t => InMemoryTable.Entity.t< + Entities.FeedEvent.t, + > + ), + ~getKey=({id}) => id, + ) + }, + proposalOnchain: { + makeStoreOperatorEntity( + ~inMemoryStore, + ~makeMockDb=makeWithInMemoryStore, + ~getStore=db => db->InMemoryStore.getInMemTable( + ~entityConfig=module(Entities.ProposalOnchain)->Entities.entityModToInternal, + )->( + Utils.magic: InMemoryTable.Entity.t => 
InMemoryTable.Entity.t< + Entities.ProposalOnchain.t, + > + ), + ~getKey=({id}) => id, + ) + }, + token: { + makeStoreOperatorEntity( + ~inMemoryStore, + ~makeMockDb=makeWithInMemoryStore, + ~getStore=db => db->InMemoryStore.getInMemTable( + ~entityConfig=module(Entities.Token)->Entities.entityModToInternal, + )->( + Utils.magic: InMemoryTable.Entity.t => InMemoryTable.Entity.t< + Entities.Token.t, + > + ), + ~getKey=({id}) => id, + ) + }, + tokenPrice: { + makeStoreOperatorEntity( + ~inMemoryStore, + ~makeMockDb=makeWithInMemoryStore, + ~getStore=db => db->InMemoryStore.getInMemTable( + ~entityConfig=module(Entities.TokenPrice)->Entities.entityModToInternal, + )->( + Utils.magic: InMemoryTable.Entity.t => InMemoryTable.Entity.t< + Entities.TokenPrice.t, + > + ), + ~getKey=({id}) => id, + ) + }, + transaction: { + makeStoreOperatorEntity( + ~inMemoryStore, + ~makeMockDb=makeWithInMemoryStore, + ~getStore=db => db->InMemoryStore.getInMemTable( + ~entityConfig=module(Entities.Transaction)->Entities.entityModToInternal, + )->( + Utils.magic: InMemoryTable.Entity.t => InMemoryTable.Entity.t< + Entities.Transaction.t, + > + ), + ~getKey=({id}) => id, + ) + }, + transfer: { + makeStoreOperatorEntity( + ~inMemoryStore, + ~makeMockDb=makeWithInMemoryStore, + ~getStore=db => db->InMemoryStore.getInMemTable( + ~entityConfig=module(Entities.Transfer)->Entities.entityModToInternal, + )->( + Utils.magic: InMemoryTable.Entity.t => InMemoryTable.Entity.t< + Entities.Transfer.t, + > + ), + ~getKey=({id}) => id, + ) + }, + voteOnchain: { + makeStoreOperatorEntity( + ~inMemoryStore, + ~makeMockDb=makeWithInMemoryStore, + ~getStore=db => db->InMemoryStore.getInMemTable( + ~entityConfig=module(Entities.VoteOnchain)->Entities.entityModToInternal, + )->( + Utils.magic: InMemoryTable.Entity.t => InMemoryTable.Entity.t< + Entities.VoteOnchain.t, + > + ), + ~getKey=({id}) => id, + ) + }, + votingPowerHistory: { + makeStoreOperatorEntity( + ~inMemoryStore, + 
~makeMockDb=makeWithInMemoryStore, + ~getStore=db => db->InMemoryStore.getInMemTable( + ~entityConfig=module(Entities.VotingPowerHistory)->Entities.entityModToInternal, + )->( + Utils.magic: InMemoryTable.Entity.t => InMemoryTable.Entity.t< + Entities.VotingPowerHistory.t, + > + ), + ~getKey=({id}) => id, + ) + }, + } + + let mockDb = { + __dbInternal__: inMemoryStore, + entities, + rawEvents, + dynamicContractRegistry, + processEvents: %raw(`null`), + } + (mockDb->Utils.magic)["processEvents"] = makeProcessEvents(mockDb, ~chainId=?None) + mockDb +} +and makeProcessEvents = (mockDb: t, ~chainId=?) => async ( + events: array>, +) => { + if events->Utils.Array.isEmpty { + mockDb + } else { + let itemsWithContractRegister = [] + + let registrations = EventRegister.finishRegistration() + let registrations = if ( + registrations.hasEvents || !(registrations.onBlockByChainId->Utils.Dict.isEmpty) + ) { + registrations + } else { + Generated.registerAllHandlers() + } + + let config = Generated.makeGeneratedConfig() + let indexer: Indexer.t = { + registrations, + config, + persistence: Generated.codegenPersistence, + } + + let processingChainId = ref(chainId) + let latestFetchedBlockNumber = ref(0) + let newItems = events->Array.map(event => { + let event = event->Internal.fromGenericEvent + let eventConfig = switch mockEventRegisters->Utils.WeakMap.get(event) { + | Some(register) => register() + | None => + Js.Exn.raiseError( + "Events must be created using the mock API (e.g. createMockEvent) to be processed by mockDb.processEvents", + ) + } + let chainId = switch chainId { + | Some(chainId) => chainId + | None => event.chainId + } + + switch processingChainId.contents { + | Some(prevItemChainId) => + if prevItemChainId !== chainId { + Js.Exn.raiseError( + `Processing events on multiple chains is not supported yet. 
Got chainId ${chainId->Belt.Int.toString} but expected ${prevItemChainId->Belt.Int.toString}`, + ) + } + | None => processingChainId.contents = Some(chainId) + } + + let chain = config->Config.getChain(~chainId) + let item = Internal.Event({ + eventConfig, + event, + chain, + logIndex: event.logIndex, + timestamp: event.block->Types.Block.getTimestamp, + blockNumber: event.block->Types.Block.getNumber, + }) + latestFetchedBlockNumber.contents = Pervasives.max( + latestFetchedBlockNumber.contents, + event.block->Types.Block.getNumber, + ) + if eventConfig.contractRegister->Option.isSome { + itemsWithContractRegister->Js.Array2.push(item)->ignore + } + item + }) + + let processingChainId = switch processingChainId.contents { + | Some(chainId) => chainId + | None => + Js.Exn.raiseError("No events provided to processEvents. Please provide at least one event.") + } + let processingChain = config->Config.getChain(~chainId=processingChainId) + + let chainFetcher = ChainFetcher.makeFromConfig( + config.chainMap->ChainMap.get(processingChain), + ~config, + ~registrations=indexer.registrations, + ~targetBufferSize=5000, + ) + + //Deep copy the data in mockDb, mutate the clone and return the clone + //So no side effects occur here and state can be compared between process + //steps + let mockDbClone = mockDb->cloneMockDb + + //Construct a new instance of an in memory store to run for the given event + let inMemoryStore = InMemoryStore.make(~entities=Entities.allEntities) + let loadManager = LoadManager.make() + let persistence = { + ...indexer.persistence, + storage: makeMockStorage(mockDb), + storageStatus: Ready({ + cleanRun: false, + cache: Js.Dict.empty(), + chains: [], + reorgCheckpoints: [], + checkpointId: 0, + }), + } + let indexer = { + ...indexer, + persistence, + } + + let newItemsWithDcs = if itemsWithContractRegister->Utils.Array.notEmpty { + await ChainFetcher.runContractRegistersOrThrow( + ~itemsWithContractRegister, + ~chain=processingChain, + ~config, + ) + } 
else { + itemsWithContractRegister + } + + let updatedFetchState = ref(chainFetcher.fetchState) + + switch newItemsWithDcs { + | [] => () + | _ => + updatedFetchState := + updatedFetchState.contents->FetchState.registerDynamicContracts(newItemsWithDcs) + } + + updatedFetchState := + updatedFetchState.contents + ->FetchState.handleQueryResult( + ~latestFetchedBlock={ + blockNumber: latestFetchedBlockNumber.contents, + blockTimestamp: 0, + }, + ~query={ + partitionId: (updatedFetchState.contents.partitions->Array.getUnsafe(0)).id, + fromBlock: 0, + selection: {eventConfigs: [], dependsOnAddresses: false}, + addressesByContractName: Js.Dict.empty(), + target: FetchState.Head, + indexingContracts: Js.Dict.empty(), + }, + ~newItems, + ) + ->Result.getExn + + // Handle query for the rest partitions without items + // to catch up the latest fully fetched block + for idx in 1 to updatedFetchState.contents.partitions->Array.length - 1 { + let partition = updatedFetchState.contents.partitions->Array.getUnsafe(idx) + updatedFetchState := + updatedFetchState.contents + ->FetchState.handleQueryResult( + ~latestFetchedBlock={ + blockNumber: latestFetchedBlockNumber.contents, + blockTimestamp: 0, + }, + ~query={ + partitionId: partition.id, + fromBlock: 0, + selection: {eventConfigs: [], dependsOnAddresses: false}, + addressesByContractName: Js.Dict.empty(), + target: FetchState.Head, + indexingContracts: Js.Dict.empty(), + }, + ~newItems=[], + ) + ->Result.getExn + } + + let batch = Batch.prepareUnorderedBatch( + ~checkpointIdBeforeBatch=0, + ~chainsBeforeBatch=ChainMap.fromArrayUnsafe([ + ( + processingChain, + ( + { + fetchState: updatedFetchState.contents, + reorgDetection: chainFetcher.reorgDetection, + progressBlockNumber: chainFetcher.committedProgressBlockNumber, + sourceBlockNumber: chainFetcher.currentBlockHeight, + totalEventsProcessed: chainFetcher.numEventsProcessed, + }: Batch.chainBeforeBatch + ), + ), + ]), + ~batchSizeTarget=newItems->Array.length, + ) + + 
inMemoryStore->InMemoryStore.setBatchDcs(~batch, ~shouldSaveHistory=false) + + // Create a mock chains state where the processing chain is ready (simulating "Live" mode) + let chains = Js.Dict.empty() + chains->Js.Dict.set(processingChainId->Int.toString, {Internal.isReady: true}) + + try { + await batch->EventProcessing.preloadBatchOrThrow( + ~loadManager, + ~persistence, + ~inMemoryStore, + ~chains, + ) + await batch->EventProcessing.runBatchHandlersOrThrow( + ~inMemoryStore, + ~loadManager, + ~indexer, + ~shouldSaveHistory=false, + ~shouldBenchmark=false, + ~chains, + ) + } catch { + | EventProcessing.ProcessingError({message, exn, item}) => + exn + ->ErrorHandling.make(~msg=message, ~logger=item->Logging.getItemLogger) + ->ErrorHandling.logAndRaise + } + + //In mem store can still contatin raw events and dynamic contracts for the + //testing framework in cases where either contract register or loaderHandler + //is None + mockDbClone->writeFromMemoryStore(~inMemoryStore) + mockDbClone + } +} +and makeMockStorage = (mockDb: t): Persistence.storage => { + { + isInitialized: () => Js.Exn.raiseError("Not used yet"), + initialize: (~chainConfigs as _=?, ~entities as _=?, ~enums as _=?) 
=> + Js.Exn.raiseError("Not used yet"), + resumeInitialState: () => Js.Exn.raiseError("Not used yet"), + loadByIdsOrThrow: ( + type item, + ~ids, + ~table: Table.table, + ~rowsSchema as _: S.t>, + ) => { + let operations = mockDb->getEntityOperations(~entityName=table.tableName) + ids + ->Array.keepMap(id => operations.get(id)) + ->(Utils.magic: array => array) + ->Promise.resolve + }, + loadByFieldOrThrow: ( + ~fieldName, + ~fieldSchema as _, + ~fieldValue, + ~operator, + ~table, + ~rowsSchema as _, + ) => { + let mockDbTable = + mockDb.__dbInternal__.entities->InMemoryStore.EntityTables.get(~entityName=table.tableName) + let index = TableIndices.Index.makeSingle( + ~fieldName, + ~fieldValue, + ~operator=switch operator { + | #"=" => Eq + | #">" => Gt + | #"<" => Lt + }, + ) + mockDbTable + ->InMemoryTable.Entity.values + ->Array.keep(entity => { + index->TableIndices.Index.evaluate( + ~fieldName, + ~fieldValue=entity->Utils.magic->Js.Dict.unsafeGet(fieldName), + ) + }) + ->Promise.resolve + }, + setOrThrow: (~items as _, ~table as _, ~itemSchema as _) => Js.Exn.raiseError("Not used yet"), + setEffectCacheOrThrow: (~effect as _, ~items as _, ~initialize as _) => Promise.resolve(), + dumpEffectCache: () => Js.Exn.raiseError("Not used yet"), + } +} +and /** +Deep copies the in memory store data and returns a new mockDb with the same +state and no references to data from the passed in mockDb +*/ +cloneMockDb = (self: t) => { + let clonedInternalDb = self->getInternalDb->InMemoryStore.clone + clonedInternalDb->makeWithInMemoryStore +} +and /** +Simulates the writing of processed data in the inMemoryStore to a mockDb. 
This function +executes all the rows on each "store" (or pg table) in the inMemoryStore +*/ +writeFromMemoryStore = (mockDb: t, ~inMemoryStore: InMemoryStore.t) => { + //INTERNAL STORES/TABLES EXECUTION + mockDb->executeRowsMeta( + ~inMemoryStore, + ~getInMemTable=inMemStore => {inMemStore.rawEvents}, + ~getKey=(entity): InMemoryStore.rawEventsKey => { + chainId: entity.chainId, + eventId: entity.eventId->BigInt.toString, + }, + ) + + Generated.codegenPersistence.allEntities->Array.forEach(entityConfig => { + mockDb->executeRowsEntity(~inMemoryStore, ~entityConfig) + }) +} + +/** +The constructor function for a mockDb. Call it and then set up the inital state by calling +any of the set functions it provides access to. A mockDb will be passed into a processEvent +helper. Note, process event helpers will not mutate the mockDb but return a new mockDb with +new state so you can compare states before and after. +*/ +@genType +let //Note: It's called createMockDb over "make" to make it more intuitive in JS and TS + +createMockDb = () => makeWithInMemoryStore(InMemoryStore.make(~entities=Entities.allEntities)) diff --git a/apps/hypersync-indexer/generated/src/Types.gen.ts b/apps/hypersync-indexer/generated/src/Types.gen.ts new file mode 100644 index 000000000..d923ea555 --- /dev/null +++ b/apps/hypersync-indexer/generated/src/Types.gen.ts @@ -0,0 +1,557 @@ +/* TypeScript file generated from Types.res by genType. 
*/ + +/* eslint-disable */ +/* tslint:disable */ + +import type {AccountBalance_t as Entities_AccountBalance_t} from '../src/db/Entities.gen'; + +import type {AccountPower_t as Entities_AccountPower_t} from '../src/db/Entities.gen'; + +import type {Account_t as Entities_Account_t} from '../src/db/Entities.gen'; + +import type {BalanceHistory_t as Entities_BalanceHistory_t} from '../src/db/Entities.gen'; + +import type {DaoMetricsDayBucket_t as Entities_DaoMetricsDayBucket_t} from '../src/db/Entities.gen'; + +import type {Delegation_t as Entities_Delegation_t} from '../src/db/Entities.gen'; + +import type {FeedEvent_t as Entities_FeedEvent_t} from '../src/db/Entities.gen'; + +import type {HandlerContext as $$handlerContext} from './Types.ts'; + +import type {HandlerWithOptions as $$fnWithEventConfig} from './bindings/OpaqueTypes.ts'; + +import type {LoaderContext as $$loaderContext} from './Types.ts'; + +import type {ProposalOnchain_t as Entities_ProposalOnchain_t} from '../src/db/Entities.gen'; + +import type {SingleOrMultiple as $$SingleOrMultiple_t} from './bindings/OpaqueTypes'; + +import type {TokenPrice_t as Entities_TokenPrice_t} from '../src/db/Entities.gen'; + +import type {Token_t as Entities_Token_t} from '../src/db/Entities.gen'; + +import type {Transaction_t as Entities_Transaction_t} from '../src/db/Entities.gen'; + +import type {Transfer_t as Entities_Transfer_t} from '../src/db/Entities.gen'; + +import type {VoteOnchain_t as Entities_VoteOnchain_t} from '../src/db/Entities.gen'; + +import type {VotingPowerHistory_t as Entities_VotingPowerHistory_t} from '../src/db/Entities.gen'; + +import type {entityHandlerContext as Internal_entityHandlerContext} from 'envio/src/Internal.gen'; + +import type {eventOptions as Internal_eventOptions} from 'envio/src/Internal.gen'; + +import type {genericContractRegisterArgs as Internal_genericContractRegisterArgs} from 'envio/src/Internal.gen'; + +import type {genericContractRegister as 
Internal_genericContractRegister} from 'envio/src/Internal.gen'; + +import type {genericEvent as Internal_genericEvent} from 'envio/src/Internal.gen'; + +import type {genericHandlerArgs as Internal_genericHandlerArgs} from 'envio/src/Internal.gen'; + +import type {genericHandlerWithLoader as Internal_genericHandlerWithLoader} from 'envio/src/Internal.gen'; + +import type {genericHandler as Internal_genericHandler} from 'envio/src/Internal.gen'; + +import type {genericLoaderArgs as Internal_genericLoaderArgs} from 'envio/src/Internal.gen'; + +import type {genericLoader as Internal_genericLoader} from 'envio/src/Internal.gen'; + +import type {logger as Envio_logger} from 'envio/src/Envio.gen'; + +import type {noEventFilters as Internal_noEventFilters} from 'envio/src/Internal.gen'; + +import type {t as Address_t} from 'envio/src/Address.gen'; + +export type id = string; +export type Id = id; + +export type contractRegistrations = { + readonly log: Envio_logger; + readonly addENSGovernor: (_1:Address_t) => void; + readonly addENSToken: (_1:Address_t) => void +}; + +export type entityLoaderContext = { + readonly get: (_1:id) => Promise<(undefined | entity)>; + readonly getOrThrow: (_1:id, message:(undefined | string)) => Promise; + readonly getWhere: indexedFieldOperations; + readonly getOrCreate: (_1:entity) => Promise; + readonly set: (_1:entity) => void; + readonly deleteUnsafe: (_1:id) => void +}; + +export type loaderContext = $$loaderContext; + +export type entityHandlerContext = Internal_entityHandlerContext; + +export type handlerContext = $$handlerContext; + +export type account = Entities_Account_t; +export type Account = account; + +export type accountBalance = Entities_AccountBalance_t; +export type AccountBalance = accountBalance; + +export type accountPower = Entities_AccountPower_t; +export type AccountPower = accountPower; + +export type balanceHistory = Entities_BalanceHistory_t; +export type BalanceHistory = balanceHistory; + +export type 
daoMetricsDayBucket = Entities_DaoMetricsDayBucket_t; +export type DaoMetricsDayBucket = daoMetricsDayBucket; + +export type delegation = Entities_Delegation_t; +export type Delegation = delegation; + +export type feedEvent = Entities_FeedEvent_t; +export type FeedEvent = feedEvent; + +export type proposalOnchain = Entities_ProposalOnchain_t; +export type ProposalOnchain = proposalOnchain; + +export type token = Entities_Token_t; +export type Token = token; + +export type tokenPrice = Entities_TokenPrice_t; +export type TokenPrice = tokenPrice; + +export type transaction = Entities_Transaction_t; +export type Transaction = transaction; + +export type transfer = Entities_Transfer_t; +export type Transfer = transfer; + +export type voteOnchain = Entities_VoteOnchain_t; +export type VoteOnchain = voteOnchain; + +export type votingPowerHistory = Entities_VotingPowerHistory_t; +export type VotingPowerHistory = votingPowerHistory; + +export type Transaction_t = { + readonly hash: string; + readonly to: (undefined | Address_t); + readonly from: (undefined | Address_t) +}; + +export type Block_t = { + readonly number: number; + readonly timestamp: number; + readonly hash: string +}; + +export type AggregatedBlock_t = { + readonly hash: string; + readonly number: number; + readonly timestamp: number +}; + +export type AggregatedTransaction_t = { + readonly from: (undefined | Address_t); + readonly hash: string; + readonly to: (undefined | Address_t) +}; + +export type eventLog = Internal_genericEvent; +export type EventLog = eventLog; + +export type SingleOrMultiple_t = $$SingleOrMultiple_t; + +export type HandlerTypes_args = { readonly event: eventLog; readonly context: context }; + +export type HandlerTypes_contractRegisterArgs = Internal_genericContractRegisterArgs,contractRegistrations>; + +export type HandlerTypes_contractRegister = Internal_genericContractRegister>; + +export type HandlerTypes_loaderArgs = Internal_genericLoaderArgs,loaderContext>; + +export type 
HandlerTypes_loader = Internal_genericLoader,loaderReturn>; + +export type HandlerTypes_handlerArgs = Internal_genericHandlerArgs,handlerContext,loaderReturn>; + +export type HandlerTypes_handler = Internal_genericHandler>; + +export type HandlerTypes_loaderHandler = Internal_genericHandlerWithLoader,HandlerTypes_handler,eventFilters>; + +export type HandlerTypes_eventConfig = Internal_eventOptions; + +export type fnWithEventConfig = $$fnWithEventConfig; + +export type handlerWithOptions = fnWithEventConfig,HandlerTypes_eventConfig>; + +export type contractRegisterWithOptions = fnWithEventConfig,HandlerTypes_eventConfig>; + +export type ENSGovernor_chainId = 1; + +export type ENSGovernor_ProposalCreated_eventArgs = { + readonly proposalId: bigint; + readonly proposer: Address_t; + readonly targets: Address_t[]; + readonly values: bigint[]; + readonly signatures: string[]; + readonly calldatas: string[]; + readonly startBlock: bigint; + readonly endBlock: bigint; + readonly description: string +}; + +export type ENSGovernor_ProposalCreated_block = Block_t; + +export type ENSGovernor_ProposalCreated_transaction = Transaction_t; + +export type ENSGovernor_ProposalCreated_event = { + /** The parameters or arguments associated with this event. */ + readonly params: ENSGovernor_ProposalCreated_eventArgs; + /** The unique identifier of the blockchain network where this event occurred. */ + readonly chainId: ENSGovernor_chainId; + /** The address of the contract that emitted this event. */ + readonly srcAddress: Address_t; + /** The index of this event's log within the block. */ + readonly logIndex: number; + /** The transaction that triggered this event. Configurable in `config.yaml` via the `field_selection` option. */ + readonly transaction: ENSGovernor_ProposalCreated_transaction; + /** The block in which this event was recorded. Configurable in `config.yaml` via the `field_selection` option. 
*/ + readonly block: ENSGovernor_ProposalCreated_block +}; + +export type ENSGovernor_ProposalCreated_loaderArgs = Internal_genericLoaderArgs; + +export type ENSGovernor_ProposalCreated_loader = Internal_genericLoader; + +export type ENSGovernor_ProposalCreated_handlerArgs = Internal_genericHandlerArgs; + +export type ENSGovernor_ProposalCreated_handler = Internal_genericHandler>; + +export type ENSGovernor_ProposalCreated_contractRegister = Internal_genericContractRegister>; + +export type ENSGovernor_ProposalCreated_eventFilter = {}; + +export type ENSGovernor_ProposalCreated_eventFilters = Internal_noEventFilters; + +export type ENSGovernor_VoteCast_eventArgs = { + readonly voter: Address_t; + readonly proposalId: bigint; + readonly support: bigint; + readonly weight: bigint; + readonly reason: string +}; + +export type ENSGovernor_VoteCast_block = Block_t; + +export type ENSGovernor_VoteCast_transaction = Transaction_t; + +export type ENSGovernor_VoteCast_event = { + /** The parameters or arguments associated with this event. */ + readonly params: ENSGovernor_VoteCast_eventArgs; + /** The unique identifier of the blockchain network where this event occurred. */ + readonly chainId: ENSGovernor_chainId; + /** The address of the contract that emitted this event. */ + readonly srcAddress: Address_t; + /** The index of this event's log within the block. */ + readonly logIndex: number; + /** The transaction that triggered this event. Configurable in `config.yaml` via the `field_selection` option. */ + readonly transaction: ENSGovernor_VoteCast_transaction; + /** The block in which this event was recorded. Configurable in `config.yaml` via the `field_selection` option. 
*/ + readonly block: ENSGovernor_VoteCast_block +}; + +export type ENSGovernor_VoteCast_loaderArgs = Internal_genericLoaderArgs; + +export type ENSGovernor_VoteCast_loader = Internal_genericLoader; + +export type ENSGovernor_VoteCast_handlerArgs = Internal_genericHandlerArgs; + +export type ENSGovernor_VoteCast_handler = Internal_genericHandler>; + +export type ENSGovernor_VoteCast_contractRegister = Internal_genericContractRegister>; + +export type ENSGovernor_VoteCast_eventFilter = { readonly voter?: SingleOrMultiple_t }; + +export type ENSGovernor_VoteCast_eventFiltersArgs = { +/** The unique identifier of the blockchain network where this event occurred. */ +readonly chainId: ENSGovernor_chainId; +/** Addresses of the contracts indexing the event. */ +readonly addresses: Address_t[] }; + +export type ENSGovernor_VoteCast_eventFiltersDefinition = + ENSGovernor_VoteCast_eventFilter + | ENSGovernor_VoteCast_eventFilter[]; + +export type ENSGovernor_VoteCast_eventFilters = + ENSGovernor_VoteCast_eventFilter + | ENSGovernor_VoteCast_eventFilter[] + | ((_1:ENSGovernor_VoteCast_eventFiltersArgs) => ENSGovernor_VoteCast_eventFiltersDefinition); + +export type ENSGovernor_ProposalCanceled_eventArgs = { readonly proposalId: bigint }; + +export type ENSGovernor_ProposalCanceled_block = Block_t; + +export type ENSGovernor_ProposalCanceled_transaction = Transaction_t; + +export type ENSGovernor_ProposalCanceled_event = { + /** The parameters or arguments associated with this event. */ + readonly params: ENSGovernor_ProposalCanceled_eventArgs; + /** The unique identifier of the blockchain network where this event occurred. */ + readonly chainId: ENSGovernor_chainId; + /** The address of the contract that emitted this event. */ + readonly srcAddress: Address_t; + /** The index of this event's log within the block. */ + readonly logIndex: number; + /** The transaction that triggered this event. Configurable in `config.yaml` via the `field_selection` option. 
*/ + readonly transaction: ENSGovernor_ProposalCanceled_transaction; + /** The block in which this event was recorded. Configurable in `config.yaml` via the `field_selection` option. */ + readonly block: ENSGovernor_ProposalCanceled_block +}; + +export type ENSGovernor_ProposalCanceled_loaderArgs = Internal_genericLoaderArgs; + +export type ENSGovernor_ProposalCanceled_loader = Internal_genericLoader; + +export type ENSGovernor_ProposalCanceled_handlerArgs = Internal_genericHandlerArgs; + +export type ENSGovernor_ProposalCanceled_handler = Internal_genericHandler>; + +export type ENSGovernor_ProposalCanceled_contractRegister = Internal_genericContractRegister>; + +export type ENSGovernor_ProposalCanceled_eventFilter = {}; + +export type ENSGovernor_ProposalCanceled_eventFilters = Internal_noEventFilters; + +export type ENSGovernor_ProposalExecuted_eventArgs = { readonly proposalId: bigint }; + +export type ENSGovernor_ProposalExecuted_block = Block_t; + +export type ENSGovernor_ProposalExecuted_transaction = Transaction_t; + +export type ENSGovernor_ProposalExecuted_event = { + /** The parameters or arguments associated with this event. */ + readonly params: ENSGovernor_ProposalExecuted_eventArgs; + /** The unique identifier of the blockchain network where this event occurred. */ + readonly chainId: ENSGovernor_chainId; + /** The address of the contract that emitted this event. */ + readonly srcAddress: Address_t; + /** The index of this event's log within the block. */ + readonly logIndex: number; + /** The transaction that triggered this event. Configurable in `config.yaml` via the `field_selection` option. */ + readonly transaction: ENSGovernor_ProposalExecuted_transaction; + /** The block in which this event was recorded. Configurable in `config.yaml` via the `field_selection` option. 
*/ + readonly block: ENSGovernor_ProposalExecuted_block +}; + +export type ENSGovernor_ProposalExecuted_loaderArgs = Internal_genericLoaderArgs; + +export type ENSGovernor_ProposalExecuted_loader = Internal_genericLoader; + +export type ENSGovernor_ProposalExecuted_handlerArgs = Internal_genericHandlerArgs; + +export type ENSGovernor_ProposalExecuted_handler = Internal_genericHandler>; + +export type ENSGovernor_ProposalExecuted_contractRegister = Internal_genericContractRegister>; + +export type ENSGovernor_ProposalExecuted_eventFilter = {}; + +export type ENSGovernor_ProposalExecuted_eventFilters = Internal_noEventFilters; + +export type ENSGovernor_ProposalQueued_eventArgs = { readonly proposalId: bigint; readonly eta: bigint }; + +export type ENSGovernor_ProposalQueued_block = Block_t; + +export type ENSGovernor_ProposalQueued_transaction = Transaction_t; + +export type ENSGovernor_ProposalQueued_event = { + /** The parameters or arguments associated with this event. */ + readonly params: ENSGovernor_ProposalQueued_eventArgs; + /** The unique identifier of the blockchain network where this event occurred. */ + readonly chainId: ENSGovernor_chainId; + /** The address of the contract that emitted this event. */ + readonly srcAddress: Address_t; + /** The index of this event's log within the block. */ + readonly logIndex: number; + /** The transaction that triggered this event. Configurable in `config.yaml` via the `field_selection` option. */ + readonly transaction: ENSGovernor_ProposalQueued_transaction; + /** The block in which this event was recorded. Configurable in `config.yaml` via the `field_selection` option. 
*/ + readonly block: ENSGovernor_ProposalQueued_block +}; + +export type ENSGovernor_ProposalQueued_loaderArgs = Internal_genericLoaderArgs; + +export type ENSGovernor_ProposalQueued_loader = Internal_genericLoader; + +export type ENSGovernor_ProposalQueued_handlerArgs = Internal_genericHandlerArgs; + +export type ENSGovernor_ProposalQueued_handler = Internal_genericHandler>; + +export type ENSGovernor_ProposalQueued_contractRegister = Internal_genericContractRegister>; + +export type ENSGovernor_ProposalQueued_eventFilter = {}; + +export type ENSGovernor_ProposalQueued_eventFilters = Internal_noEventFilters; + +export type ENSToken_chainId = 1; + +export type ENSToken_Transfer_eventArgs = { + readonly from: Address_t; + readonly to: Address_t; + readonly value: bigint +}; + +export type ENSToken_Transfer_block = Block_t; + +export type ENSToken_Transfer_transaction = Transaction_t; + +export type ENSToken_Transfer_event = { + /** The parameters or arguments associated with this event. */ + readonly params: ENSToken_Transfer_eventArgs; + /** The unique identifier of the blockchain network where this event occurred. */ + readonly chainId: ENSToken_chainId; + /** The address of the contract that emitted this event. */ + readonly srcAddress: Address_t; + /** The index of this event's log within the block. */ + readonly logIndex: number; + /** The transaction that triggered this event. Configurable in `config.yaml` via the `field_selection` option. */ + readonly transaction: ENSToken_Transfer_transaction; + /** The block in which this event was recorded. Configurable in `config.yaml` via the `field_selection` option. 
*/ + readonly block: ENSToken_Transfer_block +}; + +export type ENSToken_Transfer_loaderArgs = Internal_genericLoaderArgs; + +export type ENSToken_Transfer_loader = Internal_genericLoader; + +export type ENSToken_Transfer_handlerArgs = Internal_genericHandlerArgs; + +export type ENSToken_Transfer_handler = Internal_genericHandler>; + +export type ENSToken_Transfer_contractRegister = Internal_genericContractRegister>; + +export type ENSToken_Transfer_eventFilter = { readonly from?: SingleOrMultiple_t; readonly to?: SingleOrMultiple_t }; + +export type ENSToken_Transfer_eventFiltersArgs = { +/** The unique identifier of the blockchain network where this event occurred. */ +readonly chainId: ENSToken_chainId; +/** Addresses of the contracts indexing the event. */ +readonly addresses: Address_t[] }; + +export type ENSToken_Transfer_eventFiltersDefinition = + ENSToken_Transfer_eventFilter + | ENSToken_Transfer_eventFilter[]; + +export type ENSToken_Transfer_eventFilters = + ENSToken_Transfer_eventFilter + | ENSToken_Transfer_eventFilter[] + | ((_1:ENSToken_Transfer_eventFiltersArgs) => ENSToken_Transfer_eventFiltersDefinition); + +export type ENSToken_DelegateChanged_eventArgs = { + readonly delegator: Address_t; + readonly fromDelegate: Address_t; + readonly toDelegate: Address_t +}; + +export type ENSToken_DelegateChanged_block = Block_t; + +export type ENSToken_DelegateChanged_transaction = Transaction_t; + +export type ENSToken_DelegateChanged_event = { + /** The parameters or arguments associated with this event. */ + readonly params: ENSToken_DelegateChanged_eventArgs; + /** The unique identifier of the blockchain network where this event occurred. */ + readonly chainId: ENSToken_chainId; + /** The address of the contract that emitted this event. */ + readonly srcAddress: Address_t; + /** The index of this event's log within the block. */ + readonly logIndex: number; + /** The transaction that triggered this event. 
Configurable in `config.yaml` via the `field_selection` option. */ + readonly transaction: ENSToken_DelegateChanged_transaction; + /** The block in which this event was recorded. Configurable in `config.yaml` via the `field_selection` option. */ + readonly block: ENSToken_DelegateChanged_block +}; + +export type ENSToken_DelegateChanged_loaderArgs = Internal_genericLoaderArgs; + +export type ENSToken_DelegateChanged_loader = Internal_genericLoader; + +export type ENSToken_DelegateChanged_handlerArgs = Internal_genericHandlerArgs; + +export type ENSToken_DelegateChanged_handler = Internal_genericHandler>; + +export type ENSToken_DelegateChanged_contractRegister = Internal_genericContractRegister>; + +export type ENSToken_DelegateChanged_eventFilter = { + readonly delegator?: SingleOrMultiple_t; + readonly fromDelegate?: SingleOrMultiple_t; + readonly toDelegate?: SingleOrMultiple_t +}; + +export type ENSToken_DelegateChanged_eventFiltersArgs = { +/** The unique identifier of the blockchain network where this event occurred. */ +readonly chainId: ENSToken_chainId; +/** Addresses of the contracts indexing the event. */ +readonly addresses: Address_t[] }; + +export type ENSToken_DelegateChanged_eventFiltersDefinition = + ENSToken_DelegateChanged_eventFilter + | ENSToken_DelegateChanged_eventFilter[]; + +export type ENSToken_DelegateChanged_eventFilters = + ENSToken_DelegateChanged_eventFilter + | ENSToken_DelegateChanged_eventFilter[] + | ((_1:ENSToken_DelegateChanged_eventFiltersArgs) => ENSToken_DelegateChanged_eventFiltersDefinition); + +export type ENSToken_DelegateVotesChanged_eventArgs = { + readonly delegate: Address_t; + readonly previousBalance: bigint; + readonly newBalance: bigint +}; + +export type ENSToken_DelegateVotesChanged_block = Block_t; + +export type ENSToken_DelegateVotesChanged_transaction = Transaction_t; + +export type ENSToken_DelegateVotesChanged_event = { + /** The parameters or arguments associated with this event. 
*/ + readonly params: ENSToken_DelegateVotesChanged_eventArgs; + /** The unique identifier of the blockchain network where this event occurred. */ + readonly chainId: ENSToken_chainId; + /** The address of the contract that emitted this event. */ + readonly srcAddress: Address_t; + /** The index of this event's log within the block. */ + readonly logIndex: number; + /** The transaction that triggered this event. Configurable in `config.yaml` via the `field_selection` option. */ + readonly transaction: ENSToken_DelegateVotesChanged_transaction; + /** The block in which this event was recorded. Configurable in `config.yaml` via the `field_selection` option. */ + readonly block: ENSToken_DelegateVotesChanged_block +}; + +export type ENSToken_DelegateVotesChanged_loaderArgs = Internal_genericLoaderArgs; + +export type ENSToken_DelegateVotesChanged_loader = Internal_genericLoader; + +export type ENSToken_DelegateVotesChanged_handlerArgs = Internal_genericHandlerArgs; + +export type ENSToken_DelegateVotesChanged_handler = Internal_genericHandler>; + +export type ENSToken_DelegateVotesChanged_contractRegister = Internal_genericContractRegister>; + +export type ENSToken_DelegateVotesChanged_eventFilter = { readonly delegate?: SingleOrMultiple_t }; + +export type ENSToken_DelegateVotesChanged_eventFiltersArgs = { +/** The unique identifier of the blockchain network where this event occurred. */ +readonly chainId: ENSToken_chainId; +/** Addresses of the contracts indexing the event. 
*/ +readonly addresses: Address_t[] }; + +export type ENSToken_DelegateVotesChanged_eventFiltersDefinition = + ENSToken_DelegateVotesChanged_eventFilter + | ENSToken_DelegateVotesChanged_eventFilter[]; + +export type ENSToken_DelegateVotesChanged_eventFilters = + ENSToken_DelegateVotesChanged_eventFilter + | ENSToken_DelegateVotesChanged_eventFilter[] + | ((_1:ENSToken_DelegateVotesChanged_eventFiltersArgs) => ENSToken_DelegateVotesChanged_eventFiltersDefinition); + +export type chainId = number; + +export type chain = 1; diff --git a/apps/hypersync-indexer/generated/src/Types.res b/apps/hypersync-indexer/generated/src/Types.res new file mode 100644 index 000000000..6e575921d --- /dev/null +++ b/apps/hypersync-indexer/generated/src/Types.res @@ -0,0 +1,957 @@ +//************* +//***ENTITIES** +//************* +@genType.as("Id") +type id = string + +@genType +type contractRegistrations = { + log: Envio.logger, + // TODO: only add contracts we've registered for the event in the config + addENSGovernor: (Address.t) => unit, + addENSToken: (Address.t) => unit, +} + +@genType +type entityLoaderContext<'entity, 'indexedFieldOperations> = { + get: id => promise>, + getOrThrow: (id, ~message: string=?) => promise<'entity>, + getWhere: 'indexedFieldOperations, + getOrCreate: ('entity) => promise<'entity>, + set: 'entity => unit, + deleteUnsafe: id => unit, +} + +@genType.import(("./Types.ts", "LoaderContext")) +type loaderContext = { + log: Envio.logger, + effect: 'input 'output. 
(Envio.effect<'input, 'output>, 'input) => promise<'output>, + isPreload: bool, + chains: Internal.chains, + @as("Account") account: entityLoaderContext, + @as("AccountBalance") accountBalance: entityLoaderContext, + @as("AccountPower") accountPower: entityLoaderContext, + @as("BalanceHistory") balanceHistory: entityLoaderContext, + @as("DaoMetricsDayBucket") daoMetricsDayBucket: entityLoaderContext, + @as("Delegation") delegation: entityLoaderContext, + @as("FeedEvent") feedEvent: entityLoaderContext, + @as("ProposalOnchain") proposalOnchain: entityLoaderContext, + @as("Token") token: entityLoaderContext, + @as("TokenPrice") tokenPrice: entityLoaderContext, + @as("Transaction") transaction: entityLoaderContext, + @as("Transfer") transfer: entityLoaderContext, + @as("VoteOnchain") voteOnchain: entityLoaderContext, + @as("VotingPowerHistory") votingPowerHistory: entityLoaderContext, +} + +@genType +type entityHandlerContext<'entity> = Internal.entityHandlerContext<'entity> + +@genType.import(("./Types.ts", "HandlerContext")) +type handlerContext = { + log: Envio.logger, + effect: 'input 'output. 
(Envio.effect<'input, 'output>, 'input) => promise<'output>, + chains: Internal.chains, + @as("Account") account: entityHandlerContext, + @as("AccountBalance") accountBalance: entityHandlerContext, + @as("AccountPower") accountPower: entityHandlerContext, + @as("BalanceHistory") balanceHistory: entityHandlerContext, + @as("DaoMetricsDayBucket") daoMetricsDayBucket: entityHandlerContext, + @as("Delegation") delegation: entityHandlerContext, + @as("FeedEvent") feedEvent: entityHandlerContext, + @as("ProposalOnchain") proposalOnchain: entityHandlerContext, + @as("Token") token: entityHandlerContext, + @as("TokenPrice") tokenPrice: entityHandlerContext, + @as("Transaction") transaction: entityHandlerContext, + @as("Transfer") transfer: entityHandlerContext, + @as("VoteOnchain") voteOnchain: entityHandlerContext, + @as("VotingPowerHistory") votingPowerHistory: entityHandlerContext, +} + +//Re-exporting types for backwards compatability +@genType.as("Account") +type account = Entities.Account.t +@genType.as("AccountBalance") +type accountBalance = Entities.AccountBalance.t +@genType.as("AccountPower") +type accountPower = Entities.AccountPower.t +@genType.as("BalanceHistory") +type balanceHistory = Entities.BalanceHistory.t +@genType.as("DaoMetricsDayBucket") +type daoMetricsDayBucket = Entities.DaoMetricsDayBucket.t +@genType.as("Delegation") +type delegation = Entities.Delegation.t +@genType.as("FeedEvent") +type feedEvent = Entities.FeedEvent.t +@genType.as("ProposalOnchain") +type proposalOnchain = Entities.ProposalOnchain.t +@genType.as("Token") +type token = Entities.Token.t +@genType.as("TokenPrice") +type tokenPrice = Entities.TokenPrice.t +@genType.as("Transaction") +type transaction = Entities.Transaction.t +@genType.as("Transfer") +type transfer = Entities.Transfer.t +@genType.as("VoteOnchain") +type voteOnchain = Entities.VoteOnchain.t +@genType.as("VotingPowerHistory") +type votingPowerHistory = Entities.VotingPowerHistory.t + +//************* 
+//**CONTRACTS** +//************* + +module Transaction = { + @genType + type t = {hash: string, to: option, from: option} + + let schema = S.object((s): t => {hash: s.field("hash", S.string), to: s.field("to", S.nullable(Address.schema)), from: s.field("from", S.nullable(Address.schema))}) +} + +module Block = { + @genType + type t = {number: int, timestamp: int, hash: string} + + let schema = S.object((s): t => {number: s.field("number", S.int), timestamp: s.field("timestamp", S.int), hash: s.field("hash", S.string)}) + + @get + external getNumber: Internal.eventBlock => int = "number" + + @get + external getTimestamp: Internal.eventBlock => int = "timestamp" + + @get + external getId: Internal.eventBlock => string = "hash" + + let cleanUpRawEventFieldsInPlace: Js.Json.t => () = %raw(`fields => { + delete fields.hash + delete fields.number + delete fields.timestamp + }`) +} + +module AggregatedBlock = { + @genType + type t = {hash: string, number: int, timestamp: int} +} +module AggregatedTransaction = { + @genType + type t = {from: option, hash: string, to: option} +} + +@genType.as("EventLog") +type eventLog<'params> = Internal.genericEvent<'params, Block.t, Transaction.t> + +module SingleOrMultiple: { + @genType.import(("./bindings/OpaqueTypes", "SingleOrMultiple")) + type t<'a> + let normalizeOrThrow: (t<'a>, ~nestedArrayDepth: int=?) 
=> array<'a> + let single: 'a => t<'a> + let multiple: array<'a> => t<'a> +} = { + type t<'a> = Js.Json.t + + external single: 'a => t<'a> = "%identity" + external multiple: array<'a> => t<'a> = "%identity" + external castMultiple: t<'a> => array<'a> = "%identity" + external castSingle: t<'a> => 'a = "%identity" + + exception AmbiguousEmptyNestedArray + + let rec isMultiple = (t: t<'a>, ~nestedArrayDepth): bool => + switch t->Js.Json.decodeArray { + | None => false + | Some(_arr) if nestedArrayDepth == 0 => true + | Some([]) if nestedArrayDepth > 0 => + AmbiguousEmptyNestedArray->ErrorHandling.mkLogAndRaise( + ~msg="The given empty array could be interperated as a flat array (value) or nested array. Since it's ambiguous, + please pass in a nested empty array if the intention is to provide an empty array as a value", + ) + | Some(arr) => arr->Js.Array2.unsafe_get(0)->isMultiple(~nestedArrayDepth=nestedArrayDepth - 1) + } + + let normalizeOrThrow = (t: t<'a>, ~nestedArrayDepth=0): array<'a> => { + if t->isMultiple(~nestedArrayDepth) { + t->castMultiple + } else { + [t->castSingle] + } + } +} + +module HandlerTypes = { + @genType + type args<'eventArgs, 'context> = { + event: eventLog<'eventArgs>, + context: 'context, + } + + @genType + type contractRegisterArgs<'eventArgs> = Internal.genericContractRegisterArgs, contractRegistrations> + @genType + type contractRegister<'eventArgs> = Internal.genericContractRegister> + + @genType + type loaderArgs<'eventArgs> = Internal.genericLoaderArgs, loaderContext> + @genType + type loader<'eventArgs, 'loaderReturn> = Internal.genericLoader, 'loaderReturn> + + @genType + type handlerArgs<'eventArgs, 'loaderReturn> = Internal.genericHandlerArgs, handlerContext, 'loaderReturn> + + @genType + type handler<'eventArgs, 'loaderReturn> = Internal.genericHandler> + + @genType + type loaderHandler<'eventArgs, 'loaderReturn, 'eventFilters> = Internal.genericHandlerWithLoader< + loader<'eventArgs, 'loaderReturn>, + handler<'eventArgs, 
'loaderReturn>, + 'eventFilters + > + + @genType + type eventConfig<'eventFilters> = Internal.eventOptions<'eventFilters> +} + +module type Event = { + type event + + let handlerRegister: EventRegister.t + + type eventFilters +} + +@genType.import(("./bindings/OpaqueTypes.ts", "HandlerWithOptions")) +type fnWithEventConfig<'fn, 'eventConfig> = ('fn, ~eventConfig: 'eventConfig=?) => unit + +@genType +type handlerWithOptions<'eventArgs, 'loaderReturn, 'eventFilters> = fnWithEventConfig< + HandlerTypes.handler<'eventArgs, 'loaderReturn>, + HandlerTypes.eventConfig<'eventFilters>, +> + +@genType +type contractRegisterWithOptions<'eventArgs, 'eventFilters> = fnWithEventConfig< + HandlerTypes.contractRegister<'eventArgs>, + HandlerTypes.eventConfig<'eventFilters>, +> + +module MakeRegister = (Event: Event) => { + let contractRegister: fnWithEventConfig< + Internal.genericContractRegister< + Internal.genericContractRegisterArgs, + >, + HandlerTypes.eventConfig, + > = (contractRegister, ~eventConfig=?) => + Event.handlerRegister->EventRegister.setContractRegister( + contractRegister, + ~eventOptions=eventConfig, + ) + + let handler: fnWithEventConfig< + Internal.genericHandler>, + HandlerTypes.eventConfig, + > = (handler, ~eventConfig=?) 
=> { + Event.handlerRegister->EventRegister.setHandler(args => { + if args.context.isPreload { + Promise.resolve() + } else { + handler( + args->( + Utils.magic: Internal.genericHandlerArgs< + Event.event, + Internal.handlerContext, + 'loaderReturn, + > => Internal.genericHandlerArgs + ), + ) + } + }, ~eventOptions=eventConfig) + } + + let handlerWithLoader = ( + eventConfig: Internal.genericHandlerWithLoader< + Internal.genericLoader, 'loaderReturn>, + Internal.genericHandler< + Internal.genericHandlerArgs, + >, + Event.eventFilters, + >, + ) => { + Event.handlerRegister->EventRegister.setHandler( + args => { + let promise = eventConfig.loader( + args->( + Utils.magic: Internal.genericHandlerArgs< + Event.event, + Internal.handlerContext, + 'loaderReturn, + > => Internal.genericLoaderArgs + ), + ) + if args.context.isPreload { + promise->Promise.ignoreValue + } else { + promise->Promise.then(loaderReturn => { + (args->Obj.magic)["loaderReturn"] = loaderReturn + eventConfig.handler( + args->( + Utils.magic: Internal.genericHandlerArgs< + Event.event, + Internal.handlerContext, + 'loaderReturn, + > => Internal.genericHandlerArgs + ), + ) + }) + } + }, + ~eventOptions=switch eventConfig { + | {wildcard: ?None, eventFilters: ?None} => None + | _ => + Some({ + wildcard: ?eventConfig.wildcard, + eventFilters: ?eventConfig.eventFilters, + preRegisterDynamicContracts: ?eventConfig.preRegisterDynamicContracts, + }) + }, + ) + } +} + +module ENSGovernor = { +let abi = 
Ethers.makeAbi((%raw(`[{"type":"event","name":"ProposalCanceled","inputs":[{"name":"proposalId","type":"uint256","indexed":false}],"anonymous":false},{"type":"event","name":"ProposalCreated","inputs":[{"name":"proposalId","type":"uint256","indexed":false},{"name":"proposer","type":"address","indexed":false},{"name":"targets","type":"address[]","indexed":false},{"name":"values","type":"uint256[]","indexed":false},{"name":"signatures","type":"string[]","indexed":false},{"name":"calldatas","type":"bytes[]","indexed":false},{"name":"startBlock","type":"uint256","indexed":false},{"name":"endBlock","type":"uint256","indexed":false},{"name":"description","type":"string","indexed":false}],"anonymous":false},{"type":"event","name":"ProposalExecuted","inputs":[{"name":"proposalId","type":"uint256","indexed":false}],"anonymous":false},{"type":"event","name":"ProposalQueued","inputs":[{"name":"proposalId","type":"uint256","indexed":false},{"name":"eta","type":"uint256","indexed":false}],"anonymous":false},{"type":"event","name":"VoteCast","inputs":[{"name":"voter","type":"address","indexed":true},{"name":"proposalId","type":"uint256","indexed":false},{"name":"support","type":"uint8","indexed":false},{"name":"weight","type":"uint256","indexed":false},{"name":"reason","type":"string","indexed":false}],"anonymous":false}]`): Js.Json.t)) +let eventSignatures = ["ProposalCanceled(uint256 proposalId)", "ProposalCreated(uint256 proposalId, address proposer, address[] targets, uint256[] values, string[] signatures, bytes[] calldatas, uint256 startBlock, uint256 endBlock, string description)", "ProposalExecuted(uint256 proposalId)", "ProposalQueued(uint256 proposalId, uint256 eta)", "VoteCast(address indexed voter, uint256 proposalId, uint8 support, uint256 weight, string reason)"] +@genType type chainId = [#1] +let contractName = "ENSGovernor" + +module ProposalCreated = { + +let id = "0x7d84a6263ae0d98d3329bd7b46bb4e8d6f98cd35a7adb45c274c8b7fd5ebd5e0_1" +let sighash = 
"0x7d84a6263ae0d98d3329bd7b46bb4e8d6f98cd35a7adb45c274c8b7fd5ebd5e0" +let name = "ProposalCreated" +let contractName = contractName + +@genType +type eventArgs = {proposalId: bigint, proposer: Address.t, targets: array, values: array, signatures: array, calldatas: array, startBlock: bigint, endBlock: bigint, description: string} +@genType +type block = Block.t +@genType +type transaction = Transaction.t + +@genType +type event = { + /** The parameters or arguments associated with this event. */ + params: eventArgs, + /** The unique identifier of the blockchain network where this event occurred. */ + chainId: chainId, + /** The address of the contract that emitted this event. */ + srcAddress: Address.t, + /** The index of this event's log within the block. */ + logIndex: int, + /** The transaction that triggered this event. Configurable in `config.yaml` via the `field_selection` option. */ + transaction: transaction, + /** The block in which this event was recorded. Configurable in `config.yaml` via the `field_selection` option. 
*/ + block: block, +} + +@genType +type loaderArgs = Internal.genericLoaderArgs +@genType +type loader<'loaderReturn> = Internal.genericLoader +@genType +type handlerArgs<'loaderReturn> = Internal.genericHandlerArgs +@genType +type handler<'loaderReturn> = Internal.genericHandler> +@genType +type contractRegister = Internal.genericContractRegister> + +let paramsRawEventSchema = S.object((s): eventArgs => {proposalId: s.field("proposalId", BigInt.schema), proposer: s.field("proposer", Address.schema), targets: s.field("targets", S.array(Address.schema)), values: s.field("values", S.array(BigInt.schema)), signatures: s.field("signatures", S.array(S.string)), calldatas: s.field("calldatas", S.array(S.string)), startBlock: s.field("startBlock", BigInt.schema), endBlock: s.field("endBlock", BigInt.schema), description: s.field("description", S.string)}) +let blockSchema = Block.schema +let transactionSchema = Transaction.schema + +let handlerRegister: EventRegister.t = EventRegister.make( + ~contractName, + ~eventName=name, +) + +@genType +type eventFilter = {} + +@genType type eventFilters = Internal.noEventFilters + +let register = (): Internal.evmEventConfig => { + let {getEventFiltersOrThrow, filterByAddresses} = LogSelection.parseEventFiltersOrThrow(~eventFilters=handlerRegister->EventRegister.getEventFilters, ~sighash, ~params=[]) + { + getEventFiltersOrThrow, + filterByAddresses, + dependsOnAddresses: !(handlerRegister->EventRegister.isWildcard) || filterByAddresses, + blockSchema: blockSchema->(Utils.magic: S.t => S.t), + transactionSchema: transactionSchema->(Utils.magic: S.t => S.t), + convertHyperSyncEventArgs: (decodedEvent: HyperSyncClient.Decoder.decodedEvent) => {proposalId: decodedEvent.body->Js.Array2.unsafe_get(0)->HyperSyncClient.Decoder.toUnderlying->Utils.magic, proposer: decodedEvent.body->Js.Array2.unsafe_get(1)->HyperSyncClient.Decoder.toUnderlying->Utils.magic, targets: 
decodedEvent.body->Js.Array2.unsafe_get(2)->HyperSyncClient.Decoder.toUnderlying->Utils.magic, values: decodedEvent.body->Js.Array2.unsafe_get(3)->HyperSyncClient.Decoder.toUnderlying->Utils.magic, signatures: decodedEvent.body->Js.Array2.unsafe_get(4)->HyperSyncClient.Decoder.toUnderlying->Utils.magic, calldatas: decodedEvent.body->Js.Array2.unsafe_get(5)->HyperSyncClient.Decoder.toUnderlying->Utils.magic, startBlock: decodedEvent.body->Js.Array2.unsafe_get(6)->HyperSyncClient.Decoder.toUnderlying->Utils.magic, endBlock: decodedEvent.body->Js.Array2.unsafe_get(7)->HyperSyncClient.Decoder.toUnderlying->Utils.magic, description: decodedEvent.body->Js.Array2.unsafe_get(8)->HyperSyncClient.Decoder.toUnderlying->Utils.magic, }->(Utils.magic: eventArgs => Internal.eventParams), + id, + name, + contractName, + isWildcard: (handlerRegister->EventRegister.isWildcard), + handler: handlerRegister->EventRegister.getHandler, + contractRegister: handlerRegister->EventRegister.getContractRegister, + paramsRawEventSchema: paramsRawEventSchema->(Utils.magic: S.t => S.t), + } +} +} + +module VoteCast = { + +let id = "0xb8e138887d0aa13bab447e82de9d5c1777041ecd21ca36ba824ff1e6c07ddda4_2" +let sighash = "0xb8e138887d0aa13bab447e82de9d5c1777041ecd21ca36ba824ff1e6c07ddda4" +let name = "VoteCast" +let contractName = contractName + +@genType +type eventArgs = {voter: Address.t, proposalId: bigint, support: bigint, weight: bigint, reason: string} +@genType +type block = Block.t +@genType +type transaction = Transaction.t + +@genType +type event = { + /** The parameters or arguments associated with this event. */ + params: eventArgs, + /** The unique identifier of the blockchain network where this event occurred. */ + chainId: chainId, + /** The address of the contract that emitted this event. */ + srcAddress: Address.t, + /** The index of this event's log within the block. */ + logIndex: int, + /** The transaction that triggered this event. 
Configurable in `config.yaml` via the `field_selection` option. */ + transaction: transaction, + /** The block in which this event was recorded. Configurable in `config.yaml` via the `field_selection` option. */ + block: block, +} + +@genType +type loaderArgs = Internal.genericLoaderArgs +@genType +type loader<'loaderReturn> = Internal.genericLoader +@genType +type handlerArgs<'loaderReturn> = Internal.genericHandlerArgs +@genType +type handler<'loaderReturn> = Internal.genericHandler> +@genType +type contractRegister = Internal.genericContractRegister> + +let paramsRawEventSchema = S.object((s): eventArgs => {voter: s.field("voter", Address.schema), proposalId: s.field("proposalId", BigInt.schema), support: s.field("support", BigInt.schema), weight: s.field("weight", BigInt.schema), reason: s.field("reason", S.string)}) +let blockSchema = Block.schema +let transactionSchema = Transaction.schema + +let handlerRegister: EventRegister.t = EventRegister.make( + ~contractName, + ~eventName=name, +) + +@genType +type eventFilter = {@as("voter") voter?: SingleOrMultiple.t} + +@genType type eventFiltersArgs = {/** The unique identifier of the blockchain network where this event occurred. */ chainId: chainId, /** Addresses of the contracts indexing the event. 
*/ addresses: array} + +@genType @unboxed type eventFiltersDefinition = Single(eventFilter) | Multiple(array) + +@genType @unboxed type eventFilters = | ...eventFiltersDefinition | Dynamic(eventFiltersArgs => eventFiltersDefinition) + +let register = (): Internal.evmEventConfig => { + let {getEventFiltersOrThrow, filterByAddresses} = LogSelection.parseEventFiltersOrThrow(~eventFilters=handlerRegister->EventRegister.getEventFilters, ~sighash, ~params=["voter",], ~topic1=(_eventFilter) => _eventFilter->Utils.Dict.dangerouslyGetNonOption("voter")->Belt.Option.mapWithDefault([], topicFilters => topicFilters->Obj.magic->SingleOrMultiple.normalizeOrThrow->Belt.Array.map(TopicFilter.fromAddress))) + { + getEventFiltersOrThrow, + filterByAddresses, + dependsOnAddresses: !(handlerRegister->EventRegister.isWildcard) || filterByAddresses, + blockSchema: blockSchema->(Utils.magic: S.t => S.t), + transactionSchema: transactionSchema->(Utils.magic: S.t => S.t), + convertHyperSyncEventArgs: (decodedEvent: HyperSyncClient.Decoder.decodedEvent) => {voter: decodedEvent.indexed->Js.Array2.unsafe_get(0)->HyperSyncClient.Decoder.toUnderlying->Utils.magic, proposalId: decodedEvent.body->Js.Array2.unsafe_get(0)->HyperSyncClient.Decoder.toUnderlying->Utils.magic, support: decodedEvent.body->Js.Array2.unsafe_get(1)->HyperSyncClient.Decoder.toUnderlying->Utils.magic, weight: decodedEvent.body->Js.Array2.unsafe_get(2)->HyperSyncClient.Decoder.toUnderlying->Utils.magic, reason: decodedEvent.body->Js.Array2.unsafe_get(3)->HyperSyncClient.Decoder.toUnderlying->Utils.magic, }->(Utils.magic: eventArgs => Internal.eventParams), + id, + name, + contractName, + isWildcard: (handlerRegister->EventRegister.isWildcard), + handler: handlerRegister->EventRegister.getHandler, + contractRegister: handlerRegister->EventRegister.getContractRegister, + paramsRawEventSchema: paramsRawEventSchema->(Utils.magic: S.t => S.t), + } +} +} + +module ProposalCanceled = { + +let id = 
"0x789cf55be980739dad1d0699b93b58e806b51c9d96619bfa8fe0a28abaa7b30c_1" +let sighash = "0x789cf55be980739dad1d0699b93b58e806b51c9d96619bfa8fe0a28abaa7b30c" +let name = "ProposalCanceled" +let contractName = contractName + +@genType +type eventArgs = {proposalId: bigint} +@genType +type block = Block.t +@genType +type transaction = Transaction.t + +@genType +type event = { + /** The parameters or arguments associated with this event. */ + params: eventArgs, + /** The unique identifier of the blockchain network where this event occurred. */ + chainId: chainId, + /** The address of the contract that emitted this event. */ + srcAddress: Address.t, + /** The index of this event's log within the block. */ + logIndex: int, + /** The transaction that triggered this event. Configurable in `config.yaml` via the `field_selection` option. */ + transaction: transaction, + /** The block in which this event was recorded. Configurable in `config.yaml` via the `field_selection` option. */ + block: block, +} + +@genType +type loaderArgs = Internal.genericLoaderArgs +@genType +type loader<'loaderReturn> = Internal.genericLoader +@genType +type handlerArgs<'loaderReturn> = Internal.genericHandlerArgs +@genType +type handler<'loaderReturn> = Internal.genericHandler> +@genType +type contractRegister = Internal.genericContractRegister> + +let paramsRawEventSchema = S.object((s): eventArgs => {proposalId: s.field("proposalId", BigInt.schema)}) +let blockSchema = Block.schema +let transactionSchema = Transaction.schema + +let handlerRegister: EventRegister.t = EventRegister.make( + ~contractName, + ~eventName=name, +) + +@genType +type eventFilter = {} + +@genType type eventFilters = Internal.noEventFilters + +let register = (): Internal.evmEventConfig => { + let {getEventFiltersOrThrow, filterByAddresses} = LogSelection.parseEventFiltersOrThrow(~eventFilters=handlerRegister->EventRegister.getEventFilters, ~sighash, ~params=[]) + { + getEventFiltersOrThrow, + filterByAddresses, + 
dependsOnAddresses: !(handlerRegister->EventRegister.isWildcard) || filterByAddresses, + blockSchema: blockSchema->(Utils.magic: S.t => S.t), + transactionSchema: transactionSchema->(Utils.magic: S.t => S.t), + convertHyperSyncEventArgs: (decodedEvent: HyperSyncClient.Decoder.decodedEvent) => {proposalId: decodedEvent.body->Js.Array2.unsafe_get(0)->HyperSyncClient.Decoder.toUnderlying->Utils.magic, }->(Utils.magic: eventArgs => Internal.eventParams), + id, + name, + contractName, + isWildcard: (handlerRegister->EventRegister.isWildcard), + handler: handlerRegister->EventRegister.getHandler, + contractRegister: handlerRegister->EventRegister.getContractRegister, + paramsRawEventSchema: paramsRawEventSchema->(Utils.magic: S.t => S.t), + } +} +} + +module ProposalExecuted = { + +let id = "0x712ae1383f79ac853f8d882153778e0260ef8f03b504e2866e0593e04d2b291f_1" +let sighash = "0x712ae1383f79ac853f8d882153778e0260ef8f03b504e2866e0593e04d2b291f" +let name = "ProposalExecuted" +let contractName = contractName + +@genType +type eventArgs = {proposalId: bigint} +@genType +type block = Block.t +@genType +type transaction = Transaction.t + +@genType +type event = { + /** The parameters or arguments associated with this event. */ + params: eventArgs, + /** The unique identifier of the blockchain network where this event occurred. */ + chainId: chainId, + /** The address of the contract that emitted this event. */ + srcAddress: Address.t, + /** The index of this event's log within the block. */ + logIndex: int, + /** The transaction that triggered this event. Configurable in `config.yaml` via the `field_selection` option. */ + transaction: transaction, + /** The block in which this event was recorded. Configurable in `config.yaml` via the `field_selection` option. 
*/ + block: block, +} + +@genType +type loaderArgs = Internal.genericLoaderArgs +@genType +type loader<'loaderReturn> = Internal.genericLoader +@genType +type handlerArgs<'loaderReturn> = Internal.genericHandlerArgs +@genType +type handler<'loaderReturn> = Internal.genericHandler> +@genType +type contractRegister = Internal.genericContractRegister> + +let paramsRawEventSchema = S.object((s): eventArgs => {proposalId: s.field("proposalId", BigInt.schema)}) +let blockSchema = Block.schema +let transactionSchema = Transaction.schema + +let handlerRegister: EventRegister.t = EventRegister.make( + ~contractName, + ~eventName=name, +) + +@genType +type eventFilter = {} + +@genType type eventFilters = Internal.noEventFilters + +let register = (): Internal.evmEventConfig => { + let {getEventFiltersOrThrow, filterByAddresses} = LogSelection.parseEventFiltersOrThrow(~eventFilters=handlerRegister->EventRegister.getEventFilters, ~sighash, ~params=[]) + { + getEventFiltersOrThrow, + filterByAddresses, + dependsOnAddresses: !(handlerRegister->EventRegister.isWildcard) || filterByAddresses, + blockSchema: blockSchema->(Utils.magic: S.t => S.t), + transactionSchema: transactionSchema->(Utils.magic: S.t => S.t), + convertHyperSyncEventArgs: (decodedEvent: HyperSyncClient.Decoder.decodedEvent) => {proposalId: decodedEvent.body->Js.Array2.unsafe_get(0)->HyperSyncClient.Decoder.toUnderlying->Utils.magic, }->(Utils.magic: eventArgs => Internal.eventParams), + id, + name, + contractName, + isWildcard: (handlerRegister->EventRegister.isWildcard), + handler: handlerRegister->EventRegister.getHandler, + contractRegister: handlerRegister->EventRegister.getContractRegister, + paramsRawEventSchema: paramsRawEventSchema->(Utils.magic: S.t => S.t), + } +} +} + +module ProposalQueued = { + +let id = "0x9a2e42fd6722813d69113e7d0079d3d940171428df7373df9c7f7617cfda2892_1" +let sighash = "0x9a2e42fd6722813d69113e7d0079d3d940171428df7373df9c7f7617cfda2892" +let name = "ProposalQueued" +let 
contractName = contractName + +@genType +type eventArgs = {proposalId: bigint, eta: bigint} +@genType +type block = Block.t +@genType +type transaction = Transaction.t + +@genType +type event = { + /** The parameters or arguments associated with this event. */ + params: eventArgs, + /** The unique identifier of the blockchain network where this event occurred. */ + chainId: chainId, + /** The address of the contract that emitted this event. */ + srcAddress: Address.t, + /** The index of this event's log within the block. */ + logIndex: int, + /** The transaction that triggered this event. Configurable in `config.yaml` via the `field_selection` option. */ + transaction: transaction, + /** The block in which this event was recorded. Configurable in `config.yaml` via the `field_selection` option. */ + block: block, +} + +@genType +type loaderArgs = Internal.genericLoaderArgs +@genType +type loader<'loaderReturn> = Internal.genericLoader +@genType +type handlerArgs<'loaderReturn> = Internal.genericHandlerArgs +@genType +type handler<'loaderReturn> = Internal.genericHandler> +@genType +type contractRegister = Internal.genericContractRegister> + +let paramsRawEventSchema = S.object((s): eventArgs => {proposalId: s.field("proposalId", BigInt.schema), eta: s.field("eta", BigInt.schema)}) +let blockSchema = Block.schema +let transactionSchema = Transaction.schema + +let handlerRegister: EventRegister.t = EventRegister.make( + ~contractName, + ~eventName=name, +) + +@genType +type eventFilter = {} + +@genType type eventFilters = Internal.noEventFilters + +let register = (): Internal.evmEventConfig => { + let {getEventFiltersOrThrow, filterByAddresses} = LogSelection.parseEventFiltersOrThrow(~eventFilters=handlerRegister->EventRegister.getEventFilters, ~sighash, ~params=[]) + { + getEventFiltersOrThrow, + filterByAddresses, + dependsOnAddresses: !(handlerRegister->EventRegister.isWildcard) || filterByAddresses, + blockSchema: blockSchema->(Utils.magic: S.t => S.t), + 
transactionSchema: transactionSchema->(Utils.magic: S.t => S.t), + convertHyperSyncEventArgs: (decodedEvent: HyperSyncClient.Decoder.decodedEvent) => {proposalId: decodedEvent.body->Js.Array2.unsafe_get(0)->HyperSyncClient.Decoder.toUnderlying->Utils.magic, eta: decodedEvent.body->Js.Array2.unsafe_get(1)->HyperSyncClient.Decoder.toUnderlying->Utils.magic, }->(Utils.magic: eventArgs => Internal.eventParams), + id, + name, + contractName, + isWildcard: (handlerRegister->EventRegister.isWildcard), + handler: handlerRegister->EventRegister.getHandler, + contractRegister: handlerRegister->EventRegister.getContractRegister, + paramsRawEventSchema: paramsRawEventSchema->(Utils.magic: S.t => S.t), + } +} +} +} + +module ENSToken = { +let abi = Ethers.makeAbi((%raw(`[{"type":"event","name":"DelegateChanged","inputs":[{"name":"delegator","type":"address","indexed":true},{"name":"fromDelegate","type":"address","indexed":true},{"name":"toDelegate","type":"address","indexed":true}],"anonymous":false},{"type":"event","name":"DelegateVotesChanged","inputs":[{"name":"delegate","type":"address","indexed":true},{"name":"previousBalance","type":"uint256","indexed":false},{"name":"newBalance","type":"uint256","indexed":false}],"anonymous":false},{"type":"event","name":"Transfer","inputs":[{"name":"from","type":"address","indexed":true},{"name":"to","type":"address","indexed":true},{"name":"value","type":"uint256","indexed":false}],"anonymous":false}]`): Js.Json.t)) +let eventSignatures = ["DelegateChanged(address indexed delegator, address indexed fromDelegate, address indexed toDelegate)", "DelegateVotesChanged(address indexed delegate, uint256 previousBalance, uint256 newBalance)", "Transfer(address indexed from, address indexed to, uint256 value)"] +@genType type chainId = [#1] +let contractName = "ENSToken" + +module Transfer = { + +let id = "0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef_3" +let sighash = 
"0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef" +let name = "Transfer" +let contractName = contractName + +@genType +type eventArgs = {from: Address.t, to: Address.t, value: bigint} +@genType +type block = Block.t +@genType +type transaction = Transaction.t + +@genType +type event = { + /** The parameters or arguments associated with this event. */ + params: eventArgs, + /** The unique identifier of the blockchain network where this event occurred. */ + chainId: chainId, + /** The address of the contract that emitted this event. */ + srcAddress: Address.t, + /** The index of this event's log within the block. */ + logIndex: int, + /** The transaction that triggered this event. Configurable in `config.yaml` via the `field_selection` option. */ + transaction: transaction, + /** The block in which this event was recorded. Configurable in `config.yaml` via the `field_selection` option. */ + block: block, +} + +@genType +type loaderArgs = Internal.genericLoaderArgs +@genType +type loader<'loaderReturn> = Internal.genericLoader +@genType +type handlerArgs<'loaderReturn> = Internal.genericHandlerArgs +@genType +type handler<'loaderReturn> = Internal.genericHandler> +@genType +type contractRegister = Internal.genericContractRegister> + +let paramsRawEventSchema = S.object((s): eventArgs => {from: s.field("from", Address.schema), to: s.field("to", Address.schema), value: s.field("value", BigInt.schema)}) +let blockSchema = Block.schema +let transactionSchema = Transaction.schema + +let handlerRegister: EventRegister.t = EventRegister.make( + ~contractName, + ~eventName=name, +) + +@genType +type eventFilter = {@as("from") from?: SingleOrMultiple.t, @as("to") to?: SingleOrMultiple.t} + +@genType type eventFiltersArgs = {/** The unique identifier of the blockchain network where this event occurred. */ chainId: chainId, /** Addresses of the contracts indexing the event. 
*/ addresses: array} + +@genType @unboxed type eventFiltersDefinition = Single(eventFilter) | Multiple(array) + +@genType @unboxed type eventFilters = | ...eventFiltersDefinition | Dynamic(eventFiltersArgs => eventFiltersDefinition) + +let register = (): Internal.evmEventConfig => { + let {getEventFiltersOrThrow, filterByAddresses} = LogSelection.parseEventFiltersOrThrow(~eventFilters=handlerRegister->EventRegister.getEventFilters, ~sighash, ~params=["from","to",], ~topic1=(_eventFilter) => _eventFilter->Utils.Dict.dangerouslyGetNonOption("from")->Belt.Option.mapWithDefault([], topicFilters => topicFilters->Obj.magic->SingleOrMultiple.normalizeOrThrow->Belt.Array.map(TopicFilter.fromAddress)), ~topic2=(_eventFilter) => _eventFilter->Utils.Dict.dangerouslyGetNonOption("to")->Belt.Option.mapWithDefault([], topicFilters => topicFilters->Obj.magic->SingleOrMultiple.normalizeOrThrow->Belt.Array.map(TopicFilter.fromAddress))) + { + getEventFiltersOrThrow, + filterByAddresses, + dependsOnAddresses: !(handlerRegister->EventRegister.isWildcard) || filterByAddresses, + blockSchema: blockSchema->(Utils.magic: S.t => S.t), + transactionSchema: transactionSchema->(Utils.magic: S.t => S.t), + convertHyperSyncEventArgs: (decodedEvent: HyperSyncClient.Decoder.decodedEvent) => {from: decodedEvent.indexed->Js.Array2.unsafe_get(0)->HyperSyncClient.Decoder.toUnderlying->Utils.magic, to: decodedEvent.indexed->Js.Array2.unsafe_get(1)->HyperSyncClient.Decoder.toUnderlying->Utils.magic, value: decodedEvent.body->Js.Array2.unsafe_get(0)->HyperSyncClient.Decoder.toUnderlying->Utils.magic, }->(Utils.magic: eventArgs => Internal.eventParams), + id, + name, + contractName, + isWildcard: (handlerRegister->EventRegister.isWildcard), + handler: handlerRegister->EventRegister.getHandler, + contractRegister: handlerRegister->EventRegister.getContractRegister, + paramsRawEventSchema: paramsRawEventSchema->(Utils.magic: S.t => S.t), + } +} +} + +module DelegateChanged = { + +let id = 
"0x3134e8a2e6d97e929a7e54011ea5485d7d196dd5f0ba4d4ef95803e8e3fc257f_4" +let sighash = "0x3134e8a2e6d97e929a7e54011ea5485d7d196dd5f0ba4d4ef95803e8e3fc257f" +let name = "DelegateChanged" +let contractName = contractName + +@genType +type eventArgs = {delegator: Address.t, fromDelegate: Address.t, toDelegate: Address.t} +@genType +type block = Block.t +@genType +type transaction = Transaction.t + +@genType +type event = { + /** The parameters or arguments associated with this event. */ + params: eventArgs, + /** The unique identifier of the blockchain network where this event occurred. */ + chainId: chainId, + /** The address of the contract that emitted this event. */ + srcAddress: Address.t, + /** The index of this event's log within the block. */ + logIndex: int, + /** The transaction that triggered this event. Configurable in `config.yaml` via the `field_selection` option. */ + transaction: transaction, + /** The block in which this event was recorded. Configurable in `config.yaml` via the `field_selection` option. 
*/ + block: block, +} + +@genType +type loaderArgs = Internal.genericLoaderArgs +@genType +type loader<'loaderReturn> = Internal.genericLoader +@genType +type handlerArgs<'loaderReturn> = Internal.genericHandlerArgs +@genType +type handler<'loaderReturn> = Internal.genericHandler> +@genType +type contractRegister = Internal.genericContractRegister> + +let paramsRawEventSchema = S.object((s): eventArgs => {delegator: s.field("delegator", Address.schema), fromDelegate: s.field("fromDelegate", Address.schema), toDelegate: s.field("toDelegate", Address.schema)}) +let blockSchema = Block.schema +let transactionSchema = Transaction.schema + +let handlerRegister: EventRegister.t = EventRegister.make( + ~contractName, + ~eventName=name, +) + +@genType +type eventFilter = {@as("delegator") delegator?: SingleOrMultiple.t, @as("fromDelegate") fromDelegate?: SingleOrMultiple.t, @as("toDelegate") toDelegate?: SingleOrMultiple.t} + +@genType type eventFiltersArgs = {/** The unique identifier of the blockchain network where this event occurred. */ chainId: chainId, /** Addresses of the contracts indexing the event. 
*/ addresses: array} + +@genType @unboxed type eventFiltersDefinition = Single(eventFilter) | Multiple(array) + +@genType @unboxed type eventFilters = | ...eventFiltersDefinition | Dynamic(eventFiltersArgs => eventFiltersDefinition) + +let register = (): Internal.evmEventConfig => { + let {getEventFiltersOrThrow, filterByAddresses} = LogSelection.parseEventFiltersOrThrow(~eventFilters=handlerRegister->EventRegister.getEventFilters, ~sighash, ~params=["delegator","fromDelegate","toDelegate",], ~topic1=(_eventFilter) => _eventFilter->Utils.Dict.dangerouslyGetNonOption("delegator")->Belt.Option.mapWithDefault([], topicFilters => topicFilters->Obj.magic->SingleOrMultiple.normalizeOrThrow->Belt.Array.map(TopicFilter.fromAddress)), ~topic2=(_eventFilter) => _eventFilter->Utils.Dict.dangerouslyGetNonOption("fromDelegate")->Belt.Option.mapWithDefault([], topicFilters => topicFilters->Obj.magic->SingleOrMultiple.normalizeOrThrow->Belt.Array.map(TopicFilter.fromAddress)), ~topic3=(_eventFilter) => _eventFilter->Utils.Dict.dangerouslyGetNonOption("toDelegate")->Belt.Option.mapWithDefault([], topicFilters => topicFilters->Obj.magic->SingleOrMultiple.normalizeOrThrow->Belt.Array.map(TopicFilter.fromAddress))) + { + getEventFiltersOrThrow, + filterByAddresses, + dependsOnAddresses: !(handlerRegister->EventRegister.isWildcard) || filterByAddresses, + blockSchema: blockSchema->(Utils.magic: S.t => S.t), + transactionSchema: transactionSchema->(Utils.magic: S.t => S.t), + convertHyperSyncEventArgs: (decodedEvent: HyperSyncClient.Decoder.decodedEvent) => {delegator: decodedEvent.indexed->Js.Array2.unsafe_get(0)->HyperSyncClient.Decoder.toUnderlying->Utils.magic, fromDelegate: decodedEvent.indexed->Js.Array2.unsafe_get(1)->HyperSyncClient.Decoder.toUnderlying->Utils.magic, toDelegate: decodedEvent.indexed->Js.Array2.unsafe_get(2)->HyperSyncClient.Decoder.toUnderlying->Utils.magic, }->(Utils.magic: eventArgs => Internal.eventParams), + id, + name, + contractName, + isWildcard: 
(handlerRegister->EventRegister.isWildcard), + handler: handlerRegister->EventRegister.getHandler, + contractRegister: handlerRegister->EventRegister.getContractRegister, + paramsRawEventSchema: paramsRawEventSchema->(Utils.magic: S.t => S.t), + } +} +} + +module DelegateVotesChanged = { + +let id = "0xdec2bacdd2f05b59de34da9b523dff8be42e5e38e818c82fdb0bae774387a724_2" +let sighash = "0xdec2bacdd2f05b59de34da9b523dff8be42e5e38e818c82fdb0bae774387a724" +let name = "DelegateVotesChanged" +let contractName = contractName + +@genType +type eventArgs = {delegate: Address.t, previousBalance: bigint, newBalance: bigint} +@genType +type block = Block.t +@genType +type transaction = Transaction.t + +@genType +type event = { + /** The parameters or arguments associated with this event. */ + params: eventArgs, + /** The unique identifier of the blockchain network where this event occurred. */ + chainId: chainId, + /** The address of the contract that emitted this event. */ + srcAddress: Address.t, + /** The index of this event's log within the block. */ + logIndex: int, + /** The transaction that triggered this event. Configurable in `config.yaml` via the `field_selection` option. */ + transaction: transaction, + /** The block in which this event was recorded. Configurable in `config.yaml` via the `field_selection` option. 
*/ + block: block, +} + +@genType +type loaderArgs = Internal.genericLoaderArgs +@genType +type loader<'loaderReturn> = Internal.genericLoader +@genType +type handlerArgs<'loaderReturn> = Internal.genericHandlerArgs +@genType +type handler<'loaderReturn> = Internal.genericHandler> +@genType +type contractRegister = Internal.genericContractRegister> + +let paramsRawEventSchema = S.object((s): eventArgs => {delegate: s.field("delegate", Address.schema), previousBalance: s.field("previousBalance", BigInt.schema), newBalance: s.field("newBalance", BigInt.schema)}) +let blockSchema = Block.schema +let transactionSchema = Transaction.schema + +let handlerRegister: EventRegister.t = EventRegister.make( + ~contractName, + ~eventName=name, +) + +@genType +type eventFilter = {@as("delegate") delegate?: SingleOrMultiple.t} + +@genType type eventFiltersArgs = {/** The unique identifier of the blockchain network where this event occurred. */ chainId: chainId, /** Addresses of the contracts indexing the event. 
*/ addresses: array} + +@genType @unboxed type eventFiltersDefinition = Single(eventFilter) | Multiple(array) + +@genType @unboxed type eventFilters = | ...eventFiltersDefinition | Dynamic(eventFiltersArgs => eventFiltersDefinition) + +let register = (): Internal.evmEventConfig => { + let {getEventFiltersOrThrow, filterByAddresses} = LogSelection.parseEventFiltersOrThrow(~eventFilters=handlerRegister->EventRegister.getEventFilters, ~sighash, ~params=["delegate",], ~topic1=(_eventFilter) => _eventFilter->Utils.Dict.dangerouslyGetNonOption("delegate")->Belt.Option.mapWithDefault([], topicFilters => topicFilters->Obj.magic->SingleOrMultiple.normalizeOrThrow->Belt.Array.map(TopicFilter.fromAddress))) + { + getEventFiltersOrThrow, + filterByAddresses, + dependsOnAddresses: !(handlerRegister->EventRegister.isWildcard) || filterByAddresses, + blockSchema: blockSchema->(Utils.magic: S.t => S.t), + transactionSchema: transactionSchema->(Utils.magic: S.t => S.t), + convertHyperSyncEventArgs: (decodedEvent: HyperSyncClient.Decoder.decodedEvent) => {delegate: decodedEvent.indexed->Js.Array2.unsafe_get(0)->HyperSyncClient.Decoder.toUnderlying->Utils.magic, previousBalance: decodedEvent.body->Js.Array2.unsafe_get(0)->HyperSyncClient.Decoder.toUnderlying->Utils.magic, newBalance: decodedEvent.body->Js.Array2.unsafe_get(1)->HyperSyncClient.Decoder.toUnderlying->Utils.magic, }->(Utils.magic: eventArgs => Internal.eventParams), + id, + name, + contractName, + isWildcard: (handlerRegister->EventRegister.isWildcard), + handler: handlerRegister->EventRegister.getHandler, + contractRegister: handlerRegister->EventRegister.getContractRegister, + paramsRawEventSchema: paramsRawEventSchema->(Utils.magic: S.t => S.t), + } +} +} +} + +@genType +type chainId = int + +@genType +type chain = [#1] diff --git a/apps/hypersync-indexer/generated/src/Types.ts b/apps/hypersync-indexer/generated/src/Types.ts new file mode 100644 index 000000000..232ca6123 --- /dev/null +++ 
b/apps/hypersync-indexer/generated/src/Types.ts @@ -0,0 +1,832 @@ +// This file is to dynamically generate TS types +// which we can't get using GenType +// Use @genType.import to link the types back to ReScript code + +import type { Logger, EffectCaller } from "envio"; +import type * as Entities from "./db/Entities.gen.ts"; + +export type LoaderContext = { + /** + * Access the logger instance with event as a context. The logs will be displayed in the console and Envio Hosted Service. + */ + readonly log: Logger; + /** + * Call the provided Effect with the given input. + * Effects are the best for external calls with automatic deduplication, error handling and caching. + * Define a new Effect using createEffect outside of the handler. + */ + readonly effect: EffectCaller; + /** + * True when the handlers run in preload mode - in parallel for the whole batch. + * Handlers run twice per batch of events, and the first time is the "preload" run + * During preload entities aren't set, logs are ignored and exceptions are silently swallowed. + * Preload mode is the best time to populate data to in-memory cache. + * After preload the handler will run for the second time in sequential order of events. + */ + readonly isPreload: boolean; + /** + * Per-chain state information accessible in event handlers and block handlers. + * Each chain ID maps to an object containing chain-specific state: + * - isReady: true when the chain has completed initial sync and is processing live events, + * false during historical synchronization + */ + readonly chains: { + [chainId: string]: { + readonly isReady: boolean; + }; + }; + readonly Account: { + /** + * Load the entity Account from the storage by ID. + * If the entity is not found, returns undefined. + */ + readonly get: (id: string) => Promise, + /** + * Load the entity Account from the storage by ID. + * If the entity is not found, throws an error. 
+ */ + readonly getOrThrow: (id: string, message?: string) => Promise, + readonly getWhere: Entities.Account_indexedFieldOperations, + /** + * Returns the entity Account from the storage by ID. + * If the entity is not found, creates it using provided parameters and returns it. + */ + readonly getOrCreate: (entity: Entities.Account_t) => Promise, + /** + * Set the entity Account in the storage. + */ + readonly set: (entity: Entities.Account_t) => void, + /** + * Delete the entity Account from the storage. + * + * The 'deleteUnsafe' method is experimental and unsafe. You should manually handle all entity references after deletion to maintain database consistency. + */ + readonly deleteUnsafe: (id: string) => void, + } + readonly AccountBalance: { + /** + * Load the entity AccountBalance from the storage by ID. + * If the entity is not found, returns undefined. + */ + readonly get: (id: string) => Promise, + /** + * Load the entity AccountBalance from the storage by ID. + * If the entity is not found, throws an error. + */ + readonly getOrThrow: (id: string, message?: string) => Promise, + readonly getWhere: Entities.AccountBalance_indexedFieldOperations, + /** + * Returns the entity AccountBalance from the storage by ID. + * If the entity is not found, creates it using provided parameters and returns it. + */ + readonly getOrCreate: (entity: Entities.AccountBalance_t) => Promise, + /** + * Set the entity AccountBalance in the storage. + */ + readonly set: (entity: Entities.AccountBalance_t) => void, + /** + * Delete the entity AccountBalance from the storage. + * + * The 'deleteUnsafe' method is experimental and unsafe. You should manually handle all entity references after deletion to maintain database consistency. + */ + readonly deleteUnsafe: (id: string) => void, + } + readonly AccountPower: { + /** + * Load the entity AccountPower from the storage by ID. + * If the entity is not found, returns undefined. 
+ */ + readonly get: (id: string) => Promise, + /** + * Load the entity AccountPower from the storage by ID. + * If the entity is not found, throws an error. + */ + readonly getOrThrow: (id: string, message?: string) => Promise, + readonly getWhere: Entities.AccountPower_indexedFieldOperations, + /** + * Returns the entity AccountPower from the storage by ID. + * If the entity is not found, creates it using provided parameters and returns it. + */ + readonly getOrCreate: (entity: Entities.AccountPower_t) => Promise, + /** + * Set the entity AccountPower in the storage. + */ + readonly set: (entity: Entities.AccountPower_t) => void, + /** + * Delete the entity AccountPower from the storage. + * + * The 'deleteUnsafe' method is experimental and unsafe. You should manually handle all entity references after deletion to maintain database consistency. + */ + readonly deleteUnsafe: (id: string) => void, + } + readonly BalanceHistory: { + /** + * Load the entity BalanceHistory from the storage by ID. + * If the entity is not found, returns undefined. + */ + readonly get: (id: string) => Promise, + /** + * Load the entity BalanceHistory from the storage by ID. + * If the entity is not found, throws an error. + */ + readonly getOrThrow: (id: string, message?: string) => Promise, + readonly getWhere: Entities.BalanceHistory_indexedFieldOperations, + /** + * Returns the entity BalanceHistory from the storage by ID. + * If the entity is not found, creates it using provided parameters and returns it. + */ + readonly getOrCreate: (entity: Entities.BalanceHistory_t) => Promise, + /** + * Set the entity BalanceHistory in the storage. + */ + readonly set: (entity: Entities.BalanceHistory_t) => void, + /** + * Delete the entity BalanceHistory from the storage. + * + * The 'deleteUnsafe' method is experimental and unsafe. You should manually handle all entity references after deletion to maintain database consistency. 
+ */ + readonly deleteUnsafe: (id: string) => void, + } + readonly DaoMetricsDayBucket: { + /** + * Load the entity DaoMetricsDayBucket from the storage by ID. + * If the entity is not found, returns undefined. + */ + readonly get: (id: string) => Promise, + /** + * Load the entity DaoMetricsDayBucket from the storage by ID. + * If the entity is not found, throws an error. + */ + readonly getOrThrow: (id: string, message?: string) => Promise, + readonly getWhere: Entities.DaoMetricsDayBucket_indexedFieldOperations, + /** + * Returns the entity DaoMetricsDayBucket from the storage by ID. + * If the entity is not found, creates it using provided parameters and returns it. + */ + readonly getOrCreate: (entity: Entities.DaoMetricsDayBucket_t) => Promise, + /** + * Set the entity DaoMetricsDayBucket in the storage. + */ + readonly set: (entity: Entities.DaoMetricsDayBucket_t) => void, + /** + * Delete the entity DaoMetricsDayBucket from the storage. + * + * The 'deleteUnsafe' method is experimental and unsafe. You should manually handle all entity references after deletion to maintain database consistency. + */ + readonly deleteUnsafe: (id: string) => void, + } + readonly Delegation: { + /** + * Load the entity Delegation from the storage by ID. + * If the entity is not found, returns undefined. + */ + readonly get: (id: string) => Promise, + /** + * Load the entity Delegation from the storage by ID. + * If the entity is not found, throws an error. + */ + readonly getOrThrow: (id: string, message?: string) => Promise, + readonly getWhere: Entities.Delegation_indexedFieldOperations, + /** + * Returns the entity Delegation from the storage by ID. + * If the entity is not found, creates it using provided parameters and returns it. + */ + readonly getOrCreate: (entity: Entities.Delegation_t) => Promise, + /** + * Set the entity Delegation in the storage. + */ + readonly set: (entity: Entities.Delegation_t) => void, + /** + * Delete the entity Delegation from the storage. 
+ * + * The 'deleteUnsafe' method is experimental and unsafe. You should manually handle all entity references after deletion to maintain database consistency. + */ + readonly deleteUnsafe: (id: string) => void, + } + readonly FeedEvent: { + /** + * Load the entity FeedEvent from the storage by ID. + * If the entity is not found, returns undefined. + */ + readonly get: (id: string) => Promise, + /** + * Load the entity FeedEvent from the storage by ID. + * If the entity is not found, throws an error. + */ + readonly getOrThrow: (id: string, message?: string) => Promise, + readonly getWhere: Entities.FeedEvent_indexedFieldOperations, + /** + * Returns the entity FeedEvent from the storage by ID. + * If the entity is not found, creates it using provided parameters and returns it. + */ + readonly getOrCreate: (entity: Entities.FeedEvent_t) => Promise, + /** + * Set the entity FeedEvent in the storage. + */ + readonly set: (entity: Entities.FeedEvent_t) => void, + /** + * Delete the entity FeedEvent from the storage. + * + * The 'deleteUnsafe' method is experimental and unsafe. You should manually handle all entity references after deletion to maintain database consistency. + */ + readonly deleteUnsafe: (id: string) => void, + } + readonly ProposalOnchain: { + /** + * Load the entity ProposalOnchain from the storage by ID. + * If the entity is not found, returns undefined. + */ + readonly get: (id: string) => Promise, + /** + * Load the entity ProposalOnchain from the storage by ID. + * If the entity is not found, throws an error. + */ + readonly getOrThrow: (id: string, message?: string) => Promise, + readonly getWhere: Entities.ProposalOnchain_indexedFieldOperations, + /** + * Returns the entity ProposalOnchain from the storage by ID. + * If the entity is not found, creates it using provided parameters and returns it. + */ + readonly getOrCreate: (entity: Entities.ProposalOnchain_t) => Promise, + /** + * Set the entity ProposalOnchain in the storage. 
+ */ + readonly set: (entity: Entities.ProposalOnchain_t) => void, + /** + * Delete the entity ProposalOnchain from the storage. + * + * The 'deleteUnsafe' method is experimental and unsafe. You should manually handle all entity references after deletion to maintain database consistency. + */ + readonly deleteUnsafe: (id: string) => void, + } + readonly Token: { + /** + * Load the entity Token from the storage by ID. + * If the entity is not found, returns undefined. + */ + readonly get: (id: string) => Promise, + /** + * Load the entity Token from the storage by ID. + * If the entity is not found, throws an error. + */ + readonly getOrThrow: (id: string, message?: string) => Promise, + readonly getWhere: Entities.Token_indexedFieldOperations, + /** + * Returns the entity Token from the storage by ID. + * If the entity is not found, creates it using provided parameters and returns it. + */ + readonly getOrCreate: (entity: Entities.Token_t) => Promise, + /** + * Set the entity Token in the storage. + */ + readonly set: (entity: Entities.Token_t) => void, + /** + * Delete the entity Token from the storage. + * + * The 'deleteUnsafe' method is experimental and unsafe. You should manually handle all entity references after deletion to maintain database consistency. + */ + readonly deleteUnsafe: (id: string) => void, + } + readonly TokenPrice: { + /** + * Load the entity TokenPrice from the storage by ID. + * If the entity is not found, returns undefined. + */ + readonly get: (id: string) => Promise, + /** + * Load the entity TokenPrice from the storage by ID. + * If the entity is not found, throws an error. + */ + readonly getOrThrow: (id: string, message?: string) => Promise, + readonly getWhere: Entities.TokenPrice_indexedFieldOperations, + /** + * Returns the entity TokenPrice from the storage by ID. + * If the entity is not found, creates it using provided parameters and returns it. 
+ */ + readonly getOrCreate: (entity: Entities.TokenPrice_t) => Promise, + /** + * Set the entity TokenPrice in the storage. + */ + readonly set: (entity: Entities.TokenPrice_t) => void, + /** + * Delete the entity TokenPrice from the storage. + * + * The 'deleteUnsafe' method is experimental and unsafe. You should manually handle all entity references after deletion to maintain database consistency. + */ + readonly deleteUnsafe: (id: string) => void, + } + readonly Transaction: { + /** + * Load the entity Transaction from the storage by ID. + * If the entity is not found, returns undefined. + */ + readonly get: (id: string) => Promise, + /** + * Load the entity Transaction from the storage by ID. + * If the entity is not found, throws an error. + */ + readonly getOrThrow: (id: string, message?: string) => Promise, + readonly getWhere: Entities.Transaction_indexedFieldOperations, + /** + * Returns the entity Transaction from the storage by ID. + * If the entity is not found, creates it using provided parameters and returns it. + */ + readonly getOrCreate: (entity: Entities.Transaction_t) => Promise, + /** + * Set the entity Transaction in the storage. + */ + readonly set: (entity: Entities.Transaction_t) => void, + /** + * Delete the entity Transaction from the storage. + * + * The 'deleteUnsafe' method is experimental and unsafe. You should manually handle all entity references after deletion to maintain database consistency. + */ + readonly deleteUnsafe: (id: string) => void, + } + readonly Transfer: { + /** + * Load the entity Transfer from the storage by ID. + * If the entity is not found, returns undefined. + */ + readonly get: (id: string) => Promise, + /** + * Load the entity Transfer from the storage by ID. + * If the entity is not found, throws an error. + */ + readonly getOrThrow: (id: string, message?: string) => Promise, + readonly getWhere: Entities.Transfer_indexedFieldOperations, + /** + * Returns the entity Transfer from the storage by ID. 
+ * If the entity is not found, creates it using provided parameters and returns it. + */ + readonly getOrCreate: (entity: Entities.Transfer_t) => Promise, + /** + * Set the entity Transfer in the storage. + */ + readonly set: (entity: Entities.Transfer_t) => void, + /** + * Delete the entity Transfer from the storage. + * + * The 'deleteUnsafe' method is experimental and unsafe. You should manually handle all entity references after deletion to maintain database consistency. + */ + readonly deleteUnsafe: (id: string) => void, + } + readonly VoteOnchain: { + /** + * Load the entity VoteOnchain from the storage by ID. + * If the entity is not found, returns undefined. + */ + readonly get: (id: string) => Promise, + /** + * Load the entity VoteOnchain from the storage by ID. + * If the entity is not found, throws an error. + */ + readonly getOrThrow: (id: string, message?: string) => Promise, + readonly getWhere: Entities.VoteOnchain_indexedFieldOperations, + /** + * Returns the entity VoteOnchain from the storage by ID. + * If the entity is not found, creates it using provided parameters and returns it. + */ + readonly getOrCreate: (entity: Entities.VoteOnchain_t) => Promise, + /** + * Set the entity VoteOnchain in the storage. + */ + readonly set: (entity: Entities.VoteOnchain_t) => void, + /** + * Delete the entity VoteOnchain from the storage. + * + * The 'deleteUnsafe' method is experimental and unsafe. You should manually handle all entity references after deletion to maintain database consistency. + */ + readonly deleteUnsafe: (id: string) => void, + } + readonly VotingPowerHistory: { + /** + * Load the entity VotingPowerHistory from the storage by ID. + * If the entity is not found, returns undefined. + */ + readonly get: (id: string) => Promise, + /** + * Load the entity VotingPowerHistory from the storage by ID. + * If the entity is not found, throws an error. 
+ */ + readonly getOrThrow: (id: string, message?: string) => Promise, + readonly getWhere: Entities.VotingPowerHistory_indexedFieldOperations, + /** + * Returns the entity VotingPowerHistory from the storage by ID. + * If the entity is not found, creates it using provided parameters and returns it. + */ + readonly getOrCreate: (entity: Entities.VotingPowerHistory_t) => Promise, + /** + * Set the entity VotingPowerHistory in the storage. + */ + readonly set: (entity: Entities.VotingPowerHistory_t) => void, + /** + * Delete the entity VotingPowerHistory from the storage. + * + * The 'deleteUnsafe' method is experimental and unsafe. You should manually handle all entity references after deletion to maintain database consistency. + */ + readonly deleteUnsafe: (id: string) => void, + } +}; + +export type HandlerContext = { + /** + * Access the logger instance with event as a context. The logs will be displayed in the console and Envio Hosted Service. + */ + readonly log: Logger; + /** + * Call the provided Effect with the given input. + * Effects are the best for external calls with automatic deduplication, error handling and caching. + * Define a new Effect using createEffect outside of the handler. + */ + readonly effect: EffectCaller; + /** + * Per-chain state information accessible in event handlers and block handlers. + * Each chain ID maps to an object containing chain-specific state: + * - isReady: true when the chain has completed initial sync and is processing live events, + * false during historical synchronization + */ + readonly chains: { + [chainId: string]: { + readonly isReady: boolean; + }; + }; + readonly Account: { + /** + * Load the entity Account from the storage by ID. + * If the entity is not found, returns undefined. + */ + readonly get: (id: string) => Promise, + /** + * Load the entity Account from the storage by ID. + * If the entity is not found, throws an error. 
+ */ + readonly getOrThrow: (id: string, message?: string) => Promise, + /** + * Returns the entity Account from the storage by ID. + * If the entity is not found, creates it using provided parameters and returns it. + */ + readonly getOrCreate: (entity: Entities.Account_t) => Promise, + /** + * Set the entity Account in the storage. + */ + readonly set: (entity: Entities.Account_t) => void, + /** + * Delete the entity Account from the storage. + * + * The 'deleteUnsafe' method is experimental and unsafe. You should manually handle all entity references after deletion to maintain database consistency. + */ + readonly deleteUnsafe: (id: string) => void, + } + readonly AccountBalance: { + /** + * Load the entity AccountBalance from the storage by ID. + * If the entity is not found, returns undefined. + */ + readonly get: (id: string) => Promise, + /** + * Load the entity AccountBalance from the storage by ID. + * If the entity is not found, throws an error. + */ + readonly getOrThrow: (id: string, message?: string) => Promise, + /** + * Returns the entity AccountBalance from the storage by ID. + * If the entity is not found, creates it using provided parameters and returns it. + */ + readonly getOrCreate: (entity: Entities.AccountBalance_t) => Promise, + /** + * Set the entity AccountBalance in the storage. + */ + readonly set: (entity: Entities.AccountBalance_t) => void, + /** + * Delete the entity AccountBalance from the storage. + * + * The 'deleteUnsafe' method is experimental and unsafe. You should manually handle all entity references after deletion to maintain database consistency. + */ + readonly deleteUnsafe: (id: string) => void, + } + readonly AccountPower: { + /** + * Load the entity AccountPower from the storage by ID. + * If the entity is not found, returns undefined. + */ + readonly get: (id: string) => Promise, + /** + * Load the entity AccountPower from the storage by ID. + * If the entity is not found, throws an error. 
+ */ + readonly getOrThrow: (id: string, message?: string) => Promise, + /** + * Returns the entity AccountPower from the storage by ID. + * If the entity is not found, creates it using provided parameters and returns it. + */ + readonly getOrCreate: (entity: Entities.AccountPower_t) => Promise, + /** + * Set the entity AccountPower in the storage. + */ + readonly set: (entity: Entities.AccountPower_t) => void, + /** + * Delete the entity AccountPower from the storage. + * + * The 'deleteUnsafe' method is experimental and unsafe. You should manually handle all entity references after deletion to maintain database consistency. + */ + readonly deleteUnsafe: (id: string) => void, + } + readonly BalanceHistory: { + /** + * Load the entity BalanceHistory from the storage by ID. + * If the entity is not found, returns undefined. + */ + readonly get: (id: string) => Promise, + /** + * Load the entity BalanceHistory from the storage by ID. + * If the entity is not found, throws an error. + */ + readonly getOrThrow: (id: string, message?: string) => Promise, + /** + * Returns the entity BalanceHistory from the storage by ID. + * If the entity is not found, creates it using provided parameters and returns it. + */ + readonly getOrCreate: (entity: Entities.BalanceHistory_t) => Promise, + /** + * Set the entity BalanceHistory in the storage. + */ + readonly set: (entity: Entities.BalanceHistory_t) => void, + /** + * Delete the entity BalanceHistory from the storage. + * + * The 'deleteUnsafe' method is experimental and unsafe. You should manually handle all entity references after deletion to maintain database consistency. + */ + readonly deleteUnsafe: (id: string) => void, + } + readonly DaoMetricsDayBucket: { + /** + * Load the entity DaoMetricsDayBucket from the storage by ID. + * If the entity is not found, returns undefined. + */ + readonly get: (id: string) => Promise, + /** + * Load the entity DaoMetricsDayBucket from the storage by ID. 
+ * If the entity is not found, throws an error. + */ + readonly getOrThrow: (id: string, message?: string) => Promise, + /** + * Returns the entity DaoMetricsDayBucket from the storage by ID. + * If the entity is not found, creates it using provided parameters and returns it. + */ + readonly getOrCreate: (entity: Entities.DaoMetricsDayBucket_t) => Promise, + /** + * Set the entity DaoMetricsDayBucket in the storage. + */ + readonly set: (entity: Entities.DaoMetricsDayBucket_t) => void, + /** + * Delete the entity DaoMetricsDayBucket from the storage. + * + * The 'deleteUnsafe' method is experimental and unsafe. You should manually handle all entity references after deletion to maintain database consistency. + */ + readonly deleteUnsafe: (id: string) => void, + } + readonly Delegation: { + /** + * Load the entity Delegation from the storage by ID. + * If the entity is not found, returns undefined. + */ + readonly get: (id: string) => Promise, + /** + * Load the entity Delegation from the storage by ID. + * If the entity is not found, throws an error. + */ + readonly getOrThrow: (id: string, message?: string) => Promise, + /** + * Returns the entity Delegation from the storage by ID. + * If the entity is not found, creates it using provided parameters and returns it. + */ + readonly getOrCreate: (entity: Entities.Delegation_t) => Promise, + /** + * Set the entity Delegation in the storage. + */ + readonly set: (entity: Entities.Delegation_t) => void, + /** + * Delete the entity Delegation from the storage. + * + * The 'deleteUnsafe' method is experimental and unsafe. You should manually handle all entity references after deletion to maintain database consistency. + */ + readonly deleteUnsafe: (id: string) => void, + } + readonly FeedEvent: { + /** + * Load the entity FeedEvent from the storage by ID. + * If the entity is not found, returns undefined. + */ + readonly get: (id: string) => Promise, + /** + * Load the entity FeedEvent from the storage by ID. 
+ * If the entity is not found, throws an error. + */ + readonly getOrThrow: (id: string, message?: string) => Promise, + /** + * Returns the entity FeedEvent from the storage by ID. + * If the entity is not found, creates it using provided parameters and returns it. + */ + readonly getOrCreate: (entity: Entities.FeedEvent_t) => Promise, + /** + * Set the entity FeedEvent in the storage. + */ + readonly set: (entity: Entities.FeedEvent_t) => void, + /** + * Delete the entity FeedEvent from the storage. + * + * The 'deleteUnsafe' method is experimental and unsafe. You should manually handle all entity references after deletion to maintain database consistency. + */ + readonly deleteUnsafe: (id: string) => void, + } + readonly ProposalOnchain: { + /** + * Load the entity ProposalOnchain from the storage by ID. + * If the entity is not found, returns undefined. + */ + readonly get: (id: string) => Promise, + /** + * Load the entity ProposalOnchain from the storage by ID. + * If the entity is not found, throws an error. + */ + readonly getOrThrow: (id: string, message?: string) => Promise, + /** + * Returns the entity ProposalOnchain from the storage by ID. + * If the entity is not found, creates it using provided parameters and returns it. + */ + readonly getOrCreate: (entity: Entities.ProposalOnchain_t) => Promise, + /** + * Set the entity ProposalOnchain in the storage. + */ + readonly set: (entity: Entities.ProposalOnchain_t) => void, + /** + * Delete the entity ProposalOnchain from the storage. + * + * The 'deleteUnsafe' method is experimental and unsafe. You should manually handle all entity references after deletion to maintain database consistency. + */ + readonly deleteUnsafe: (id: string) => void, + } + readonly Token: { + /** + * Load the entity Token from the storage by ID. + * If the entity is not found, returns undefined. + */ + readonly get: (id: string) => Promise, + /** + * Load the entity Token from the storage by ID. 
+ * If the entity is not found, throws an error. + */ + readonly getOrThrow: (id: string, message?: string) => Promise, + /** + * Returns the entity Token from the storage by ID. + * If the entity is not found, creates it using provided parameters and returns it. + */ + readonly getOrCreate: (entity: Entities.Token_t) => Promise, + /** + * Set the entity Token in the storage. + */ + readonly set: (entity: Entities.Token_t) => void, + /** + * Delete the entity Token from the storage. + * + * The 'deleteUnsafe' method is experimental and unsafe. You should manually handle all entity references after deletion to maintain database consistency. + */ + readonly deleteUnsafe: (id: string) => void, + } + readonly TokenPrice: { + /** + * Load the entity TokenPrice from the storage by ID. + * If the entity is not found, returns undefined. + */ + readonly get: (id: string) => Promise, + /** + * Load the entity TokenPrice from the storage by ID. + * If the entity is not found, throws an error. + */ + readonly getOrThrow: (id: string, message?: string) => Promise, + /** + * Returns the entity TokenPrice from the storage by ID. + * If the entity is not found, creates it using provided parameters and returns it. + */ + readonly getOrCreate: (entity: Entities.TokenPrice_t) => Promise, + /** + * Set the entity TokenPrice in the storage. + */ + readonly set: (entity: Entities.TokenPrice_t) => void, + /** + * Delete the entity TokenPrice from the storage. + * + * The 'deleteUnsafe' method is experimental and unsafe. You should manually handle all entity references after deletion to maintain database consistency. + */ + readonly deleteUnsafe: (id: string) => void, + } + readonly Transaction: { + /** + * Load the entity Transaction from the storage by ID. + * If the entity is not found, returns undefined. + */ + readonly get: (id: string) => Promise, + /** + * Load the entity Transaction from the storage by ID. + * If the entity is not found, throws an error. 
+ */ + readonly getOrThrow: (id: string, message?: string) => Promise, + /** + * Returns the entity Transaction from the storage by ID. + * If the entity is not found, creates it using provided parameters and returns it. + */ + readonly getOrCreate: (entity: Entities.Transaction_t) => Promise, + /** + * Set the entity Transaction in the storage. + */ + readonly set: (entity: Entities.Transaction_t) => void, + /** + * Delete the entity Transaction from the storage. + * + * The 'deleteUnsafe' method is experimental and unsafe. You should manually handle all entity references after deletion to maintain database consistency. + */ + readonly deleteUnsafe: (id: string) => void, + } + readonly Transfer: { + /** + * Load the entity Transfer from the storage by ID. + * If the entity is not found, returns undefined. + */ + readonly get: (id: string) => Promise, + /** + * Load the entity Transfer from the storage by ID. + * If the entity is not found, throws an error. + */ + readonly getOrThrow: (id: string, message?: string) => Promise, + /** + * Returns the entity Transfer from the storage by ID. + * If the entity is not found, creates it using provided parameters and returns it. + */ + readonly getOrCreate: (entity: Entities.Transfer_t) => Promise, + /** + * Set the entity Transfer in the storage. + */ + readonly set: (entity: Entities.Transfer_t) => void, + /** + * Delete the entity Transfer from the storage. + * + * The 'deleteUnsafe' method is experimental and unsafe. You should manually handle all entity references after deletion to maintain database consistency. + */ + readonly deleteUnsafe: (id: string) => void, + } + readonly VoteOnchain: { + /** + * Load the entity VoteOnchain from the storage by ID. + * If the entity is not found, returns undefined. + */ + readonly get: (id: string) => Promise, + /** + * Load the entity VoteOnchain from the storage by ID. + * If the entity is not found, throws an error. 
+ */ + readonly getOrThrow: (id: string, message?: string) => Promise, + /** + * Returns the entity VoteOnchain from the storage by ID. + * If the entity is not found, creates it using provided parameters and returns it. + */ + readonly getOrCreate: (entity: Entities.VoteOnchain_t) => Promise, + /** + * Set the entity VoteOnchain in the storage. + */ + readonly set: (entity: Entities.VoteOnchain_t) => void, + /** + * Delete the entity VoteOnchain from the storage. + * + * The 'deleteUnsafe' method is experimental and unsafe. You should manually handle all entity references after deletion to maintain database consistency. + */ + readonly deleteUnsafe: (id: string) => void, + } + readonly VotingPowerHistory: { + /** + * Load the entity VotingPowerHistory from the storage by ID. + * If the entity is not found, returns undefined. + */ + readonly get: (id: string) => Promise, + /** + * Load the entity VotingPowerHistory from the storage by ID. + * If the entity is not found, throws an error. + */ + readonly getOrThrow: (id: string, message?: string) => Promise, + /** + * Returns the entity VotingPowerHistory from the storage by ID. + * If the entity is not found, creates it using provided parameters and returns it. + */ + readonly getOrCreate: (entity: Entities.VotingPowerHistory_t) => Promise, + /** + * Set the entity VotingPowerHistory in the storage. + */ + readonly set: (entity: Entities.VotingPowerHistory_t) => void, + /** + * Delete the entity VotingPowerHistory from the storage. + * + * The 'deleteUnsafe' method is experimental and unsafe. You should manually handle all entity references after deletion to maintain database consistency. 
+ */ + readonly deleteUnsafe: (id: string) => void, + } +}; diff --git a/apps/hypersync-indexer/generated/src/UserContext.res b/apps/hypersync-indexer/generated/src/UserContext.res new file mode 100644 index 000000000..3101e4e18 --- /dev/null +++ b/apps/hypersync-indexer/generated/src/UserContext.res @@ -0,0 +1,360 @@ +let codegenHelpMessage = `Rerun 'pnpm dev' to update generated code after schema.graphql changes.` + +type contextParams = { + item: Internal.item, + checkpointId: int, + inMemoryStore: InMemoryStore.t, + loadManager: LoadManager.t, + persistence: Persistence.t, + isPreload: bool, + shouldSaveHistory: bool, + chains: Internal.chains, + mutable isResolved: bool, +} + +// We don't want to expose the params to the user +// so instead of storing _params on the context object, +// we use an external WeakMap +let paramsByThis: Utils.WeakMap.t = Utils.WeakMap.make() + +let effectContextPrototype = %raw(`Object.create(null)`) +Utils.Object.defineProperty( + effectContextPrototype, + "log", + { + get: () => { + (paramsByThis->Utils.WeakMap.unsafeGet(%raw(`this`))).item->Logging.getUserLogger + }, + }, +) +%%raw(` +var EffectContext = function(params, defaultShouldCache, callEffect) { + paramsByThis.set(this, params); + this.effect = callEffect; + this.cache = defaultShouldCache; +}; +EffectContext.prototype = effectContextPrototype; +`) + +@new +external makeEffectContext: ( + contextParams, + ~defaultShouldCache: bool, + ~callEffect: (Internal.effect, Internal.effectInput) => promise, +) => Internal.effectContext = "EffectContext" + +let initEffect = (params: contextParams) => { + let rec callEffect = (effect: Internal.effect, input: Internal.effectInput) => { + let effectContext = makeEffectContext( + params, + ~defaultShouldCache=effect.defaultShouldCache, + ~callEffect, + ) + let effectArgs: Internal.effectArgs = { + input, + context: effectContext, + cacheKey: input->S.reverseConvertOrThrow(effect.input)->Utils.Hash.makeOrThrow, + } + 
LoadLayer.loadEffect( + ~loadManager=params.loadManager, + ~persistence=params.persistence, + ~effect, + ~effectArgs, + ~inMemoryStore=params.inMemoryStore, + ~shouldGroup=params.isPreload, + ~item=params.item, + ) + } + callEffect +} + +type entityContextParams = { + ...contextParams, + entityConfig: Internal.entityConfig, +} + +let getWhereTraps: Utils.Proxy.traps = { + get: (~target as params, ~prop: unknown) => { + let entityConfig = params.entityConfig + if prop->Js.typeof !== "string" { + Js.Exn.raiseError( + `Invalid context.${entityConfig.name}.getWhere access by a non-string property.`, + ) + } else { + let dbFieldName = prop->(Utils.magic: unknown => string) + switch entityConfig.table->Table.getFieldByDbName(dbFieldName) { + | None => + Js.Exn.raiseError( + `Invalid context.${entityConfig.name}.getWhere.${dbFieldName} - the field doesn't exist. ${codegenHelpMessage}`, + ) + | Some(field) => + let fieldValueSchema = switch field { + | Field({fieldSchema}) => fieldSchema + | DerivedFrom(_) => S.string->S.toUnknown + } + { + Entities.eq: fieldValue => + LoadLayer.loadByField( + ~loadManager=params.loadManager, + ~persistence=params.persistence, + ~operator=Eq, + ~entityConfig, + ~fieldName=dbFieldName, + ~fieldValueSchema, + ~inMemoryStore=params.inMemoryStore, + ~shouldGroup=params.isPreload, + ~item=params.item, + ~fieldValue, + ), + gt: fieldValue => + LoadLayer.loadByField( + ~loadManager=params.loadManager, + ~persistence=params.persistence, + ~operator=Gt, + ~entityConfig, + ~fieldName=dbFieldName, + ~fieldValueSchema, + ~inMemoryStore=params.inMemoryStore, + ~shouldGroup=params.isPreload, + ~item=params.item, + ~fieldValue, + ), + lt: fieldValue => + LoadLayer.loadByField( + ~loadManager=params.loadManager, + ~persistence=params.persistence, + ~operator=Lt, + ~entityConfig, + ~fieldName=dbFieldName, + ~fieldValueSchema, + ~inMemoryStore=params.inMemoryStore, + ~shouldGroup=params.isPreload, + ~item=params.item, + ~fieldValue, + ), + }->Utils.magic + 
} + } + }, +} + +let noopSet = (_entity: Internal.entity) => () +let noopDeleteUnsafe = (_entityId: string) => () + +let entityTraps: Utils.Proxy.traps = { + get: (~target as params, ~prop: unknown) => { + let prop = prop->(Utils.magic: unknown => string) + + let set = params.isPreload + ? noopSet + : (entity: Internal.entity) => { + params.inMemoryStore + ->InMemoryStore.getInMemTable(~entityConfig=params.entityConfig) + ->InMemoryTable.Entity.set( + { + entityId: entity.id, + checkpointId: params.checkpointId, + entityUpdateAction: Set(entity), + }, + ~shouldSaveHistory=params.shouldSaveHistory, + ) + } + + switch prop { + | "get" => + ( + entityId => + LoadLayer.loadById( + ~loadManager=params.loadManager, + ~persistence=params.persistence, + ~entityConfig=params.entityConfig, + ~inMemoryStore=params.inMemoryStore, + ~shouldGroup=params.isPreload, + ~item=params.item, + ~entityId, + ) + )->Utils.magic + | "getWhere" => params->Utils.Proxy.make(getWhereTraps)->Utils.magic + | "getOrThrow" => + ( + (entityId, ~message=?) 
=> + LoadLayer.loadById( + ~loadManager=params.loadManager, + ~persistence=params.persistence, + ~entityConfig=params.entityConfig, + ~inMemoryStore=params.inMemoryStore, + ~shouldGroup=params.isPreload, + ~item=params.item, + ~entityId, + )->Promise.thenResolve(entity => { + switch entity { + | Some(entity) => entity + | None => + Js.Exn.raiseError( + message->Belt.Option.getWithDefault( + `Entity '${params.entityConfig.name}' with ID '${entityId}' is expected to exist.`, + ), + ) + } + }) + )->Utils.magic + | "getOrCreate" => + ( + (entity: Internal.entity) => + LoadLayer.loadById( + ~loadManager=params.loadManager, + ~persistence=params.persistence, + ~entityConfig=params.entityConfig, + ~inMemoryStore=params.inMemoryStore, + ~shouldGroup=params.isPreload, + ~item=params.item, + ~entityId=entity.id, + )->Promise.thenResolve(storageEntity => { + switch storageEntity { + | Some(entity) => entity + | None => { + set(entity) + entity + } + } + }) + )->Utils.magic + | "set" => set->Utils.magic + | "deleteUnsafe" => + if params.isPreload { + noopDeleteUnsafe + } else { + entityId => { + params.inMemoryStore + ->InMemoryStore.getInMemTable(~entityConfig=params.entityConfig) + ->InMemoryTable.Entity.set( + { + entityId, + checkpointId: params.checkpointId, + entityUpdateAction: Delete, + }, + ~shouldSaveHistory=params.shouldSaveHistory, + ) + } + }->Utils.magic + | _ => Js.Exn.raiseError(`Invalid context.${params.entityConfig.name}.${prop} operation.`) + } + }, +} + +let handlerTraps: Utils.Proxy.traps = { + get: (~target as params, ~prop: unknown) => { + let prop = prop->(Utils.magic: unknown => string) + if params.isResolved { + Utils.Error.make( + `Impossible to access context.${prop} after the handler is resolved. Make sure you didn't miss an await in the handler.`, + )->ErrorHandling.mkLogAndRaise(~logger=params.item->Logging.getItemLogger) + } + switch prop { + | "log" => + (params.isPreload ? 
Logging.noopLogger : params.item->Logging.getUserLogger)->Utils.magic + | "effect" => + initEffect((params :> contextParams))->( + Utils.magic: ( + (Internal.effect, Internal.effectInput) => promise + ) => unknown + ) + + | "isPreload" => params.isPreload->Utils.magic + | "chains" => params.chains->Utils.magic + | _ => + switch Entities.byName->Utils.Dict.dangerouslyGetNonOption(prop) { + | Some(entityConfig) => + { + item: params.item, + isPreload: params.isPreload, + inMemoryStore: params.inMemoryStore, + loadManager: params.loadManager, + persistence: params.persistence, + shouldSaveHistory: params.shouldSaveHistory, + checkpointId: params.checkpointId, + chains: params.chains, + isResolved: params.isResolved, + entityConfig, + } + ->Utils.Proxy.make(entityTraps) + ->Utils.magic + | None => + Js.Exn.raiseError(`Invalid context access by '${prop}' property. ${codegenHelpMessage}`) + } + } + }, +} + +let getHandlerContext = (params: contextParams): Internal.handlerContext => { + params->Utils.Proxy.make(handlerTraps)->Utils.magic +} + +// Contract register context creation +type contractRegisterParams = { + item: Internal.item, + onRegister: ( + ~item: Internal.item, + ~contractAddress: Address.t, + ~contractName: Enums.ContractType.t, + ) => unit, + config: Config.t, + mutable isResolved: bool, +} + +let contractRegisterTraps: Utils.Proxy.traps = { + get: (~target as params, ~prop: unknown) => { + let prop = prop->(Utils.magic: unknown => string) + if params.isResolved { + Utils.Error.make( + `Impossible to access context.${prop} after the contract register is resolved. 
Make sure you didn't miss an await in the handler.`, + )->ErrorHandling.mkLogAndRaise(~logger=params.item->Logging.getItemLogger) + } + switch prop { + | "log" => params.item->Logging.getUserLogger->Utils.magic + | _ => + // Use the pre-built mapping for efficient lookup + switch params.config.addContractNameToContractNameMapping->Utils.Dict.dangerouslyGetNonOption( + prop, + ) { + | Some(contractName) => { + let addFunction = (contractAddress: Address.t) => { + let validatedAddress = if params.config.ecosystem === Evm { + // The value is passed from the user-land, + // so we need to validate and checksum/lowercase the address. + if params.config.lowercaseAddresses { + contractAddress->Address.Evm.fromAddressLowercaseOrThrow + } else { + contractAddress->Address.Evm.fromAddressOrThrow + } + } else { + // TODO: Ideally we should do the same for other ecosystems + contractAddress + } + + params.onRegister( + ~item=params.item, + ~contractAddress=validatedAddress, + ~contractName=contractName->(Utils.magic: string => Enums.ContractType.t), + ) + } + + addFunction->Utils.magic + } + | None => + Js.Exn.raiseError(`Invalid context access by '${prop}' property. 
${codegenHelpMessage}`) + } + } + }, +} + +let getContractRegisterContext = (params: contractRegisterParams) => { + params + ->Utils.Proxy.make(contractRegisterTraps) + ->Utils.magic +} + +let getContractRegisterArgs = (params: contractRegisterParams): Internal.contractRegisterArgs => { + event: (params.item->Internal.castUnsafeEventItem).event, + context: getContractRegisterContext(params), +} diff --git a/apps/hypersync-indexer/generated/src/bindings/Dotenv.res b/apps/hypersync-indexer/generated/src/bindings/Dotenv.res new file mode 100644 index 000000000..dffee86dc --- /dev/null +++ b/apps/hypersync-indexer/generated/src/bindings/Dotenv.res @@ -0,0 +1,17 @@ +type config = {path?: string} +type envRes + +@module("dotenv") external config: config => envRes = "config" + +module Utils = { + type require = {resolve: string => string} + external require: require = "require" + + let getEnvFilePath = () => + switch require.resolve(`../../${Path.relativePathToRootFromGenerated}/.env`) { + | path => Some(path) + | exception _exn => None + } +} + +let initialize = () => config({path: ?Utils.getEnvFilePath()})->ignore diff --git a/apps/hypersync-indexer/generated/src/bindings/Ethers.gen.ts b/apps/hypersync-indexer/generated/src/bindings/Ethers.gen.ts new file mode 100644 index 000000000..abe963235 --- /dev/null +++ b/apps/hypersync-indexer/generated/src/bindings/Ethers.gen.ts @@ -0,0 +1,15 @@ +/* +Reexport the types to keep backward compatibility +*/ + +/* eslint-disable */ +/* tslint:disable */ + +import type { t as Address_t } from "envio/src/Address.gen"; +export type { + Addresses_mockAddresses, + Addresses_defaultAddress, + Addresses, +} from "envio/src/bindings/Ethers.gen"; + +export type ethAddress = Address_t; diff --git a/apps/hypersync-indexer/generated/src/bindings/OpaqueTypes.ts b/apps/hypersync-indexer/generated/src/bindings/OpaqueTypes.ts new file mode 100644 index 000000000..285ec704a --- /dev/null +++ 
b/apps/hypersync-indexer/generated/src/bindings/OpaqueTypes.ts @@ -0,0 +1,5 @@ +export type EthersAddress = string; +export type Address = string; +export type Nullable = null | T; +export type SingleOrMultiple = T | T[]; +export type HandlerWithOptions = (fn: Fn, opt?: Opts) => void; diff --git a/apps/hypersync-indexer/generated/src/bindings/RescriptMocha.res b/apps/hypersync-indexer/generated/src/bindings/RescriptMocha.res new file mode 100644 index 000000000..52f857d5e --- /dev/null +++ b/apps/hypersync-indexer/generated/src/bindings/RescriptMocha.res @@ -0,0 +1,123 @@ +module Assert = { + type assertion<'a> = ('a, 'a, ~message: string=?) => unit + + @module("assert") external equal: assertion<'a> = "equal" + @module("assert") external notEqual: assertion<'a> = "notEqual" + + @module("assert") external deepEqual: assertion<'a> = "deepEqual" + @module("assert") + external notDeepEqual: assertion<'a> = "notDeepEqual" + + @module("assert") external strictEqual: assertion<'a> = "strictEqual" + @module("assert") + external notStrictEqual: assertion<'a> = "notStrictEqual" + + @module("assert") + external deepStrictEqual: assertion<'a> = "deepStrictEqual" + @module("assert") + external notDeepStrictEqual: assertion<'a> = "notDeepStrictEqual" + + @module("assert") external ifError: 'a => unit = "ifError" + + @module("assert") + external throws: (unit => 'a, ~error: 'error=?, ~message: string=?) => unit = "throws" + @module("assert") + external doesNotThrow: (unit => 'a, ~error: 'error=?, ~message: string=?) => unit = "doesNotThrow" + + @module("assert") + external rejects: (unit => promise<'a>, ~error: 'error=?, ~message: string=?) => promise = + "rejects" + + @module("assert") external ok: (bool, ~message: string=?) 
=> unit = "ok" + @module("assert") external fail: string => 'a = "fail" +} + +/* Mocha bindings on `this` for `describe` and `it` functions */ +module This = { + @val external timeout: int => unit = "this.timeout" + @val external retries: int => unit = "this.retries" + @val external slow: int => unit = "this.slow" + @val external skip: unit => unit = "this.skip" +} + +@val +external describe: (string, unit => unit) => unit = "describe" +@val +external describe_only: (string, unit => unit) => unit = "describe.only" +@val +external describe_skip: (string, unit => unit) => unit = "describe.skip" + +@val +external it: (string, unit => unit) => unit = "it" +@val +external it_only: (string, unit => unit) => unit = "it.only" +@val +external it_skip: (string, unit => unit) => unit = "it.skip" +@val +external before: (unit => unit) => unit = "before" +@val +external after: (unit => unit) => unit = "after" +@val +external beforeEach: (unit => unit) => unit = "beforeEach" +@val +external afterEach: (unit => unit) => unit = "afterEach" +@val +external beforeWithTitle: (string, unit => unit) => unit = "before" +@val +external afterWithTitle: (string, unit => unit) => unit = "after" +@val +external beforeEachWithTitle: (string, unit => unit) => unit = "beforeEach" +@val +external afterEachWithTitle: (string, unit => unit) => unit = "afterEach" + +module Async = { + @val + external it: (string, unit => promise) => unit = "it" + @val + external it_only: (string, unit => promise) => unit = "it.only" + @val + external it_skip: (string, unit => promise) => unit = "it.skip" + @val + external before: (unit => promise) => unit = "before" + @val + external after: (unit => promise) => unit = "after" + @val + external beforeEach: (unit => promise) => unit = "beforeEach" + @val + external afterEach: (unit => promise) => unit = "afterEach" + @val + external beforeWithTitle: (string, unit => promise) => unit = "before" + @val + external afterWithTitle: (string, unit => promise) => unit = 
"after" + @val + external beforeEachWithTitle: (string, unit => promise) => unit = "beforeEach" + @val + external afterEachWithTitle: (string, unit => promise) => unit = "afterEach" +} + +module DoneCallback = { + type doneCallback = Js.Nullable.t => unit + + @val + external it: (string, doneCallback => unit) => unit = "it" + @val + external it_only: (string, doneCallback => unit) => unit = "it.only" + @val + external it_skip: (string, doneCallback => unit) => unit = "it.skip" + @val + external before: (doneCallback => unit) => unit = "before" + @val + external after: (doneCallback => unit) => unit = "after" + @val + external beforeEach: (doneCallback => unit) => unit = "beforeEach" + @val + external afterEach: (doneCallback => unit) => unit = "afterEach" + @val + external beforeWithTitle: (string, doneCallback => unit) => unit = "before" + @val + external afterWithTitle: (string, doneCallback => unit) => unit = "after" + @val + external beforeEachWithTitle: (string, doneCallback => unit) => unit = "beforeEach" + @val + external afterEachWithTitle: (string, doneCallback => unit) => unit = "afterEach" +} diff --git a/apps/hypersync-indexer/generated/src/bindings/Yargs.res b/apps/hypersync-indexer/generated/src/bindings/Yargs.res new file mode 100644 index 000000000..2df5064ac --- /dev/null +++ b/apps/hypersync-indexer/generated/src/bindings/Yargs.res @@ -0,0 +1,8 @@ +type arg = string + +type parsedArgs<'a> = 'a + +@module external yargs: array => parsedArgs<'a> = "yargs/yargs" +@module("yargs/helpers") external hideBin: array => array = "hideBin" + +@get external argv: parsedArgs<'a> => 'a = "argv" diff --git a/apps/hypersync-indexer/generated/src/db/Db.res b/apps/hypersync-indexer/generated/src/db/Db.res new file mode 100644 index 000000000..571ee1a05 --- /dev/null +++ b/apps/hypersync-indexer/generated/src/db/Db.res @@ -0,0 +1,30 @@ +// This is a module with all the global configuration of the DB +// Ideally it should be moved to the config and passed with it + 
+let makeClient = () => { + Postgres.makeSql( + ~config={ + host: Env.Db.host, + port: Env.Db.port, + username: Env.Db.user, + password: Env.Db.password, + database: Env.Db.database, + ssl: Env.Db.ssl, + // TODO: think how we want to pipe these logs to pino. + onnotice: ?( + Env.userLogLevel == #warn || Env.userLogLevel == #error ? None : Some(_str => ()) + ), + transform: {undefined: Null}, + max: 2, + // debug: (~connection, ~query, ~params as _, ~types as _) => Js.log2(connection, query), + }, + ) +} + +let publicSchema = Env.Db.publicSchema + +let allEntityTables: array = Entities.allEntities->Belt.Array.map(entityConfig => { + entityConfig.table +}) + +let schema = Schema.make(allEntityTables) diff --git a/apps/hypersync-indexer/generated/src/db/DbFunctions.res b/apps/hypersync-indexer/generated/src/db/DbFunctions.res new file mode 100644 index 000000000..f4d6ab967 --- /dev/null +++ b/apps/hypersync-indexer/generated/src/db/DbFunctions.res @@ -0,0 +1,33 @@ +module General = { + type existsRes = {exists: bool} + + let hasRows = async (sql, ~table: Table.table) => { + let query = `SELECT EXISTS(SELECT 1 FROM "${Env.Db.publicSchema}"."${table.tableName}");` + switch await sql->Postgres.unsafe(query) { + | [{exists}] => exists + | _ => Js.Exn.raiseError("Unexpected result from hasRows query: " ++ query) + } + } +} + +module EntityHistory = { + let hasRows = async sql => { + let all = + await Entities.allEntities + ->Belt.Array.map(async entityConfig => { + try await General.hasRows(sql, ~table=entityConfig.entityHistory.table) catch { + | exn => + exn->ErrorHandling.mkLogAndRaise( + ~msg=`Failed to check if entity history table has rows`, + ~logger=Logging.createChild( + ~params={ + "entityName": entityConfig.name, + }, + ), + ) + } + }) + ->Promise.all + all->Belt.Array.some(v => v) + } +} diff --git a/apps/hypersync-indexer/generated/src/db/DbFunctionsEntities.res b/apps/hypersync-indexer/generated/src/db/DbFunctionsEntities.res new file mode 100644 index 
000000000..f4b1564e2 --- /dev/null +++ b/apps/hypersync-indexer/generated/src/db/DbFunctionsEntities.res @@ -0,0 +1,22 @@ +type id = string + +@module("./DbFunctionsImplementation.js") +external batchDeleteItemsInTable: ( + ~table: Table.table, + ~sql: Postgres.sql, + ~ids: array, +) => promise = "batchDeleteItemsInTable" + +let makeBatchDelete = (~table) => async (~logger=?, sql, ids) => + switch await batchDeleteItemsInTable(~table, ~sql, ~ids) { + | exception exn => + exn->ErrorHandling.mkLogAndRaise( + ~logger?, + ~msg=`Failed during batch delete of entity ${table.tableName}`, + ) + | res => res + } + +let batchDelete = (~entityConfig: Internal.entityConfig) => { + makeBatchDelete(~table=entityConfig.table) +} \ No newline at end of file diff --git a/apps/hypersync-indexer/generated/src/db/DbFunctionsImplementation.js b/apps/hypersync-indexer/generated/src/db/DbFunctionsImplementation.js new file mode 100644 index 000000000..d36a66914 --- /dev/null +++ b/apps/hypersync-indexer/generated/src/db/DbFunctionsImplementation.js @@ -0,0 +1,17 @@ +const TableModule = require("envio/src/db/Table.res.js"); +const { publicSchema } = require("./Db.res.js"); + +module.exports.batchDeleteItemsInTable = (table, sql, pkArray) => { + const primaryKeyFieldNames = TableModule.getPrimaryKeyFieldNames(table); + + if (primaryKeyFieldNames.length === 1) { + return sql` + DELETE + FROM ${sql(publicSchema)}.${sql(table.tableName)} + WHERE ${sql(primaryKeyFieldNames[0])} IN ${sql(pkArray)}; + `; + } else { + //TODO, if needed create a delete query for multiple field matches + //May be best to make pkArray an array of objects with fieldName -> value + } +}; diff --git a/apps/hypersync-indexer/generated/src/db/Entities.gen.ts b/apps/hypersync-indexer/generated/src/db/Entities.gen.ts new file mode 100644 index 000000000..9625ebee0 --- /dev/null +++ b/apps/hypersync-indexer/generated/src/db/Entities.gen.ts @@ -0,0 +1,233 @@ +/* TypeScript file generated from Entities.res by genType. 
*/ + +/* eslint-disable */ +/* tslint:disable */ + +import type {EventType_t as Enums_EventType_t} from './Enums.gen'; + +import type {Json_t as Js_Json_t} from '../../src/Js.shim'; + +import type {MetricType_t as Enums_MetricType_t} from './Enums.gen'; + +export type id = string; + +export type whereOperations = { + readonly eq: (_1:fieldType) => Promise; + readonly gt: (_1:fieldType) => Promise; + readonly lt: (_1:fieldType) => Promise +}; + +export type Account_t = { readonly id: id }; + +export type Account_indexedFieldOperations = {}; + +export type AccountBalance_t = { + readonly accountId: string; + readonly balance: bigint; + readonly delegate: string; + readonly id: id; + readonly tokenId: string +}; + +export type AccountBalance_indexedFieldOperations = { readonly accountId: whereOperations; readonly tokenId: whereOperations }; + +export type AccountPower_t = { + readonly accountId: string; + readonly daoId: string; + readonly delegationsCount: number; + readonly id: id; + readonly lastVoteTimestamp: bigint; + readonly proposalsCount: number; + readonly votesCount: number; + readonly votingPower: bigint +}; + +export type AccountPower_indexedFieldOperations = { readonly accountId: whereOperations }; + +export type BalanceHistory_t = { + readonly accountId: string; + readonly balance: bigint; + readonly daoId: string; + readonly delta: bigint; + readonly deltaMod: bigint; + readonly id: id; + readonly logIndex: number; + readonly timestamp: bigint; + readonly transactionHash: string +}; + +export type BalanceHistory_indexedFieldOperations = { readonly accountId: whereOperations; readonly transactionHash: whereOperations }; + +export type DaoMetricsDayBucket_t = { + readonly average: bigint; + readonly closeValue: bigint; + readonly count: number; + readonly daoId: string; + readonly date: bigint; + readonly high: bigint; + readonly id: id; + readonly lastUpdate: bigint; + readonly low: bigint; + readonly metricType: Enums_MetricType_t; + readonly 
openValue: bigint; + readonly tokenId: string; + readonly volume: bigint +}; + +export type DaoMetricsDayBucket_indexedFieldOperations = { readonly tokenId: whereOperations }; + +export type Delegation_t = { + readonly daoId: string; + readonly delegateAccountId: string; + readonly delegatedValue: bigint; + readonly delegationType: (undefined | number); + readonly delegatorAccountId: string; + readonly id: id; + readonly isCex: boolean; + readonly isDex: boolean; + readonly isLending: boolean; + readonly isTotal: boolean; + readonly logIndex: number; + readonly previousDelegate: (undefined | string); + readonly timestamp: bigint; + readonly transactionHash: string +}; + +export type Delegation_indexedFieldOperations = { + readonly delegateAccountId: whereOperations; + readonly delegatorAccountId: whereOperations; + readonly timestamp: whereOperations; + readonly transactionHash: whereOperations +}; + +export type FeedEvent_t = { + readonly eventType: Enums_EventType_t; + readonly id: id; + readonly logIndex: number; + readonly metadata: (undefined | Js_Json_t); + readonly timestamp: bigint; + readonly txHash: string; + readonly value: bigint +}; + +export type FeedEvent_indexedFieldOperations = { + readonly timestamp: whereOperations; + readonly txHash: whereOperations; + readonly value: whereOperations +}; + +export type ProposalOnchain_t = { + readonly abstainVotes: bigint; + readonly againstVotes: bigint; + readonly calldatas: Js_Json_t; + readonly daoId: string; + readonly description: string; + readonly endBlock: number; + readonly endTimestamp: bigint; + readonly forVotes: bigint; + readonly id: id; + readonly logIndex: number; + readonly proposalType: (undefined | number); + readonly proposerAccountId: string; + readonly signatures: Js_Json_t; + readonly startBlock: number; + readonly status: string; + readonly targets: Js_Json_t; + readonly timestamp: bigint; + readonly title: string; + readonly txHash: string; + readonly values: Js_Json_t +}; + +export 
type ProposalOnchain_indexedFieldOperations = { readonly proposerAccountId: whereOperations }; + +export type Token_t = { + readonly cexSupply: bigint; + readonly circulatingSupply: bigint; + readonly decimals: number; + readonly delegatedSupply: bigint; + readonly dexSupply: bigint; + readonly id: id; + readonly lendingSupply: bigint; + readonly name: (undefined | string); + readonly nonCirculatingSupply: bigint; + readonly totalSupply: bigint; + readonly treasury: bigint +}; + +export type Token_indexedFieldOperations = {}; + +export type TokenPrice_t = { + readonly id: id; + readonly price: bigint; + readonly timestamp: bigint +}; + +export type TokenPrice_indexedFieldOperations = {}; + +export type Transaction_t = { + readonly fromAddress: (undefined | string); + readonly id: id; + readonly isCex: boolean; + readonly isDex: boolean; + readonly isLending: boolean; + readonly isTotal: boolean; + readonly timestamp: bigint; + readonly toAddress: (undefined | string); + readonly transactionHash: string +}; + +export type Transaction_indexedFieldOperations = {}; + +export type Transfer_t = { + readonly amount: bigint; + readonly daoId: string; + readonly fromAccountId: string; + readonly id: id; + readonly isCex: boolean; + readonly isDex: boolean; + readonly isLending: boolean; + readonly isTotal: boolean; + readonly logIndex: number; + readonly timestamp: bigint; + readonly toAccountId: string; + readonly tokenId: string; + readonly transactionHash: string +}; + +export type Transfer_indexedFieldOperations = { + readonly amount: whereOperations; + readonly fromAccountId: whereOperations; + readonly timestamp: whereOperations; + readonly toAccountId: whereOperations; + readonly tokenId: whereOperations; + readonly transactionHash: whereOperations +}; + +export type VoteOnchain_t = { + readonly daoId: string; + readonly id: id; + readonly proposalId: string; + readonly reason: (undefined | string); + readonly support: string; + readonly timestamp: bigint; + readonly 
txHash: string; + readonly voterAccountId: string; + readonly votingPower: bigint +}; + +export type VoteOnchain_indexedFieldOperations = { readonly proposalId: whereOperations; readonly voterAccountId: whereOperations }; + +export type VotingPowerHistory_t = { + readonly accountId: string; + readonly daoId: string; + readonly delta: bigint; + readonly deltaMod: bigint; + readonly id: id; + readonly logIndex: number; + readonly timestamp: bigint; + readonly transactionHash: string; + readonly votingPower: bigint +}; + +export type VotingPowerHistory_indexedFieldOperations = { readonly accountId: whereOperations; readonly transactionHash: whereOperations }; diff --git a/apps/hypersync-indexer/generated/src/db/Entities.res b/apps/hypersync-indexer/generated/src/db/Entities.res new file mode 100644 index 000000000..65fb1048d --- /dev/null +++ b/apps/hypersync-indexer/generated/src/db/Entities.res @@ -0,0 +1,2077 @@ +open Table +open Enums.EntityType +type id = string + +type internalEntity = Internal.entity +module type Entity = { + type t + let index: int + let name: string + let schema: S.t + let rowsSchema: S.t> + let table: Table.table + let entityHistory: EntityHistory.t +} +external entityModToInternal: module(Entity with type t = 'a) => Internal.entityConfig = "%identity" +external entityModsToInternal: array => array = "%identity" +external entitiesToInternal: array<'a> => array = "%identity" + +@get +external getEntityId: internalEntity => string = "id" + +// Use InMemoryTable.Entity.getEntityIdUnsafe instead of duplicating the logic +let getEntityIdUnsafe = InMemoryTable.Entity.getEntityIdUnsafe + +//shorthand for punning +let isPrimaryKey = true +let isNullable = true +let isArray = true +let isIndex = true + +@genType +type whereOperations<'entity, 'fieldType> = { + eq: 'fieldType => promise>, + gt: 'fieldType => promise>, + lt: 'fieldType => promise> +} + +module Account = { + let name = (Account :> string) + let index = 0 + @genType + type t = { + id: 
id, + } + + let schema = S.object((s): t => { + id: s.field("id", S.string), + }) + + let rowsSchema = S.array(schema) + + @genType + type indexedFieldOperations = { + + } + + let table = mkTable( + (name :> string), + ~fields=[ + mkField( + "id", + Text, + ~fieldSchema=S.string, + ~isPrimaryKey, + + + + + ), + ], + ) + + let entityHistory = table->EntityHistory.fromTable(~schema, ~entityIndex=index) + + external castToInternal: t => Internal.entity = "%identity" +} + +module AccountBalance = { + let name = (AccountBalance :> string) + let index = 1 + @genType + type t = { + accountId: string, + balance: bigint, + delegate: string, + id: id, + tokenId: string, + } + + let schema = S.object((s): t => { + accountId: s.field("accountId", S.string), + balance: s.field("balance", BigInt.schema), + delegate: s.field("delegate", S.string), + id: s.field("id", S.string), + tokenId: s.field("tokenId", S.string), + }) + + let rowsSchema = S.array(schema) + + @genType + type indexedFieldOperations = { + + @as("accountId") accountId: whereOperations, + + @as("tokenId") tokenId: whereOperations, + + } + + let table = mkTable( + (name :> string), + ~fields=[ + mkField( + "accountId", + Text, + ~fieldSchema=S.string, + + + + ~isIndex, + + ), + mkField( + "balance", + Numeric, + ~fieldSchema=BigInt.schema, + + + + + + ), + mkField( + "delegate", + Text, + ~fieldSchema=S.string, + + + + + + ), + mkField( + "id", + Text, + ~fieldSchema=S.string, + ~isPrimaryKey, + + + + + ), + mkField( + "tokenId", + Text, + ~fieldSchema=S.string, + + + + ~isIndex, + + ), + ], + ) + + let entityHistory = table->EntityHistory.fromTable(~schema, ~entityIndex=index) + + external castToInternal: t => Internal.entity = "%identity" +} + +module AccountPower = { + let name = (AccountPower :> string) + let index = 2 + @genType + type t = { + accountId: string, + daoId: string, + delegationsCount: int, + id: id, + lastVoteTimestamp: bigint, + proposalsCount: int, + votesCount: int, + votingPower: bigint, + } 
+ + let schema = S.object((s): t => { + accountId: s.field("accountId", S.string), + daoId: s.field("daoId", S.string), + delegationsCount: s.field("delegationsCount", S.int), + id: s.field("id", S.string), + lastVoteTimestamp: s.field("lastVoteTimestamp", BigInt.schema), + proposalsCount: s.field("proposalsCount", S.int), + votesCount: s.field("votesCount", S.int), + votingPower: s.field("votingPower", BigInt.schema), + }) + + let rowsSchema = S.array(schema) + + @genType + type indexedFieldOperations = { + + @as("accountId") accountId: whereOperations, + + } + + let table = mkTable( + (name :> string), + ~fields=[ + mkField( + "accountId", + Text, + ~fieldSchema=S.string, + + + + ~isIndex, + + ), + mkField( + "daoId", + Text, + ~fieldSchema=S.string, + + + + + + ), + mkField( + "delegationsCount", + Integer, + ~fieldSchema=S.int, + + + + + + ), + mkField( + "id", + Text, + ~fieldSchema=S.string, + ~isPrimaryKey, + + + + + ), + mkField( + "lastVoteTimestamp", + Numeric, + ~fieldSchema=BigInt.schema, + + + + + + ), + mkField( + "proposalsCount", + Integer, + ~fieldSchema=S.int, + + + + + + ), + mkField( + "votesCount", + Integer, + ~fieldSchema=S.int, + + + + + + ), + mkField( + "votingPower", + Numeric, + ~fieldSchema=BigInt.schema, + + + + + + ), + ], + ) + + let entityHistory = table->EntityHistory.fromTable(~schema, ~entityIndex=index) + + external castToInternal: t => Internal.entity = "%identity" +} + +module BalanceHistory = { + let name = (BalanceHistory :> string) + let index = 3 + @genType + type t = { + accountId: string, + balance: bigint, + daoId: string, + delta: bigint, + deltaMod: bigint, + id: id, + logIndex: int, + timestamp: bigint, + transactionHash: string, + } + + let schema = S.object((s): t => { + accountId: s.field("accountId", S.string), + balance: s.field("balance", BigInt.schema), + daoId: s.field("daoId", S.string), + delta: s.field("delta", BigInt.schema), + deltaMod: s.field("deltaMod", BigInt.schema), + id: s.field("id", S.string), + 
logIndex: s.field("logIndex", S.int), + timestamp: s.field("timestamp", BigInt.schema), + transactionHash: s.field("transactionHash", S.string), + }) + + let rowsSchema = S.array(schema) + + @genType + type indexedFieldOperations = { + + @as("accountId") accountId: whereOperations, + + @as("transactionHash") transactionHash: whereOperations, + + } + + let table = mkTable( + (name :> string), + ~fields=[ + mkField( + "accountId", + Text, + ~fieldSchema=S.string, + + + + ~isIndex, + + ), + mkField( + "balance", + Numeric, + ~fieldSchema=BigInt.schema, + + + + + + ), + mkField( + "daoId", + Text, + ~fieldSchema=S.string, + + + + + + ), + mkField( + "delta", + Numeric, + ~fieldSchema=BigInt.schema, + + + + + + ), + mkField( + "deltaMod", + Numeric, + ~fieldSchema=BigInt.schema, + + + + + + ), + mkField( + "id", + Text, + ~fieldSchema=S.string, + ~isPrimaryKey, + + + + + ), + mkField( + "logIndex", + Integer, + ~fieldSchema=S.int, + + + + + + ), + mkField( + "timestamp", + Numeric, + ~fieldSchema=BigInt.schema, + + + + + + ), + mkField( + "transactionHash", + Text, + ~fieldSchema=S.string, + + + + ~isIndex, + + ), + ], + ) + + let entityHistory = table->EntityHistory.fromTable(~schema, ~entityIndex=index) + + external castToInternal: t => Internal.entity = "%identity" +} + +module DaoMetricsDayBucket = { + let name = (DaoMetricsDayBucket :> string) + let index = 4 + @genType + type t = { + average: bigint, + closeValue: bigint, + count: int, + daoId: string, + date: bigint, + high: bigint, + id: id, + lastUpdate: bigint, + low: bigint, + metricType: Enums.MetricType.t, + openValue: bigint, + tokenId: string, + volume: bigint, + } + + let schema = S.object((s): t => { + average: s.field("average", BigInt.schema), + closeValue: s.field("closeValue", BigInt.schema), + count: s.field("count", S.int), + daoId: s.field("daoId", S.string), + date: s.field("date", BigInt.schema), + high: s.field("high", BigInt.schema), + id: s.field("id", S.string), + lastUpdate: 
s.field("lastUpdate", BigInt.schema), + low: s.field("low", BigInt.schema), + metricType: s.field("metricType", Enums.MetricType.config.schema), + openValue: s.field("openValue", BigInt.schema), + tokenId: s.field("tokenId", S.string), + volume: s.field("volume", BigInt.schema), + }) + + let rowsSchema = S.array(schema) + + @genType + type indexedFieldOperations = { + + @as("tokenId") tokenId: whereOperations, + + } + + let table = mkTable( + (name :> string), + ~fields=[ + mkField( + "average", + Numeric, + ~fieldSchema=BigInt.schema, + + + + + + ), + mkField( + "closeValue", + Numeric, + ~fieldSchema=BigInt.schema, + + + + + + ), + mkField( + "count", + Integer, + ~fieldSchema=S.int, + + + + + + ), + mkField( + "daoId", + Text, + ~fieldSchema=S.string, + + + + + + ), + mkField( + "date", + Numeric, + ~fieldSchema=BigInt.schema, + + + + + + ), + mkField( + "high", + Numeric, + ~fieldSchema=BigInt.schema, + + + + + + ), + mkField( + "id", + Text, + ~fieldSchema=S.string, + ~isPrimaryKey, + + + + + ), + mkField( + "lastUpdate", + Numeric, + ~fieldSchema=BigInt.schema, + + + + + + ), + mkField( + "low", + Numeric, + ~fieldSchema=BigInt.schema, + + + + + + ), + mkField( + "metricType", + Custom(Enums.MetricType.config.name), + ~fieldSchema=Enums.MetricType.config.schema, + + + + + + ), + mkField( + "openValue", + Numeric, + ~fieldSchema=BigInt.schema, + + + + + + ), + mkField( + "tokenId", + Text, + ~fieldSchema=S.string, + + + + ~isIndex, + + ), + mkField( + "volume", + Numeric, + ~fieldSchema=BigInt.schema, + + + + + + ), + ], + ) + + let entityHistory = table->EntityHistory.fromTable(~schema, ~entityIndex=index) + + external castToInternal: t => Internal.entity = "%identity" +} + +module Delegation = { + let name = (Delegation :> string) + let index = 5 + @genType + type t = { + daoId: string, + delegateAccountId: string, + delegatedValue: bigint, + delegationType: option, + delegatorAccountId: string, + id: id, + isCex: bool, + isDex: bool, + isLending: bool, + 
isTotal: bool, + logIndex: int, + previousDelegate: option, + timestamp: bigint, + transactionHash: string, + } + + let schema = S.object((s): t => { + daoId: s.field("daoId", S.string), + delegateAccountId: s.field("delegateAccountId", S.string), + delegatedValue: s.field("delegatedValue", BigInt.schema), + delegationType: s.field("delegationType", S.null(S.int)), + delegatorAccountId: s.field("delegatorAccountId", S.string), + id: s.field("id", S.string), + isCex: s.field("isCex", S.bool), + isDex: s.field("isDex", S.bool), + isLending: s.field("isLending", S.bool), + isTotal: s.field("isTotal", S.bool), + logIndex: s.field("logIndex", S.int), + previousDelegate: s.field("previousDelegate", S.null(S.string)), + timestamp: s.field("timestamp", BigInt.schema), + transactionHash: s.field("transactionHash", S.string), + }) + + let rowsSchema = S.array(schema) + + @genType + type indexedFieldOperations = { + + @as("delegateAccountId") delegateAccountId: whereOperations, + + @as("delegatorAccountId") delegatorAccountId: whereOperations, + + @as("timestamp") timestamp: whereOperations, + + @as("transactionHash") transactionHash: whereOperations, + + } + + let table = mkTable( + (name :> string), + ~fields=[ + mkField( + "daoId", + Text, + ~fieldSchema=S.string, + + + + + + ), + mkField( + "delegateAccountId", + Text, + ~fieldSchema=S.string, + + + + ~isIndex, + + ), + mkField( + "delegatedValue", + Numeric, + ~fieldSchema=BigInt.schema, + + + + + + ), + mkField( + "delegationType", + Integer, + ~fieldSchema=S.null(S.int), + + ~isNullable, + + + + ), + mkField( + "delegatorAccountId", + Text, + ~fieldSchema=S.string, + + + + ~isIndex, + + ), + mkField( + "id", + Text, + ~fieldSchema=S.string, + ~isPrimaryKey, + + + + + ), + mkField( + "isCex", + Boolean, + ~fieldSchema=S.bool, + + + + + + ), + mkField( + "isDex", + Boolean, + ~fieldSchema=S.bool, + + + + + + ), + mkField( + "isLending", + Boolean, + ~fieldSchema=S.bool, + + + + + + ), + mkField( + "isTotal", + Boolean, + 
~fieldSchema=S.bool, + + + + + + ), + mkField( + "logIndex", + Integer, + ~fieldSchema=S.int, + + + + + + ), + mkField( + "previousDelegate", + Text, + ~fieldSchema=S.null(S.string), + + ~isNullable, + + + + ), + mkField( + "timestamp", + Numeric, + ~fieldSchema=BigInt.schema, + + + + ~isIndex, + + ), + mkField( + "transactionHash", + Text, + ~fieldSchema=S.string, + + + + ~isIndex, + + ), + ], + ) + + let entityHistory = table->EntityHistory.fromTable(~schema, ~entityIndex=index) + + external castToInternal: t => Internal.entity = "%identity" +} + +module FeedEvent = { + let name = (FeedEvent :> string) + let index = 6 + @genType + type t = { + eventType: Enums.EventType.t, + id: id, + logIndex: int, + metadata: option, + timestamp: bigint, + txHash: string, + value: bigint, + } + + let schema = S.object((s): t => { + eventType: s.field("eventType", Enums.EventType.config.schema), + id: s.field("id", S.string), + logIndex: s.field("logIndex", S.int), + metadata: s.field("metadata", S.null(S.json(~validate=false))), + timestamp: s.field("timestamp", BigInt.schema), + txHash: s.field("txHash", S.string), + value: s.field("value", BigInt.schema), + }) + + let rowsSchema = S.array(schema) + + @genType + type indexedFieldOperations = { + + @as("timestamp") timestamp: whereOperations, + + @as("txHash") txHash: whereOperations, + + @as("value") value: whereOperations, + + } + + let table = mkTable( + (name :> string), + ~fields=[ + mkField( + "eventType", + Custom(Enums.EventType.config.name), + ~fieldSchema=Enums.EventType.config.schema, + + + + + + ), + mkField( + "id", + Text, + ~fieldSchema=S.string, + ~isPrimaryKey, + + + + + ), + mkField( + "logIndex", + Integer, + ~fieldSchema=S.int, + + + + + + ), + mkField( + "metadata", + JsonB, + ~fieldSchema=S.null(S.json(~validate=false)), + + ~isNullable, + + + + ), + mkField( + "timestamp", + Numeric, + ~fieldSchema=BigInt.schema, + + + + ~isIndex, + + ), + mkField( + "txHash", + Text, + ~fieldSchema=S.string, + + + + 
~isIndex, + + ), + mkField( + "value", + Numeric, + ~fieldSchema=BigInt.schema, + + + + ~isIndex, + + ), + ], + ) + + let entityHistory = table->EntityHistory.fromTable(~schema, ~entityIndex=index) + + external castToInternal: t => Internal.entity = "%identity" +} + +module ProposalOnchain = { + let name = (ProposalOnchain :> string) + let index = 7 + @genType + type t = { + abstainVotes: bigint, + againstVotes: bigint, + calldatas: Js.Json.t, + daoId: string, + description: string, + endBlock: int, + endTimestamp: bigint, + forVotes: bigint, + id: id, + logIndex: int, + proposalType: option, + proposerAccountId: string, + signatures: Js.Json.t, + startBlock: int, + status: string, + targets: Js.Json.t, + timestamp: bigint, + title: string, + txHash: string, + values: Js.Json.t, + } + + let schema = S.object((s): t => { + abstainVotes: s.field("abstainVotes", BigInt.schema), + againstVotes: s.field("againstVotes", BigInt.schema), + calldatas: s.field("calldatas", S.json(~validate=false)), + daoId: s.field("daoId", S.string), + description: s.field("description", S.string), + endBlock: s.field("endBlock", S.int), + endTimestamp: s.field("endTimestamp", BigInt.schema), + forVotes: s.field("forVotes", BigInt.schema), + id: s.field("id", S.string), + logIndex: s.field("logIndex", S.int), + proposalType: s.field("proposalType", S.null(S.int)), + proposerAccountId: s.field("proposerAccountId", S.string), + signatures: s.field("signatures", S.json(~validate=false)), + startBlock: s.field("startBlock", S.int), + status: s.field("status", S.string), + targets: s.field("targets", S.json(~validate=false)), + timestamp: s.field("timestamp", BigInt.schema), + title: s.field("title", S.string), + txHash: s.field("txHash", S.string), + values: s.field("values", S.json(~validate=false)), + }) + + let rowsSchema = S.array(schema) + + @genType + type indexedFieldOperations = { + + @as("proposerAccountId") proposerAccountId: whereOperations, + + } + + let table = mkTable( + (name :> 
string), + ~fields=[ + mkField( + "abstainVotes", + Numeric, + ~fieldSchema=BigInt.schema, + + + + + + ), + mkField( + "againstVotes", + Numeric, + ~fieldSchema=BigInt.schema, + + + + + + ), + mkField( + "calldatas", + JsonB, + ~fieldSchema=S.json(~validate=false), + + + + + + ), + mkField( + "daoId", + Text, + ~fieldSchema=S.string, + + + + + + ), + mkField( + "description", + Text, + ~fieldSchema=S.string, + + + + + + ), + mkField( + "endBlock", + Integer, + ~fieldSchema=S.int, + + + + + + ), + mkField( + "endTimestamp", + Numeric, + ~fieldSchema=BigInt.schema, + + + + + + ), + mkField( + "forVotes", + Numeric, + ~fieldSchema=BigInt.schema, + + + + + + ), + mkField( + "id", + Text, + ~fieldSchema=S.string, + ~isPrimaryKey, + + + + + ), + mkField( + "logIndex", + Integer, + ~fieldSchema=S.int, + + + + + + ), + mkField( + "proposalType", + Integer, + ~fieldSchema=S.null(S.int), + + ~isNullable, + + + + ), + mkField( + "proposerAccountId", + Text, + ~fieldSchema=S.string, + + + + ~isIndex, + + ), + mkField( + "signatures", + JsonB, + ~fieldSchema=S.json(~validate=false), + + + + + + ), + mkField( + "startBlock", + Integer, + ~fieldSchema=S.int, + + + + + + ), + mkField( + "status", + Text, + ~fieldSchema=S.string, + + + + + + ), + mkField( + "targets", + JsonB, + ~fieldSchema=S.json(~validate=false), + + + + + + ), + mkField( + "timestamp", + Numeric, + ~fieldSchema=BigInt.schema, + + + + + + ), + mkField( + "title", + Text, + ~fieldSchema=S.string, + + + + + + ), + mkField( + "txHash", + Text, + ~fieldSchema=S.string, + + + + + + ), + mkField( + "values", + JsonB, + ~fieldSchema=S.json(~validate=false), + + + + + + ), + ], + ) + + let entityHistory = table->EntityHistory.fromTable(~schema, ~entityIndex=index) + + external castToInternal: t => Internal.entity = "%identity" +} + +module Token = { + let name = (Token :> string) + let index = 8 + @genType + type t = { + cexSupply: bigint, + circulatingSupply: bigint, + decimals: int, + delegatedSupply: bigint, + 
dexSupply: bigint, + id: id, + lendingSupply: bigint, + name: option, + nonCirculatingSupply: bigint, + totalSupply: bigint, + treasury: bigint, + } + + let schema = S.object((s): t => { + cexSupply: s.field("cexSupply", BigInt.schema), + circulatingSupply: s.field("circulatingSupply", BigInt.schema), + decimals: s.field("decimals", S.int), + delegatedSupply: s.field("delegatedSupply", BigInt.schema), + dexSupply: s.field("dexSupply", BigInt.schema), + id: s.field("id", S.string), + lendingSupply: s.field("lendingSupply", BigInt.schema), + name: s.field("name", S.null(S.string)), + nonCirculatingSupply: s.field("nonCirculatingSupply", BigInt.schema), + totalSupply: s.field("totalSupply", BigInt.schema), + treasury: s.field("treasury", BigInt.schema), + }) + + let rowsSchema = S.array(schema) + + @genType + type indexedFieldOperations = { + + } + + let table = mkTable( + (name :> string), + ~fields=[ + mkField( + "cexSupply", + Numeric, + ~fieldSchema=BigInt.schema, + + + + + + ), + mkField( + "circulatingSupply", + Numeric, + ~fieldSchema=BigInt.schema, + + + + + + ), + mkField( + "decimals", + Integer, + ~fieldSchema=S.int, + + + + + + ), + mkField( + "delegatedSupply", + Numeric, + ~fieldSchema=BigInt.schema, + + + + + + ), + mkField( + "dexSupply", + Numeric, + ~fieldSchema=BigInt.schema, + + + + + + ), + mkField( + "id", + Text, + ~fieldSchema=S.string, + ~isPrimaryKey, + + + + + ), + mkField( + "lendingSupply", + Numeric, + ~fieldSchema=BigInt.schema, + + + + + + ), + mkField( + "name", + Text, + ~fieldSchema=S.null(S.string), + + ~isNullable, + + + + ), + mkField( + "nonCirculatingSupply", + Numeric, + ~fieldSchema=BigInt.schema, + + + + + + ), + mkField( + "totalSupply", + Numeric, + ~fieldSchema=BigInt.schema, + + + + + + ), + mkField( + "treasury", + Numeric, + ~fieldSchema=BigInt.schema, + + + + + + ), + ], + ) + + let entityHistory = table->EntityHistory.fromTable(~schema, ~entityIndex=index) + + external castToInternal: t => Internal.entity = 
"%identity" +} + +module TokenPrice = { + let name = (TokenPrice :> string) + let index = 9 + @genType + type t = { + id: id, + price: bigint, + timestamp: bigint, + } + + let schema = S.object((s): t => { + id: s.field("id", S.string), + price: s.field("price", BigInt.schema), + timestamp: s.field("timestamp", BigInt.schema), + }) + + let rowsSchema = S.array(schema) + + @genType + type indexedFieldOperations = { + + } + + let table = mkTable( + (name :> string), + ~fields=[ + mkField( + "id", + Text, + ~fieldSchema=S.string, + ~isPrimaryKey, + + + + + ), + mkField( + "price", + Numeric, + ~fieldSchema=BigInt.schema, + + + + + + ), + mkField( + "timestamp", + Numeric, + ~fieldSchema=BigInt.schema, + + + + + + ), + ], + ) + + let entityHistory = table->EntityHistory.fromTable(~schema, ~entityIndex=index) + + external castToInternal: t => Internal.entity = "%identity" +} + +module Transaction = { + let name = (Transaction :> string) + let index = 10 + @genType + type t = { + fromAddress: option, + id: id, + isCex: bool, + isDex: bool, + isLending: bool, + isTotal: bool, + timestamp: bigint, + toAddress: option, + transactionHash: string, + } + + let schema = S.object((s): t => { + fromAddress: s.field("fromAddress", S.null(S.string)), + id: s.field("id", S.string), + isCex: s.field("isCex", S.bool), + isDex: s.field("isDex", S.bool), + isLending: s.field("isLending", S.bool), + isTotal: s.field("isTotal", S.bool), + timestamp: s.field("timestamp", BigInt.schema), + toAddress: s.field("toAddress", S.null(S.string)), + transactionHash: s.field("transactionHash", S.string), + }) + + let rowsSchema = S.array(schema) + + @genType + type indexedFieldOperations = { + + } + + let table = mkTable( + (name :> string), + ~fields=[ + mkField( + "fromAddress", + Text, + ~fieldSchema=S.null(S.string), + + ~isNullable, + + + + ), + mkField( + "id", + Text, + ~fieldSchema=S.string, + ~isPrimaryKey, + + + + + ), + mkField( + "isCex", + Boolean, + ~fieldSchema=S.bool, + + + + + + ), 
+ mkField( + "isDex", + Boolean, + ~fieldSchema=S.bool, + + + + + + ), + mkField( + "isLending", + Boolean, + ~fieldSchema=S.bool, + + + + + + ), + mkField( + "isTotal", + Boolean, + ~fieldSchema=S.bool, + + + + + + ), + mkField( + "timestamp", + Numeric, + ~fieldSchema=BigInt.schema, + + + + + + ), + mkField( + "toAddress", + Text, + ~fieldSchema=S.null(S.string), + + ~isNullable, + + + + ), + mkField( + "transactionHash", + Text, + ~fieldSchema=S.string, + + + + + + ), + ], + ) + + let entityHistory = table->EntityHistory.fromTable(~schema, ~entityIndex=index) + + external castToInternal: t => Internal.entity = "%identity" +} + +module Transfer = { + let name = (Transfer :> string) + let index = 11 + @genType + type t = { + amount: bigint, + daoId: string, + fromAccountId: string, + id: id, + isCex: bool, + isDex: bool, + isLending: bool, + isTotal: bool, + logIndex: int, + timestamp: bigint, + toAccountId: string, + tokenId: string, + transactionHash: string, + } + + let schema = S.object((s): t => { + amount: s.field("amount", BigInt.schema), + daoId: s.field("daoId", S.string), + fromAccountId: s.field("fromAccountId", S.string), + id: s.field("id", S.string), + isCex: s.field("isCex", S.bool), + isDex: s.field("isDex", S.bool), + isLending: s.field("isLending", S.bool), + isTotal: s.field("isTotal", S.bool), + logIndex: s.field("logIndex", S.int), + timestamp: s.field("timestamp", BigInt.schema), + toAccountId: s.field("toAccountId", S.string), + tokenId: s.field("tokenId", S.string), + transactionHash: s.field("transactionHash", S.string), + }) + + let rowsSchema = S.array(schema) + + @genType + type indexedFieldOperations = { + + @as("amount") amount: whereOperations, + + @as("fromAccountId") fromAccountId: whereOperations, + + @as("timestamp") timestamp: whereOperations, + + @as("toAccountId") toAccountId: whereOperations, + + @as("tokenId") tokenId: whereOperations, + + @as("transactionHash") transactionHash: whereOperations, + + } + + let table = 
mkTable( + (name :> string), + ~fields=[ + mkField( + "amount", + Numeric, + ~fieldSchema=BigInt.schema, + + + + ~isIndex, + + ), + mkField( + "daoId", + Text, + ~fieldSchema=S.string, + + + + + + ), + mkField( + "fromAccountId", + Text, + ~fieldSchema=S.string, + + + + ~isIndex, + + ), + mkField( + "id", + Text, + ~fieldSchema=S.string, + ~isPrimaryKey, + + + + + ), + mkField( + "isCex", + Boolean, + ~fieldSchema=S.bool, + + + + + + ), + mkField( + "isDex", + Boolean, + ~fieldSchema=S.bool, + + + + + + ), + mkField( + "isLending", + Boolean, + ~fieldSchema=S.bool, + + + + + + ), + mkField( + "isTotal", + Boolean, + ~fieldSchema=S.bool, + + + + + + ), + mkField( + "logIndex", + Integer, + ~fieldSchema=S.int, + + + + + + ), + mkField( + "timestamp", + Numeric, + ~fieldSchema=BigInt.schema, + + + + ~isIndex, + + ), + mkField( + "toAccountId", + Text, + ~fieldSchema=S.string, + + + + ~isIndex, + + ), + mkField( + "tokenId", + Text, + ~fieldSchema=S.string, + + + + ~isIndex, + + ), + mkField( + "transactionHash", + Text, + ~fieldSchema=S.string, + + + + ~isIndex, + + ), + ], + ) + + let entityHistory = table->EntityHistory.fromTable(~schema, ~entityIndex=index) + + external castToInternal: t => Internal.entity = "%identity" +} + +module VoteOnchain = { + let name = (VoteOnchain :> string) + let index = 12 + @genType + type t = { + daoId: string, + id: id, + proposalId: string, + reason: option, + support: string, + timestamp: bigint, + txHash: string, + voterAccountId: string, + votingPower: bigint, + } + + let schema = S.object((s): t => { + daoId: s.field("daoId", S.string), + id: s.field("id", S.string), + proposalId: s.field("proposalId", S.string), + reason: s.field("reason", S.null(S.string)), + support: s.field("support", S.string), + timestamp: s.field("timestamp", BigInt.schema), + txHash: s.field("txHash", S.string), + voterAccountId: s.field("voterAccountId", S.string), + votingPower: s.field("votingPower", BigInt.schema), + }) + + let rowsSchema = 
S.array(schema) + + @genType + type indexedFieldOperations = { + + @as("proposalId") proposalId: whereOperations, + + @as("voterAccountId") voterAccountId: whereOperations, + + } + + let table = mkTable( + (name :> string), + ~fields=[ + mkField( + "daoId", + Text, + ~fieldSchema=S.string, + + + + + + ), + mkField( + "id", + Text, + ~fieldSchema=S.string, + ~isPrimaryKey, + + + + + ), + mkField( + "proposalId", + Text, + ~fieldSchema=S.string, + + + + ~isIndex, + + ), + mkField( + "reason", + Text, + ~fieldSchema=S.null(S.string), + + ~isNullable, + + + + ), + mkField( + "support", + Text, + ~fieldSchema=S.string, + + + + + + ), + mkField( + "timestamp", + Numeric, + ~fieldSchema=BigInt.schema, + + + + + + ), + mkField( + "txHash", + Text, + ~fieldSchema=S.string, + + + + + + ), + mkField( + "voterAccountId", + Text, + ~fieldSchema=S.string, + + + + ~isIndex, + + ), + mkField( + "votingPower", + Numeric, + ~fieldSchema=BigInt.schema, + + + + + + ), + ], + ) + + let entityHistory = table->EntityHistory.fromTable(~schema, ~entityIndex=index) + + external castToInternal: t => Internal.entity = "%identity" +} + +module VotingPowerHistory = { + let name = (VotingPowerHistory :> string) + let index = 13 + @genType + type t = { + accountId: string, + daoId: string, + delta: bigint, + deltaMod: bigint, + id: id, + logIndex: int, + timestamp: bigint, + transactionHash: string, + votingPower: bigint, + } + + let schema = S.object((s): t => { + accountId: s.field("accountId", S.string), + daoId: s.field("daoId", S.string), + delta: s.field("delta", BigInt.schema), + deltaMod: s.field("deltaMod", BigInt.schema), + id: s.field("id", S.string), + logIndex: s.field("logIndex", S.int), + timestamp: s.field("timestamp", BigInt.schema), + transactionHash: s.field("transactionHash", S.string), + votingPower: s.field("votingPower", BigInt.schema), + }) + + let rowsSchema = S.array(schema) + + @genType + type indexedFieldOperations = { + + @as("accountId") accountId: whereOperations, + 
+ @as("transactionHash") transactionHash: whereOperations, + + } + + let table = mkTable( + (name :> string), + ~fields=[ + mkField( + "accountId", + Text, + ~fieldSchema=S.string, + + + + ~isIndex, + + ), + mkField( + "daoId", + Text, + ~fieldSchema=S.string, + + + + + + ), + mkField( + "delta", + Numeric, + ~fieldSchema=BigInt.schema, + + + + + + ), + mkField( + "deltaMod", + Numeric, + ~fieldSchema=BigInt.schema, + + + + + + ), + mkField( + "id", + Text, + ~fieldSchema=S.string, + ~isPrimaryKey, + + + + + ), + mkField( + "logIndex", + Integer, + ~fieldSchema=S.int, + + + + + + ), + mkField( + "timestamp", + Numeric, + ~fieldSchema=BigInt.schema, + + + + + + ), + mkField( + "transactionHash", + Text, + ~fieldSchema=S.string, + + + + ~isIndex, + + ), + mkField( + "votingPower", + Numeric, + ~fieldSchema=BigInt.schema, + + + + + + ), + ], + ) + + let entityHistory = table->EntityHistory.fromTable(~schema, ~entityIndex=index) + + external castToInternal: t => Internal.entity = "%identity" +} + +let userEntities = [ + module(Account), + module(AccountBalance), + module(AccountPower), + module(BalanceHistory), + module(DaoMetricsDayBucket), + module(Delegation), + module(FeedEvent), + module(ProposalOnchain), + module(Token), + module(TokenPrice), + module(Transaction), + module(Transfer), + module(VoteOnchain), + module(VotingPowerHistory), +]->entityModsToInternal + +let allEntities = + userEntities->Js.Array2.concat( + [module(InternalTable.DynamicContractRegistry)]->entityModsToInternal, + ) + +let byName = + allEntities + ->Js.Array2.map(entityConfig => { + (entityConfig.name, entityConfig) + }) + ->Js.Dict.fromArray diff --git a/apps/hypersync-indexer/generated/src/db/Enums.gen.ts b/apps/hypersync-indexer/generated/src/db/Enums.gen.ts new file mode 100644 index 000000000..37c23647f --- /dev/null +++ b/apps/hypersync-indexer/generated/src/db/Enums.gen.ts @@ -0,0 +1,41 @@ +/* TypeScript file generated from Enums.res by genType. 
*/ + +/* eslint-disable */ +/* tslint:disable */ + +export type ContractType_t = "ENSGovernor" | "ENSToken"; + +export type EntityType_t = + "Account" + | "AccountBalance" + | "AccountPower" + | "BalanceHistory" + | "DaoMetricsDayBucket" + | "Delegation" + | "FeedEvent" + | "ProposalOnchain" + | "Token" + | "TokenPrice" + | "Transaction" + | "Transfer" + | "VoteOnchain" + | "VotingPowerHistory" + | "dynamic_contract_registry"; + +export type EventType_t = + "VOTE" + | "PROPOSAL" + | "PROPOSAL_EXTENDED" + | "DELEGATION" + | "DELEGATION_VOTES_CHANGED" + | "TRANSFER"; + +export type MetricType_t = + "total" + | "delegated" + | "cex" + | "dex" + | "lending" + | "circulating" + | "treasury" + | "non_circulating"; diff --git a/apps/hypersync-indexer/generated/src/db/Enums.res b/apps/hypersync-indexer/generated/src/db/Enums.res new file mode 100644 index 000000000..79f5532c2 --- /dev/null +++ b/apps/hypersync-indexer/generated/src/db/Enums.res @@ -0,0 +1,108 @@ +module ContractType = { + @genType + type t = + | @as("ENSGovernor") ENSGovernor + | @as("ENSToken") ENSToken + + let name = "CONTRACT_TYPE" + let variants = [ + ENSGovernor, + ENSToken, + ] + let config = Internal.makeEnumConfig(~name, ~variants) +} + +module EntityType = { + @genType + type t = + | @as("Account") Account + | @as("AccountBalance") AccountBalance + | @as("AccountPower") AccountPower + | @as("BalanceHistory") BalanceHistory + | @as("DaoMetricsDayBucket") DaoMetricsDayBucket + | @as("Delegation") Delegation + | @as("FeedEvent") FeedEvent + | @as("ProposalOnchain") ProposalOnchain + | @as("Token") Token + | @as("TokenPrice") TokenPrice + | @as("Transaction") Transaction + | @as("Transfer") Transfer + | @as("VoteOnchain") VoteOnchain + | @as("VotingPowerHistory") VotingPowerHistory + | @as("dynamic_contract_registry") DynamicContractRegistry + + let name = "ENTITY_TYPE" + let variants = [ + Account, + AccountBalance, + AccountPower, + BalanceHistory, + DaoMetricsDayBucket, + Delegation, + FeedEvent, + 
ProposalOnchain, + Token, + TokenPrice, + Transaction, + Transfer, + VoteOnchain, + VotingPowerHistory, + DynamicContractRegistry, + ] + let config = Internal.makeEnumConfig(~name, ~variants) +} + +module EventType = { + @genType + type t = + | @as("VOTE") VOTE + | @as("PROPOSAL") PROPOSAL + | @as("PROPOSAL_EXTENDED") PROPOSAL_EXTENDED + | @as("DELEGATION") DELEGATION + | @as("DELEGATION_VOTES_CHANGED") DELEGATION_VOTES_CHANGED + | @as("TRANSFER") TRANSFER + + let name = "EventType" + let variants = [ + VOTE, + PROPOSAL, + PROPOSAL_EXTENDED, + DELEGATION, + DELEGATION_VOTES_CHANGED, + TRANSFER, + ] + let config = Internal.makeEnumConfig(~name, ~variants) +} + +module MetricType = { + @genType + type t = + | @as("total") Total + | @as("delegated") Delegated + | @as("cex") Cex + | @as("dex") Dex + | @as("lending") Lending + | @as("circulating") Circulating + | @as("treasury") Treasury + | @as("non_circulating") Non_circulating + + let name = "MetricType" + let variants = [ + Total, + Delegated, + Cex, + Dex, + Lending, + Circulating, + Treasury, + Non_circulating, + ] + let config = Internal.makeEnumConfig(~name, ~variants) +} + +let allEnums = ([ + ContractType.config->Internal.fromGenericEnumConfig, + EntityType.config->Internal.fromGenericEnumConfig, + EventType.config->Internal.fromGenericEnumConfig, + MetricType.config->Internal.fromGenericEnumConfig, +]) diff --git a/apps/hypersync-indexer/generated/src/db/Migrations.res b/apps/hypersync-indexer/generated/src/db/Migrations.res new file mode 100644 index 000000000..a293165c7 --- /dev/null +++ b/apps/hypersync-indexer/generated/src/db/Migrations.res @@ -0,0 +1,57 @@ +let unsafe = Postgres.unsafe + +let deleteAllTables: unit => promise = async () => { + Logging.trace("Dropping all tables") + let query = ` + DO $$ + BEGIN + DROP SCHEMA IF EXISTS ${Env.Db.publicSchema} CASCADE; + CREATE SCHEMA ${Env.Db.publicSchema}; + GRANT ALL ON SCHEMA ${Env.Db.publicSchema} TO "${Env.Db.user}"; + GRANT ALL ON SCHEMA 
${Env.Db.publicSchema} TO public; + END $$;` + + await Generated.codegenPersistence.sql->unsafe(query) +} + +type t +@module external process: t = "process" + +type exitCode = | @as(0) Success | @as(1) Failure +@send external exit: (t, exitCode) => unit = "exit" + +let runUpMigrations = async ( + ~shouldExit, + // Reset is used for db-setup + ~reset=false, +) => { + let config = Generated.configWithoutRegistrations + let exitCode = try { + await Generated.codegenPersistence->Persistence.init( + ~reset, + ~chainConfigs=config.chainMap->ChainMap.values, + ) + Success + } catch { + | _ => Failure + } + if shouldExit { + process->exit(exitCode) + } + exitCode +} + +let runDownMigrations = async (~shouldExit) => { + let exitCode = ref(Success) + await deleteAllTables()->Promise.catch(err => { + exitCode := Failure + err + ->ErrorHandling.make(~msg="EE804: Error dropping entity tables") + ->ErrorHandling.log + Promise.resolve() + }) + if shouldExit { + process->exit(exitCode.contents) + } + exitCode.contents +} diff --git a/apps/hypersync-indexer/generated/src/eventFetching/ChainFetcher.res b/apps/hypersync-indexer/generated/src/eventFetching/ChainFetcher.res new file mode 100644 index 000000000..312cb2a37 --- /dev/null +++ b/apps/hypersync-indexer/generated/src/eventFetching/ChainFetcher.res @@ -0,0 +1,464 @@ +open Belt + +//A filter should return true if the event should be kept and isValid should return +//false when the filter should be removed/cleaned up +type processingFilter = { + filter: Internal.item => bool, + isValid: (~fetchState: FetchState.t) => bool, +} + +type t = { + logger: Pino.t, + fetchState: FetchState.t, + sourceManager: SourceManager.t, + chainConfig: Config.chain, + //The latest known block of the chain + currentBlockHeight: int, + isProgressAtHead: bool, + timestampCaughtUpToHeadOrEndblock: option, + committedProgressBlockNumber: int, + firstEventBlockNumber: option, + numEventsProcessed: int, + numBatchesFetched: int, + reorgDetection: 
ReorgDetection.t, + safeCheckpointTracking: option, +} + +//CONSTRUCTION +let make = ( + ~chainConfig: Config.chain, + ~dynamicContracts: array, + ~startBlock, + ~endBlock, + ~firstEventBlockNumber, + ~progressBlockNumber, + ~config: Config.t, + ~registrations: EventRegister.registrations, + ~targetBufferSize, + ~logger, + ~timestampCaughtUpToHeadOrEndblock, + ~numEventsProcessed, + ~numBatchesFetched, + ~isInReorgThreshold, + ~reorgCheckpoints: array, + ~maxReorgDepth, +): t => { + // We don't need the router itself, but only validation logic, + // since now event router is created for selection of events + // and validation doesn't work correctly in routers. + // Ideally to split it into two different parts. + let eventRouter = EventRouter.empty() + + // Aggregate events we want to fetch + let contracts = [] + let eventConfigs: array = [] + + let notRegisteredEvents = [] + + chainConfig.contracts->Array.forEach(contract => { + let contractName = contract.name + + contract.events->Array.forEach(eventConfig => { + let {isWildcard} = eventConfig + let hasContractRegister = eventConfig.contractRegister->Option.isSome + + // Should validate the events + eventRouter->EventRouter.addOrThrow( + eventConfig.id, + (), + ~contractName, + ~chain=ChainMap.Chain.makeUnsafe(~chainId=chainConfig.id), + ~eventName=eventConfig.name, + ~isWildcard, + ) + + // Filter out non-preRegistration events on preRegistration phase + // so we don't care about it in fetch state and workers anymore + let shouldBeIncluded = if config.enableRawEvents { + true + } else { + let isRegistered = hasContractRegister || eventConfig.handler->Option.isSome + if !isRegistered { + notRegisteredEvents->Array.push(eventConfig) + } + isRegistered + } + + if shouldBeIncluded { + eventConfigs->Array.push(eventConfig) + } + }) + + switch contract.startBlock { + | Some(startBlock) if startBlock < chainConfig.startBlock => + Js.Exn.raiseError( + `The start block for contract "${contractName}" is less than the chain 
start block. This is not supported yet.`, + ) + | _ => () + } + + contract.addresses->Array.forEach(address => { + contracts->Array.push({ + Internal.address, + contractName: contract.name, + startBlock: switch contract.startBlock { + | Some(startBlock) => startBlock + | None => chainConfig.startBlock + }, + registrationBlock: None, + }) + }) + }) + + dynamicContracts->Array.forEach(dc => contracts->Array.push(dc)) + + if notRegisteredEvents->Utils.Array.notEmpty { + logger->Logging.childInfo( + `The event${if notRegisteredEvents->Array.length > 1 { + "s" + } else { + "" + }} ${notRegisteredEvents + ->Array.map(eventConfig => `${eventConfig.contractName}.${eventConfig.name}`) + ->Js.Array2.joinWith(", ")} don't have an event handler and skipped for indexing.`, + ) + } + + let onBlockConfigs = + registrations.onBlockByChainId->Utils.Dict.dangerouslyGetNonOption(chainConfig.id->Int.toString) + switch onBlockConfigs { + | Some(onBlockConfigs) => + // TODO: Move it to the EventRegister module + // so the error is thrown with better stack trace + onBlockConfigs->Array.forEach(onBlockConfig => { + if onBlockConfig.startBlock->Option.getWithDefault(startBlock) < startBlock { + Js.Exn.raiseError( + `The start block for onBlock handler "${onBlockConfig.name}" is less than the chain start block (${startBlock->Belt.Int.toString}). This is not supported yet.`, + ) + } + switch endBlock { + | Some(chainEndBlock) => + if onBlockConfig.endBlock->Option.getWithDefault(chainEndBlock) > chainEndBlock { + Js.Exn.raiseError( + `The end block for onBlock handler "${onBlockConfig.name}" is greater than the chain end block (${chainEndBlock->Belt.Int.toString}). 
This is not supported yet.`, + ) + } + | None => () + } + }) + | None => () + } + + let fetchState = FetchState.make( + ~maxAddrInPartition=config.maxAddrInPartition, + ~contracts, + ~progressBlockNumber, + ~startBlock, + ~endBlock, + ~eventConfigs, + ~targetBufferSize, + ~chainId=chainConfig.id, + // FIXME: Shouldn't set with full history + ~blockLag=Pervasives.max( + !config.shouldRollbackOnReorg || isInReorgThreshold ? 0 : chainConfig.maxReorgDepth, + Env.indexingBlockLag->Option.getWithDefault(0), + ), + ~onBlockConfigs?, + ) + + let chainReorgCheckpoints = reorgCheckpoints->Array.keepMapU(reorgCheckpoint => { + if reorgCheckpoint.chainId === chainConfig.id { + Some(reorgCheckpoint) + } else { + None + } + }) + + { + logger, + chainConfig, + sourceManager: SourceManager.make( + ~sources=chainConfig.sources, + ~maxPartitionConcurrency=Env.maxPartitionConcurrency, + ), + reorgDetection: ReorgDetection.make( + ~chainReorgCheckpoints, + ~maxReorgDepth, + ~shouldRollbackOnReorg=config.shouldRollbackOnReorg, + ), + safeCheckpointTracking: SafeCheckpointTracking.make( + ~maxReorgDepth, + ~shouldRollbackOnReorg=config.shouldRollbackOnReorg, + ~chainReorgCheckpoints, + ), + currentBlockHeight: 0, + isProgressAtHead: false, + fetchState, + firstEventBlockNumber, + committedProgressBlockNumber: progressBlockNumber, + timestampCaughtUpToHeadOrEndblock, + numEventsProcessed, + numBatchesFetched, + } +} + +let makeFromConfig = (chainConfig: Config.chain, ~config, ~registrations, ~targetBufferSize) => { + let logger = Logging.createChild(~params={"chainId": chainConfig.id}) + + make( + ~chainConfig, + ~config, + ~registrations, + ~startBlock=chainConfig.startBlock, + ~endBlock=chainConfig.endBlock, + ~reorgCheckpoints=[], + ~maxReorgDepth=chainConfig.maxReorgDepth, + ~firstEventBlockNumber=None, + ~progressBlockNumber=-1, + ~timestampCaughtUpToHeadOrEndblock=None, + ~numEventsProcessed=0, + ~numBatchesFetched=0, + ~targetBufferSize, + ~logger, + ~dynamicContracts=[], + 
~isInReorgThreshold=false, + ) +} + +/** + * This function allows a chain fetcher to be created from metadata, in particular this is useful for restarting an indexer and making sure it fetches blocks from the same place. + */ +let makeFromDbState = async ( + chainConfig: Config.chain, + ~resumedChainState: Persistence.initialChainState, + ~reorgCheckpoints, + ~isInReorgThreshold, + ~config, + ~registrations, + ~targetBufferSize, +) => { + let chainId = chainConfig.id + let logger = Logging.createChild(~params={"chainId": chainId}) + + Prometheus.ProgressEventsCount.set(~processedCount=resumedChainState.numEventsProcessed, ~chainId) + + let progressBlockNumber = + // Can be -1 when not set + resumedChainState.progressBlockNumber >= 0 + ? resumedChainState.progressBlockNumber + : resumedChainState.startBlock - 1 + + make( + ~dynamicContracts=resumedChainState.dynamicContracts, + ~chainConfig, + ~startBlock=resumedChainState.startBlock, + ~endBlock=resumedChainState.endBlock, + ~config, + ~registrations, + ~reorgCheckpoints, + ~maxReorgDepth=resumedChainState.maxReorgDepth, + ~firstEventBlockNumber=resumedChainState.firstEventBlockNumber, + ~progressBlockNumber, + ~timestampCaughtUpToHeadOrEndblock=Env.updateSyncTimeOnRestart + ? 
None + : resumedChainState.timestampCaughtUpToHeadOrEndblock, + ~numEventsProcessed=resumedChainState.numEventsProcessed, + ~numBatchesFetched=0, + ~logger, + ~targetBufferSize, + ~isInReorgThreshold, + ) +} + +/** + * Helper function to get the configured start block for a contract from config + */ +let getContractStartBlock = ( + config: Config.t, + ~chain: ChainMap.Chain.t, + ~contractName: string, +): option => { + let chainConfig = config.chainMap->ChainMap.get(chain) + chainConfig.contracts + ->Js.Array2.find(contract => contract.name === contractName) + ->Option.flatMap(contract => contract.startBlock) +} + +let runContractRegistersOrThrow = async ( + ~itemsWithContractRegister: array, + ~chain: ChainMap.Chain.t, + ~config: Config.t, +) => { + let itemsWithDcs = [] + + let onRegister = (~item: Internal.item, ~contractAddress, ~contractName) => { + let eventItem = item->Internal.castUnsafeEventItem + let {blockNumber} = eventItem + + // Use contract-specific start block if configured, otherwise fall back to registration block + let contractStartBlock = switch getContractStartBlock( + config, + ~chain, + ~contractName=(contractName: Enums.ContractType.t :> string), + ) { + | Some(configuredStartBlock) => configuredStartBlock + | None => blockNumber + } + + let dc: Internal.indexingContract = { + address: contractAddress, + contractName: (contractName: Enums.ContractType.t :> string), + startBlock: contractStartBlock, + registrationBlock: Some(blockNumber), + } + + switch item->Internal.getItemDcs { + | None => { + item->Internal.setItemDcs([dc]) + itemsWithDcs->Array.push(item) + } + | Some(dcs) => dcs->Array.push(dc) + } + } + + let promises = [] + for idx in 0 to itemsWithContractRegister->Array.length - 1 { + let item = itemsWithContractRegister->Array.getUnsafe(idx) + let eventItem = item->Internal.castUnsafeEventItem + let contractRegister = switch eventItem { + | {eventConfig: {contractRegister: Some(contractRegister)}} => contractRegister + | 
{eventConfig: {contractRegister: None, name: eventName}} => + // Unexpected case, since we should pass only events with contract register to this function + Js.Exn.raiseError("Contract register is not set for event " ++ eventName) + } + + let errorMessage = "Event contractRegister failed, please fix the error to keep the indexer running smoothly" + + // Catch sync and async errors + try { + let params: UserContext.contractRegisterParams = { + item, + onRegister, + config, + isResolved: false, + } + let result = contractRegister(UserContext.getContractRegisterArgs(params)) + + // Even though `contractRegister` always returns a promise, + // in the ReScript type, but it might return a non-promise value for TS API. + if result->Promise.isCatchable { + promises->Array.push( + result + ->Promise.thenResolve(r => { + params.isResolved = true + r + }) + ->Promise.catch(exn => { + params.isResolved = true + exn->ErrorHandling.mkLogAndRaise(~msg=errorMessage, ~logger=item->Logging.getItemLogger) + }), + ) + } else { + params.isResolved = true + } + } catch { + | exn => + exn->ErrorHandling.mkLogAndRaise(~msg=errorMessage, ~logger=item->Logging.getItemLogger) + } + } + + if promises->Utils.Array.notEmpty { + let _ = await Promise.all(promises) + } + + itemsWithDcs +} + +let handleQueryResult = ( + chainFetcher: t, + ~query: FetchState.query, + ~newItems, + ~newItemsWithDcs, + ~latestFetchedBlock, +) => { + let fs = switch newItemsWithDcs { + | [] => chainFetcher.fetchState + | _ => chainFetcher.fetchState->FetchState.registerDynamicContracts(newItemsWithDcs) + } + + fs + ->FetchState.handleQueryResult(~query, ~latestFetchedBlock, ~newItems) + ->Result.map(fs => { + ...chainFetcher, + fetchState: fs, + }) +} + +/** +Gets the latest item on the front of the queue and returns updated fetcher +*/ +let hasProcessedToEndblock = (self: t) => { + let {committedProgressBlockNumber, fetchState} = self + switch fetchState.endBlock { + | Some(endBlock) => committedProgressBlockNumber >= 
endBlock + | None => false + } +} + +let hasNoMoreEventsToProcess = (self: t) => { + self.fetchState->FetchState.bufferSize === 0 +} + +let getHighestBlockBelowThreshold = (cf: t): int => { + let highestBlockBelowThreshold = cf.currentBlockHeight - cf.chainConfig.maxReorgDepth + highestBlockBelowThreshold < 0 ? 0 : highestBlockBelowThreshold +} + +/** +Finds the last known valid block number below the reorg block +If not found, returns the highest block below threshold +*/ +let getLastKnownValidBlock = async ( + chainFetcher: t, + ~reorgBlockNumber: int, + //Parameter used for dependency injecting in tests + ~getBlockHashes=(chainFetcher.sourceManager->SourceManager.getActiveSource).getBlockHashes, +) => { + // Improtant: It's important to not include the reorg detection block number + // because there might be different instances of the source + // with mismatching hashes between them. + // So we MUST always rollback the block number where we detected a reorg. + let scannedBlockNumbers = + chainFetcher.reorgDetection->ReorgDetection.getThresholdBlockNumbersBelowBlock( + ~blockNumber=reorgBlockNumber, + ~currentBlockHeight=chainFetcher.currentBlockHeight, + ) + + let getBlockHashes = blockNumbers => { + getBlockHashes(~blockNumbers, ~logger=chainFetcher.logger)->Promise.thenResolve(res => + switch res { + | Ok(v) => v + | Error(exn) => + exn->ErrorHandling.mkLogAndRaise( + ~msg="Failed to fetch blockHashes for given blockNumbers during rollback", + ) + } + ) + } + + switch scannedBlockNumbers { + | [] => chainFetcher->getHighestBlockBelowThreshold + | _ => { + let blockNumbersAndHashes = await getBlockHashes(scannedBlockNumbers) + + switch chainFetcher.reorgDetection->ReorgDetection.getLatestValidScannedBlock( + ~blockNumbersAndHashes, + ) { + | Some(blockNumber) => blockNumber + | None => chainFetcher->getHighestBlockBelowThreshold + } + } + } +} + +let isActivelyIndexing = (chainFetcher: t) => chainFetcher.fetchState->FetchState.isActivelyIndexing diff --git 
a/apps/hypersync-indexer/generated/src/eventFetching/ChainManager.res b/apps/hypersync-indexer/generated/src/eventFetching/ChainManager.res new file mode 100644 index 000000000..11bce6b59 --- /dev/null +++ b/apps/hypersync-indexer/generated/src/eventFetching/ChainManager.res @@ -0,0 +1,174 @@ +open Belt + +type t = { + committedCheckpointId: int, + chainFetchers: ChainMap.t, + multichain: Config.multichain, + isInReorgThreshold: bool, +} + +let calculateTargetBufferSize = (~activeChainsCount, ~config: Config.t) => { + let targetBatchesInBuffer = 3 + switch Env.targetBufferSize { + | Some(size) => size + | None => + config.batchSize * (activeChainsCount > targetBatchesInBuffer ? 1 : targetBatchesInBuffer) + } +} + +let makeFromConfig = (~config: Config.t, ~registrations): t => { + let targetBufferSize = calculateTargetBufferSize( + ~activeChainsCount=config.chainMap->ChainMap.size, + ~config, + ) + let chainFetchers = + config.chainMap->ChainMap.map( + ChainFetcher.makeFromConfig(_, ~config, ~registrations, ~targetBufferSize), + ) + { + committedCheckpointId: 0, + chainFetchers, + multichain: config.multichain, + isInReorgThreshold: false, + } +} + +let makeFromDbState = async ( + ~initialState: Persistence.initialState, + ~config: Config.t, + ~registrations, + ~persistence: Persistence.t, +): t => { + let isInReorgThreshold = if initialState.cleanRun { + false + } else { + // TODO: Move to Persistence.initialState + // Since now it's possible not to have rows in the history table + // even after the indexer started saving history (entered reorg threshold), + // This rows check might incorrectly return false for recovering the isInReorgThreshold option. + // But this is not a problem. There's no history anyways, and the indexer will be able to + // correctly calculate isInReorgThreshold as it starts. 
+ let hasStartedSavingHistory = await persistence.sql->DbFunctions.EntityHistory.hasRows + + //If we have started saving history, continue to save history + //as regardless of whether we are still in a reorg threshold + hasStartedSavingHistory + } + + let targetBufferSize = calculateTargetBufferSize( + ~activeChainsCount=initialState.chains->Array.length, + ~config, + ) + Prometheus.ProcessingMaxBatchSize.set(~maxBatchSize=config.batchSize) + Prometheus.IndexingTargetBufferSize.set(~targetBufferSize) + Prometheus.ReorgThreshold.set(~isInReorgThreshold) + initialState.cache->Utils.Dict.forEach(({effectName, count}) => { + Prometheus.EffectCacheCount.set(~count, ~effectName) + }) + + let chainFetchersArr = + await initialState.chains + ->Array.map(async (resumedChainState: Persistence.initialChainState) => { + let chain = Config.getChain(config, ~chainId=resumedChainState.id) + let chainConfig = config.chainMap->ChainMap.get(chain) + + ( + chain, + await chainConfig->ChainFetcher.makeFromDbState( + ~resumedChainState, + ~reorgCheckpoints=initialState.reorgCheckpoints, + ~isInReorgThreshold, + ~targetBufferSize, + ~config, + ~registrations, + ), + ) + }) + ->Promise.all + + let chainFetchers = ChainMap.fromArrayUnsafe(chainFetchersArr) + + { + committedCheckpointId: initialState.checkpointId, + multichain: config.multichain, + chainFetchers, + isInReorgThreshold, + } +} + +let getChainFetcher = (chainManager: t, ~chain: ChainMap.Chain.t): ChainFetcher.t => { + chainManager.chainFetchers->ChainMap.get(chain) +} + +let setChainFetcher = (chainManager: t, chainFetcher: ChainFetcher.t) => { + { + ...chainManager, + chainFetchers: chainManager.chainFetchers->ChainMap.set( + ChainMap.Chain.makeUnsafe(~chainId=chainFetcher.chainConfig.id), + chainFetcher, + ), + } +} + +let nextItemIsNone = (chainManager: t): bool => { + !Batch.hasMultichainReadyItem( + chainManager.chainFetchers->ChainMap.map(cf => { + cf.fetchState + }), + ~multichain=chainManager.multichain, + ) +} + +let 
createBatch = (chainManager: t, ~batchSizeTarget: int): Batch.t => { + Batch.make( + ~checkpointIdBeforeBatch=chainManager.committedCheckpointId, + ~chainsBeforeBatch=chainManager.chainFetchers->ChainMap.map((cf): Batch.chainBeforeBatch => { + fetchState: cf.fetchState, + progressBlockNumber: cf.committedProgressBlockNumber, + totalEventsProcessed: cf.numEventsProcessed, + sourceBlockNumber: cf.currentBlockHeight, + reorgDetection: cf.reorgDetection, + }), + ~multichain=chainManager.multichain, + ~batchSizeTarget, + ) +} + +let isProgressAtHead = chainManager => + chainManager.chainFetchers + ->ChainMap.values + ->Js.Array2.every(cf => cf.isProgressAtHead) + +let isActivelyIndexing = chainManager => + chainManager.chainFetchers + ->ChainMap.values + ->Js.Array2.every(ChainFetcher.isActivelyIndexing) + +let getSafeCheckpointId = (chainManager: t) => { + let chainFetchers = chainManager.chainFetchers->ChainMap.values + + let infinity = (%raw(`Infinity`): int) + let result = ref(infinity) + + for idx in 0 to chainFetchers->Array.length - 1 { + let chainFetcher = chainFetchers->Array.getUnsafe(idx) + switch chainFetcher.safeCheckpointTracking { + | None => () // Skip chains with maxReorgDepth = 0 + | Some(safeCheckpointTracking) => { + let safeCheckpointId = + safeCheckpointTracking->SafeCheckpointTracking.getSafeCheckpointId( + ~sourceBlockNumber=chainFetcher.currentBlockHeight, + ) + if safeCheckpointId < result.contents { + result := safeCheckpointId + } + } + } + } + + if result.contents === infinity || result.contents === 0 { + None // No safe checkpoint found + } else { + Some(result.contents) + } +} diff --git a/apps/hypersync-indexer/generated/src/eventFetching/NetworkSources.res b/apps/hypersync-indexer/generated/src/eventFetching/NetworkSources.res new file mode 100644 index 000000000..b402c2301 --- /dev/null +++ b/apps/hypersync-indexer/generated/src/eventFetching/NetworkSources.res @@ -0,0 +1,95 @@ +open Belt + +type rpc = { + url: string, + sourceFor: 
Source.sourceFor, + syncConfig?: Config.sourceSyncOptions, +} + +let getSyncConfig = ( + { + ?initialBlockInterval, + ?backoffMultiplicative, + ?accelerationAdditive, + ?intervalCeiling, + ?backoffMillis, + ?queryTimeoutMillis, + ?fallbackStallTimeout, + }: Config.sourceSyncOptions, +): Config.sourceSync => { + let queryTimeoutMillis = queryTimeoutMillis->Option.getWithDefault(20_000) + { + initialBlockInterval: Env.Configurable.SyncConfig.initialBlockInterval->Option.getWithDefault( + initialBlockInterval->Option.getWithDefault(10_000), + ), + // After an RPC error, how much to scale back the number of blocks requested at once + backoffMultiplicative: Env.Configurable.SyncConfig.backoffMultiplicative->Option.getWithDefault( + backoffMultiplicative->Option.getWithDefault(0.8), + ), + // Without RPC errors or timeouts, how much to increase the number of blocks requested by for the next batch + accelerationAdditive: Env.Configurable.SyncConfig.accelerationAdditive->Option.getWithDefault( + accelerationAdditive->Option.getWithDefault(500), + ), + // Do not further increase the block interval past this limit + intervalCeiling: Env.Configurable.SyncConfig.intervalCeiling->Option.getWithDefault( + intervalCeiling->Option.getWithDefault(10_000), + ), + // After an error, how long to wait before retrying + backoffMillis: backoffMillis->Option.getWithDefault(5000), + // How long to wait before cancelling an RPC request + queryTimeoutMillis, + fallbackStallTimeout: fallbackStallTimeout->Option.getWithDefault(queryTimeoutMillis / 2), + } +} + +let evm = ( + ~chain, + ~contracts: array, + ~hyperSync, + ~allEventSignatures, + ~shouldUseHypersyncClientDecoder, + ~rpcs: array, + ~lowercaseAddresses, +) => { + let eventRouter = + contracts + ->Belt.Array.flatMap(contract => contract.events) + ->EventRouter.fromEvmEventModsOrThrow(~chain) + + let sources = switch hyperSync { + | Some(endpointUrl) => [ + HyperSyncSource.make({ + chain, + contracts, + endpointUrl, + 
allEventSignatures, + eventRouter, + shouldUseHypersyncClientDecoder, + apiToken: Env.envioApiToken, + clientMaxRetries: Env.hyperSyncClientMaxRetries, + clientTimeoutMillis: Env.hyperSyncClientTimeoutMillis, + lowercaseAddresses, + serializationFormat: Env.hypersyncClientSerializationFormat, + enableQueryCaching: Env.hypersyncClientEnableQueryCaching, + }), + ] + | _ => [] + } + rpcs->Js.Array2.forEach(({?syncConfig, url, sourceFor}) => { + let _ = sources->Js.Array2.push( + RpcSource.make({ + chain, + sourceFor, + contracts, + syncConfig: getSyncConfig(syncConfig->Option.getWithDefault({})), + url, + eventRouter, + allEventSignatures, + shouldUseHypersyncClientDecoder, + lowercaseAddresses, + }), + ) + }) + + sources +} diff --git a/apps/hypersync-indexer/generated/src/globalState/GlobalState.res b/apps/hypersync-indexer/generated/src/globalState/GlobalState.res new file mode 100644 index 000000000..030aff31b --- /dev/null +++ b/apps/hypersync-indexer/generated/src/globalState/GlobalState.res @@ -0,0 +1,1188 @@ +open Belt + +type chain = ChainMap.Chain.t +type rollbackState = + | NoRollback + | ReorgDetected({chain: chain, blockNumber: int}) + | FindingReorgDepth + | FoundReorgDepth({chain: chain, rollbackTargetBlockNumber: int}) + | RollbackReady({diffInMemoryStore: InMemoryStore.t, eventsProcessedDiffByChain: dict}) + +module WriteThrottlers = { + type t = { + chainMetaData: Throttler.t, + pruneStaleEntityHistory: Throttler.t, + } + let make = (): t => { + let chainMetaData = { + let intervalMillis = Env.ThrottleWrites.chainMetadataIntervalMillis + let logger = Logging.createChild( + ~params={ + "context": "Throttler for chain metadata writes", + "intervalMillis": intervalMillis, + }, + ) + Throttler.make(~intervalMillis, ~logger) + } + + let pruneStaleEntityHistory = { + let intervalMillis = Env.ThrottleWrites.pruneStaleDataIntervalMillis + let logger = Logging.createChild( + ~params={ + "context": "Throttler for pruning stale entity history data", + 
"intervalMillis": intervalMillis, + }, + ) + Throttler.make(~intervalMillis, ~logger) + } + {chainMetaData, pruneStaleEntityHistory} + } +} + +type t = { + indexer: Indexer.t, + chainManager: ChainManager.t, + processedBatches: int, + currentlyProcessingBatch: bool, + rollbackState: rollbackState, + indexerStartTime: Js.Date.t, + writeThrottlers: WriteThrottlers.t, + loadManager: LoadManager.t, + keepProcessAlive: bool, + //Initialized as 0, increments, when rollbacks occur to invalidate + //responses based on the wrong stateId + id: int, +} + +let make = ( + ~indexer: Indexer.t, + ~chainManager: ChainManager.t, + ~isDevelopmentMode=false, + ~shouldUseTui=false, +) => { + { + indexer, + currentlyProcessingBatch: false, + processedBatches: 0, + chainManager, + indexerStartTime: Js.Date.make(), + rollbackState: NoRollback, + writeThrottlers: WriteThrottlers.make(), + loadManager: LoadManager.make(), + keepProcessAlive: isDevelopmentMode || shouldUseTui, + id: 0, + } +} + +let getId = self => self.id +let incrementId = self => {...self, id: self.id + 1} +let setChainManager = (self, chainManager) => { + ...self, + chainManager, +} + +let isPreparingRollback = state => + switch state.rollbackState { + | NoRollback + | // We already updated fetch states here + // so we treat it as not rolling back + RollbackReady(_) => false + | FindingReorgDepth + | ReorgDetected(_) + | FoundReorgDepth(_) => true + } + +type partitionQueryResponse = { + chain: chain, + response: Source.blockRangeFetchResponse, + query: FetchState.query, +} + +type shouldExit = ExitWithSuccess | NoExit + +// Need to dispatch an action for every async operation +// to get access to the latest state. +type action = + // After a response is received, we validate it with the new state + // if there's no reorg to continue processing the response. 
+ | ValidatePartitionQueryResponse(partitionQueryResponse) + // This should be a separate action from ValidatePartitionQueryResponse + // because when processing the response, there might be an async contract registration. + // So after it's finished we dispatch the submit action to get the latest fetch state. + | SubmitPartitionQueryResponse({ + newItems: array, + newItemsWithDcs: array, + currentBlockHeight: int, + latestFetchedBlock: FetchState.blockNumberAndTimestamp, + query: FetchState.query, + chain: chain, + }) + | FinishWaitingForNewBlock({chain: chain, currentBlockHeight: int}) + | EventBatchProcessed({batch: Batch.t}) + | StartProcessingBatch + | StartFindingReorgDepth + | FindReorgDepth({chain: chain, rollbackTargetBlockNumber: int}) + | EnterReorgThreshold + | UpdateQueues({ + progressedChainsById: dict, + // Needed to prevent overwriting the blockLag + // set by EnterReorgThreshold + shouldEnterReorgThreshold: bool, + }) + | SuccessExit + | ErrorExit(ErrorHandling.t) + | SetRollbackState({ + diffInMemoryStore: InMemoryStore.t, + rollbackedChainManager: ChainManager.t, + eventsProcessedDiffByChain: dict, + }) + +type queryChain = CheckAllChains | Chain(chain) +type task = + | NextQuery(queryChain) + | ProcessPartitionQueryResponse(partitionQueryResponse) + | ProcessEventBatch + | UpdateChainMetaDataAndCheckForExit(shouldExit) + | Rollback + | PruneStaleEntityHistory + +let updateChainFetcherCurrentBlockHeight = (chainFetcher: ChainFetcher.t, ~currentBlockHeight) => { + if currentBlockHeight > chainFetcher.currentBlockHeight { + Prometheus.setSourceChainHeight( + ~blockNumber=currentBlockHeight, + ~chainId=chainFetcher.chainConfig.id, + ) + + { + ...chainFetcher, + currentBlockHeight, + } + } else { + chainFetcher + } +} + +let updateChainMetadataTable = ( + cm: ChainManager.t, + ~persistence: Persistence.t, + ~throttler: Throttler.t, +) => { + let chainsData: dict = Js.Dict.empty() + + cm.chainFetchers + ->ChainMap.values + ->Belt.Array.forEach(cf => { 
+ chainsData->Js.Dict.set( + cf.chainConfig.id->Belt.Int.toString, + { + blockHeight: cf.currentBlockHeight, + firstEventBlockNumber: cf.firstEventBlockNumber->Js.Null.fromOption, + isHyperSync: (cf.sourceManager->SourceManager.getActiveSource).poweredByHyperSync, + latestFetchedBlockNumber: cf.fetchState->FetchState.bufferBlockNumber, + timestampCaughtUpToHeadOrEndblock: cf.timestampCaughtUpToHeadOrEndblock->Js.Null.fromOption, + numBatchesFetched: cf.numBatchesFetched, + }, + ) + }) + + //Don't await this set, it can happen in its own time + throttler->Throttler.schedule(() => + persistence.sql + ->InternalTable.Chains.setMeta(~pgSchema=Db.publicSchema, ~chainsData) + ->Promise.ignoreValue + ) +} + +/** +Takes in a chain manager and sets all chains timestamp caught up to head +when valid state lines up and returns an updated chain manager +*/ +let updateProgressedChains = (chainManager: ChainManager.t, ~batch: Batch.t) => { + Prometheus.ProgressBatchCount.increment() + + let nextQueueItemIsNone = chainManager->ChainManager.nextItemIsNone + + let allChainsAtHead = chainManager->ChainManager.isProgressAtHead + //Update the timestampCaughtUpToHeadOrEndblock values + let chainFetchers = chainManager.chainFetchers->ChainMap.map(cf => { + let chain = ChainMap.Chain.makeUnsafe(~chainId=cf.chainConfig.id) + + let maybeChainAfterBatch = + batch.progressedChainsById->Utils.Dict.dangerouslyGetByIntNonOption( + chain->ChainMap.Chain.toChainId, + ) + + let cf = switch maybeChainAfterBatch { + | Some(chainAfterBatch) => { + if cf.committedProgressBlockNumber !== chainAfterBatch.progressBlockNumber { + Prometheus.ProgressBlockNumber.set( + ~blockNumber=chainAfterBatch.progressBlockNumber, + ~chainId=chain->ChainMap.Chain.toChainId, + ) + } + if cf.numEventsProcessed !== chainAfterBatch.totalEventsProcessed { + Prometheus.ProgressEventsCount.set( + ~processedCount=chainAfterBatch.totalEventsProcessed, + ~chainId=chain->ChainMap.Chain.toChainId, + ) + } + + // Calculate and set 
latency metrics + switch batch->Batch.findLastEventItem(~chainId=chain->ChainMap.Chain.toChainId) { + | Some(eventItem) => { + let blockTimestamp = eventItem.event.block->Types.Block.getTimestamp + let currentTimeMs = Js.Date.now()->Float.toInt + let blockTimestampMs = blockTimestamp * 1000 + let latencyMs = currentTimeMs - blockTimestampMs + + Prometheus.ProgressLatency.set(~latencyMs, ~chainId=chain->ChainMap.Chain.toChainId) + } + | None => () + } + + { + ...cf, + // Since we process per chain always in order, + // we need to calculate it once, by using the first item in a batch + firstEventBlockNumber: switch cf.firstEventBlockNumber { + | Some(_) => cf.firstEventBlockNumber + | None => batch->Batch.findFirstEventBlockNumber(~chainId=chain->ChainMap.Chain.toChainId) + }, + committedProgressBlockNumber: chainAfterBatch.progressBlockNumber, + numEventsProcessed: chainAfterBatch.totalEventsProcessed, + isProgressAtHead: cf.isProgressAtHead || chainAfterBatch.isProgressAtHeadWhenBatchCreated, + safeCheckpointTracking: switch cf.safeCheckpointTracking { + | Some(safeCheckpointTracking) => + Some( + safeCheckpointTracking->SafeCheckpointTracking.updateOnNewBatch( + ~sourceBlockNumber=cf.currentBlockHeight, + ~chainId=chain->ChainMap.Chain.toChainId, + ~batchCheckpointIds=batch.checkpointIds, + ~batchCheckpointBlockNumbers=batch.checkpointBlockNumbers, + ~batchCheckpointChainIds=batch.checkpointChainIds, + ), + ) + | None => None + }, + } + } + | None => cf + } + + /* strategy for TUI synced status: + * Firstly -> only update synced status after batch is processed (not on batch creation). 
But also set when a batch tries to be created and there is no batch + * + * Secondly -> reset timestampCaughtUpToHead and isFetching at head when dynamic contracts get registered to a chain if they are not within 0.001 percent of the current block height + * + * New conditions for valid synced: + * + * CASE 1 (chains are being synchronised at the head) + * + * All chain fetchers are fetching at the head AND + * No events that can be processed on the queue (even if events still exist on the individual queues) + * CASE 2 (chain finishes earlier than any other chain) + * + * CASE 3 endblock has been reached and latest processed block is greater than or equal to endblock (both fields must be Some) + * + * The given chain fetcher is fetching at the head or latest processed block >= endblock + * The given chain has processed all events on the queue + * see https://github.com/Float-Capital/indexer/pull/1388 */ + if cf->ChainFetcher.hasProcessedToEndblock { + // in the case this is already set, don't reset and instead propagate the existing value + let timestampCaughtUpToHeadOrEndblock = + cf.timestampCaughtUpToHeadOrEndblock->Option.isSome + ? 
cf.timestampCaughtUpToHeadOrEndblock + : Js.Date.make()->Some + { + ...cf, + timestampCaughtUpToHeadOrEndblock, + } + } else if cf.timestampCaughtUpToHeadOrEndblock->Option.isNone && cf.isProgressAtHead { + //Only calculate and set timestampCaughtUpToHeadOrEndblock if chain fetcher is at the head and + //its not already set + //CASE1 + //All chains are caught up to head chainManager queue returns None + //Meaning we are busy synchronizing chains at the head + if nextQueueItemIsNone && allChainsAtHead { + { + ...cf, + timestampCaughtUpToHeadOrEndblock: Js.Date.make()->Some, + } + } else { + //CASE2 -> Only calculate if case1 fails + //All events have been processed on the chain fetchers queue + //Other chains may be busy syncing + let hasNoMoreEventsToProcess = cf->ChainFetcher.hasNoMoreEventsToProcess + + if hasNoMoreEventsToProcess { + { + ...cf, + timestampCaughtUpToHeadOrEndblock: Js.Date.make()->Some, + } + } else { + //Default to just returning cf + cf + } + } + } else { + //Default to just returning cf + cf + } + }) + + let allChainsSyncedAtHead = + chainFetchers + ->ChainMap.values + ->Array.every(cf => cf.timestampCaughtUpToHeadOrEndblock->Option.isSome) + + if allChainsSyncedAtHead { + Prometheus.setAllChainsSyncedToHead() + } + + { + ...chainManager, + committedCheckpointId: switch batch.checkpointIds->Utils.Array.last { + | Some(checkpointId) => checkpointId + | None => chainManager.committedCheckpointId + }, + chainFetchers, + } +} + +let validatePartitionQueryResponse = ( + state, + {chain, response, query} as partitionQueryResponse: partitionQueryResponse, +) => { + let chainFetcher = state.chainManager.chainFetchers->ChainMap.get(chain) + let { + parsedQueueItems, + latestFetchedBlockNumber, + stats, + currentBlockHeight, + reorgGuard, + fromBlockQueried, + } = response + + if currentBlockHeight > chainFetcher.currentBlockHeight { + Prometheus.SourceHeight.set( + ~blockNumber=currentBlockHeight, + ~chainId=chainFetcher.chainConfig.id, + // The 
currentBlockHeight from response won't necessarily + // belong to the currently active source. + // But for simplicity, assume it does. + ~sourceName=(chainFetcher.sourceManager->SourceManager.getActiveSource).name, + ) + } + + if Env.Benchmark.shouldSaveData { + Benchmark.addBlockRangeFetched( + ~totalTimeElapsed=stats.totalTimeElapsed, + ~parsingTimeElapsed=stats.parsingTimeElapsed->Belt.Option.getWithDefault(0), + ~pageFetchTime=stats.pageFetchTime->Belt.Option.getWithDefault(0), + ~chainId=chain->ChainMap.Chain.toChainId, + ~fromBlock=fromBlockQueried, + ~toBlock=latestFetchedBlockNumber, + ~numEvents=parsedQueueItems->Array.length, + ~numAddresses=query.addressesByContractName->FetchState.addressesByContractNameCount, + ~queryName=switch query { + | {target: Merge(_)} => `Merge Query` + | {selection: {dependsOnAddresses: false}} => `Wildcard Query` + | {selection: {dependsOnAddresses: true}} => `Normal Query` + }, + ) + } + + let (updatedReorgDetection, reorgResult: ReorgDetection.reorgResult) = + chainFetcher.reorgDetection->ReorgDetection.registerReorgGuard(~reorgGuard, ~currentBlockHeight) + + let updatedChainFetcher = { + ...chainFetcher, + reorgDetection: updatedReorgDetection, + } + + let nextState = { + ...state, + chainManager: { + ...state.chainManager, + chainFetchers: state.chainManager.chainFetchers->ChainMap.set(chain, updatedChainFetcher), + }, + } + + let rollbackWithReorgDetectedBlockNumber = switch reorgResult { + | ReorgDetected(reorgDetected) => { + chainFetcher.logger->Logging.childInfo( + reorgDetected->ReorgDetection.reorgDetectedToLogParams( + ~shouldRollbackOnReorg=state.indexer.config.shouldRollbackOnReorg, + ), + ) + Prometheus.ReorgCount.increment(~chain) + Prometheus.ReorgDetectionBlockNumber.set( + ~blockNumber=reorgDetected.scannedBlock.blockNumber, + ~chain, + ) + if state.indexer.config.shouldRollbackOnReorg { + Some(reorgDetected.scannedBlock.blockNumber) + } else { + None + } + } + | NoReorg => None + } + + switch 
rollbackWithReorgDetectedBlockNumber { + | None => (nextState, [ProcessPartitionQueryResponse(partitionQueryResponse)]) + | Some(reorgDetectedBlockNumber) => { + let chainManager = switch state.rollbackState { + | RollbackReady({eventsProcessedDiffByChain}) => { + ...state.chainManager, + // Restore event counters for ALL chains, not just the reorg chain. + // The previous rollback subtracted from all chains' counters, + // but was never committed to DB. So we must undo the subtraction + // for every chain before the new rollback subtracts again. + chainFetchers: state.chainManager.chainFetchers->ChainMap.mapWithKey((c, chainFetcher) => { + switch eventsProcessedDiffByChain->Utils.Dict.dangerouslyGetByIntNonOption( + c->ChainMap.Chain.toChainId, + ) { + | Some(eventsProcessedDiff) => { + ...chainFetcher, + // Since we detected a reorg, until rollback wasn't completed in the db + // We return the events processed counter to the pre-rollback value, + // to decrease it once more for the new rollback. 
+ numEventsProcessed: chainFetcher.numEventsProcessed + eventsProcessedDiff, + } + | None => chainFetcher + } + }), + } + | _ => state.chainManager + } + ( + { + ...nextState->incrementId, + chainManager, + rollbackState: ReorgDetected({ + chain, + blockNumber: reorgDetectedBlockNumber, + }), + }, + [Rollback], + ) + } + } +} + +let submitPartitionQueryResponse = ( + state, + ~newItems, + ~newItemsWithDcs, + ~currentBlockHeight, + ~latestFetchedBlock, + ~query, + ~chain, +) => { + let chainFetcher = state.chainManager.chainFetchers->ChainMap.get(chain) + + let updatedChainFetcher = + chainFetcher + ->ChainFetcher.handleQueryResult(~query, ~latestFetchedBlock, ~newItems, ~newItemsWithDcs) + ->Utils.unwrapResultExn + ->updateChainFetcherCurrentBlockHeight(~currentBlockHeight) + + let updatedChainFetcher = { + ...updatedChainFetcher, + numBatchesFetched: updatedChainFetcher.numBatchesFetched + 1, + } + + if !chainFetcher.isProgressAtHead && updatedChainFetcher.isProgressAtHead { + updatedChainFetcher.logger->Logging.childInfo("All events have been fetched") + } + + let nextState = { + ...state, + chainManager: { + ...state.chainManager, + chainFetchers: state.chainManager.chainFetchers->ChainMap.set(chain, updatedChainFetcher), + }, + } + + ( + nextState, + [UpdateChainMetaDataAndCheckForExit(NoExit), ProcessEventBatch, NextQuery(Chain(chain))], + ) +} + +let processPartitionQueryResponse = async ( + state, + {chain, response, query}: partitionQueryResponse, + ~dispatchAction, +) => { + let { + parsedQueueItems, + latestFetchedBlockNumber, + currentBlockHeight, + latestFetchedBlockTimestamp, + } = response + + let itemsWithContractRegister = [] + let newItems = [] + + for idx in 0 to parsedQueueItems->Array.length - 1 { + let item = parsedQueueItems->Array.getUnsafe(idx) + let eventItem = item->Internal.castUnsafeEventItem + if eventItem.eventConfig.contractRegister !== None { + itemsWithContractRegister->Array.push(item) + } + + // TODO: Don't really need to keep it 
in the queue + // when there's no handler (besides raw_events, processed counter, and dcsToStore consuming) + newItems->Array.push(item) + } + + let newItemsWithDcs = switch itemsWithContractRegister { + | [] as empty => empty + | _ => + await ChainFetcher.runContractRegistersOrThrow( + ~itemsWithContractRegister, + ~chain, + ~config=state.indexer.config, + ) + } + + dispatchAction( + SubmitPartitionQueryResponse({ + newItems, + newItemsWithDcs, + currentBlockHeight, + latestFetchedBlock: { + blockNumber: latestFetchedBlockNumber, + blockTimestamp: latestFetchedBlockTimestamp, + }, + chain, + query, + }), + ) +} + +let updateChainFetcher = (chainFetcherUpdate, ~state, ~chain) => { + ( + { + ...state, + chainManager: { + ...state.chainManager, + chainFetchers: state.chainManager.chainFetchers->ChainMap.update(chain, chainFetcherUpdate), + }, + }, + [], + ) +} + +let onEnterReorgThreshold = (~state: t) => { + Logging.info("Reorg threshold reached") + Prometheus.ReorgThreshold.set(~isInReorgThreshold=true) + + let chainFetchers = state.chainManager.chainFetchers->ChainMap.map(chainFetcher => { + { + ...chainFetcher, + fetchState: chainFetcher.fetchState->FetchState.updateInternal( + ~blockLag=Env.indexingBlockLag->Option.getWithDefault(0), + ), + } + }) + + { + ...state, + chainManager: { + ...state.chainManager, + chainFetchers, + isInReorgThreshold: true, + }, + } +} + +let actionReducer = (state: t, action: action) => { + switch action { + | FinishWaitingForNewBlock({chain, currentBlockHeight}) => { + let isBelowReorgThreshold = + !state.chainManager.isInReorgThreshold && state.indexer.config.shouldRollbackOnReorg + let shouldEnterReorgThreshold = + isBelowReorgThreshold && + state.chainManager.chainFetchers + ->ChainMap.values + ->Array.every(chainFetcher => { + chainFetcher.fetchState->FetchState.isReadyToEnterReorgThreshold(~currentBlockHeight) + }) + + let state = { + ...state, + chainManager: { + ...state.chainManager, + chainFetchers: 
state.chainManager.chainFetchers->ChainMap.update(chain, chainFetcher => { + chainFetcher->updateChainFetcherCurrentBlockHeight(~currentBlockHeight) + }), + }, + } + + if shouldEnterReorgThreshold { + (onEnterReorgThreshold(~state), [NextQuery(CheckAllChains)]) + } else { + (state, [NextQuery(Chain(chain))]) + } + } + | ValidatePartitionQueryResponse(partitionQueryResponse) => + state->validatePartitionQueryResponse(partitionQueryResponse) + | SubmitPartitionQueryResponse({ + newItems, + newItemsWithDcs, + currentBlockHeight, + latestFetchedBlock, + query, + chain, + }) => + state->submitPartitionQueryResponse( + ~newItems, + ~newItemsWithDcs, + ~currentBlockHeight, + ~latestFetchedBlock, + ~query, + ~chain, + ) + | EventBatchProcessed({batch}) => + let maybePruneEntityHistory = + state.indexer.config->Config.shouldPruneHistory( + ~isInReorgThreshold=state.chainManager.isInReorgThreshold, + ) + ? [PruneStaleEntityHistory] + : [] + + let state = { + ...state, + // Can safely reset rollback state, since overwrite is not possible. + // If rollback is pending, the EventBatchProcessed will be handled by the invalid action reducer instead. + rollbackState: NoRollback, + chainManager: state.chainManager->updateProgressedChains(~batch), + currentlyProcessingBatch: false, + processedBatches: state.processedBatches + 1, + } + + let shouldExit = EventProcessing.allChainsEventsProcessedToEndblock( + state.chainManager.chainFetchers, + ) + ? 
{ + Logging.info("All chains are caught up to end blocks.") + + // Keep the indexer process running when in development mode (for Dev Console) + // or when TUI is enabled (for display) + if state.keepProcessAlive { + NoExit + } else { + ExitWithSuccess + } + } + : NoExit + + ( + state, + [UpdateChainMetaDataAndCheckForExit(shouldExit), ProcessEventBatch]->Array.concat( + maybePruneEntityHistory, + ), + ) + + | StartProcessingBatch => ({...state, currentlyProcessingBatch: true}, []) + | StartFindingReorgDepth => ({...state, rollbackState: FindingReorgDepth}, []) + | FindReorgDepth({chain, rollbackTargetBlockNumber}) => ( + { + ...state, + rollbackState: FoundReorgDepth({ + chain, + rollbackTargetBlockNumber, + }), + }, + [Rollback], + ) + | EnterReorgThreshold => (onEnterReorgThreshold(~state), [NextQuery(CheckAllChains)]) + | UpdateQueues({progressedChainsById, shouldEnterReorgThreshold}) => + let chainFetchers = state.chainManager.chainFetchers->ChainMap.mapWithKey((chain, cf) => { + let fs = switch progressedChainsById->Utils.Dict.dangerouslyGetByIntNonOption( + chain->ChainMap.Chain.toChainId, + ) { + | Some(chainAfterBatch) => chainAfterBatch.fetchState + | None => cf.fetchState + } + { + ...cf, + fetchState: shouldEnterReorgThreshold + ? 
fs->FetchState.updateInternal(~blockLag=Env.indexingBlockLag->Option.getWithDefault(0)) + : fs, + } + }) + + let chainManager = { + ...state.chainManager, + chainFetchers, + } + + ( + { + ...state, + chainManager, + }, + [NextQuery(CheckAllChains)], + ) + | SetRollbackState({diffInMemoryStore, rollbackedChainManager, eventsProcessedDiffByChain}) => ( + { + ...state, + rollbackState: RollbackReady({ + diffInMemoryStore, + eventsProcessedDiffByChain, + }), + chainManager: rollbackedChainManager, + }, + [NextQuery(CheckAllChains), ProcessEventBatch], + ) + | SuccessExit => { + Logging.info("Exiting with success") + NodeJs.process->NodeJs.exitWithCode(Success) + (state, []) + } + | ErrorExit(errHandler) => + errHandler->ErrorHandling.log + NodeJs.process->NodeJs.exitWithCode(Failure) + (state, []) + } +} + +let invalidatedActionReducer = (state: t, action: action) => + switch action { + | EventBatchProcessed({batch}) if state->isPreparingRollback => + Logging.info("Finished processing batch before rollback, actioning rollback") + ( + { + ...state, + chainManager: state.chainManager->updateProgressedChains(~batch), + currentlyProcessingBatch: false, + processedBatches: state.processedBatches + 1, + }, + [Rollback], + ) + | ErrorExit(_) => actionReducer(state, action) + | _ => + Logging.trace({ + "msg": "Invalidated action discarded", + "action": action->S.convertOrThrow(Utils.Schema.variantTag), + }) + (state, []) + } + +let checkAndFetchForChain = ( + //Used for dependency injection for tests + ~waitForNewBlock, + ~executeQuery, + //required args + ~state, + ~dispatchAction, +) => async chain => { + let chainFetcher = state.chainManager.chainFetchers->ChainMap.get(chain) + if !isPreparingRollback(state) { + let {currentBlockHeight, fetchState} = chainFetcher + + await chainFetcher.sourceManager->SourceManager.fetchNext( + ~fetchState, + ~waitForNewBlock=(~knownHeight) => + chainFetcher.sourceManager->waitForNewBlock(~knownHeight), + ~onNewBlock=(~knownHeight) => + 
dispatchAction(FinishWaitingForNewBlock({chain, currentBlockHeight: knownHeight})), + ~currentBlockHeight, + ~executeQuery=async query => { + try { + let response = await chainFetcher.sourceManager->executeQuery(~query, ~currentBlockHeight) + dispatchAction(ValidatePartitionQueryResponse({chain, response, query})) + } catch { + | exn => dispatchAction(ErrorExit(exn->ErrorHandling.make)) + } + }, + ~stateId=state.id, + ) + } +} + +let injectedTaskReducer = ( + //Used for dependency injection for tests + ~waitForNewBlock, + ~executeQuery, + ~getLastKnownValidBlock, +) => async ( + //required args + state: t, + task: task, + ~dispatchAction, +) => { + switch task { + | ProcessPartitionQueryResponse(partitionQueryResponse) => + state->processPartitionQueryResponse(partitionQueryResponse, ~dispatchAction)->Promise.done + | PruneStaleEntityHistory => + let runPrune = async () => { + switch state.chainManager->ChainManager.getSafeCheckpointId { + | None => () + | Some(safeCheckpointId) => + await state.indexer.persistence.sql->InternalTable.Checkpoints.pruneStaleCheckpoints( + ~pgSchema=Env.Db.publicSchema, + ~safeCheckpointId, + ) + + for idx in 0 to Entities.allEntities->Array.length - 1 { + if idx !== 0 { + // Add some delay between entities + // To unblock the pg client if it's needed for something else + await Utils.delay(1000) + } + let entityConfig = Entities.allEntities->Array.getUnsafe(idx) + let timeRef = Hrtime.makeTimer() + try { + let () = + await state.indexer.persistence.sql->EntityHistory.pruneStaleEntityHistory( + ~entityName=entityConfig.name, + ~entityIndex=entityConfig.index, + ~pgSchema=Env.Db.publicSchema, + ~safeCheckpointId, + ) + } catch { + | exn => + exn->ErrorHandling.mkLogAndRaise( + ~msg=`Failed to prune stale entity history`, + ~logger=Logging.createChild( + ~params={ + "entityName": entityConfig.name, + "safeCheckpointId": safeCheckpointId, + }, + ), + ) + } + Prometheus.RollbackHistoryPrune.increment( + 
~timeMillis=Hrtime.timeSince(timeRef)->Hrtime.toMillis, + ~entityName=entityConfig.name, + ) + } + } + } + state.writeThrottlers.pruneStaleEntityHistory->Throttler.schedule(runPrune) + + | UpdateChainMetaDataAndCheckForExit(shouldExit) => + let {chainManager, writeThrottlers} = state + switch shouldExit { + | ExitWithSuccess => + updateChainMetadataTable( + chainManager, + ~throttler=writeThrottlers.chainMetaData, + ~persistence=state.indexer.persistence, + ) + dispatchAction(SuccessExit) + | NoExit => + updateChainMetadataTable( + chainManager, + ~throttler=writeThrottlers.chainMetaData, + ~persistence=state.indexer.persistence, + )->ignore + } + | NextQuery(chainCheck) => + let fetchForChain = checkAndFetchForChain( + ~waitForNewBlock, + ~executeQuery, + ~state, + ~dispatchAction, + ) + + switch chainCheck { + | Chain(chain) => await chain->fetchForChain + | CheckAllChains => + //Mapping from the state's chainManager so we can construct tests that don't use + //all chains + let _ = + await state.chainManager.chainFetchers + ->ChainMap.keys + ->Array.map(fetchForChain(_)) + ->Promise.all + } + | ProcessEventBatch => + if !state.currentlyProcessingBatch && !isPreparingRollback(state) { + let batch = + state.chainManager->ChainManager.createBatch( + ~batchSizeTarget=state.indexer.config.batchSize, + ) + + let progressedChainsById = batch.progressedChainsById + let totalBatchSize = batch.totalBatchSize + + let isInReorgThreshold = state.chainManager.isInReorgThreshold + let shouldSaveHistory = state.indexer.config->Config.shouldSaveHistory(~isInReorgThreshold) + + let isBelowReorgThreshold = + !state.chainManager.isInReorgThreshold && state.indexer.config.shouldRollbackOnReorg + let shouldEnterReorgThreshold = + isBelowReorgThreshold && + state.chainManager.chainFetchers + ->ChainMap.values + ->Array.every(chainFetcher => { + let fetchState = switch progressedChainsById->Utils.Dict.dangerouslyGetByIntNonOption( + chainFetcher.fetchState.chainId, + ) { + | 
Some(chainAfterBatch) => chainAfterBatch.fetchState + | None => chainFetcher.fetchState + } + fetchState->FetchState.isReadyToEnterReorgThreshold( + ~currentBlockHeight=chainFetcher.currentBlockHeight, + ) + }) + + if shouldEnterReorgThreshold { + dispatchAction(EnterReorgThreshold) + } + + if progressedChainsById->Utils.Dict.isEmpty { + () + } else { + if Env.Benchmark.shouldSaveData { + let group = "Other" + Benchmark.addSummaryData( + ~group, + ~label=`Batch Size`, + ~value=totalBatchSize->Belt.Int.toFloat, + ) + } + + dispatchAction(StartProcessingBatch) + dispatchAction(UpdateQueues({progressedChainsById, shouldEnterReorgThreshold})) + + //In the case of a rollback, use the provided in memory store + //With rolled back values + let rollbackInMemStore = switch state.rollbackState { + | RollbackReady({diffInMemoryStore}) => Some(diffInMemoryStore) + | _ => None + } + + let inMemoryStore = rollbackInMemStore->Option.getWithDefault(InMemoryStore.make(~entities=Entities.allEntities)) + + inMemoryStore->InMemoryStore.setBatchDcs(~batch, ~shouldSaveHistory) + + switch await EventProcessing.processEventBatch( + ~batch, + ~inMemoryStore, + ~isInReorgThreshold, + ~loadManager=state.loadManager, + ~indexer=state.indexer, + ~chainFetchers=state.chainManager.chainFetchers, + ) { + | exception exn => + //All cases should be handled/caught before this with better user messaging. 
+ //This is just a safety in case something unexpected happens + let errHandler = + exn->ErrorHandling.make(~msg="A top level unexpected error occurred during processing") + dispatchAction(ErrorExit(errHandler)) + | res => + switch res { + | Ok() => dispatchAction(EventBatchProcessed({batch: batch})) + | Error(errHandler) => dispatchAction(ErrorExit(errHandler)) + } + } + } + } + | Rollback => + //If it isn't processing a batch currently continue with rollback otherwise wait for current batch to finish processing + switch state { + | {rollbackState: NoRollback | RollbackReady(_)} => + Js.Exn.raiseError("Internal error: Rollback initiated with invalid state") + | {rollbackState: ReorgDetected({chain, blockNumber: reorgBlockNumber})} => { + let chainFetcher = state.chainManager.chainFetchers->ChainMap.get(chain) + + dispatchAction(StartFindingReorgDepth) + let rollbackTargetBlockNumber = + await chainFetcher->getLastKnownValidBlock(~reorgBlockNumber) + + dispatchAction(FindReorgDepth({chain, rollbackTargetBlockNumber})) + } + // We can come to this case when event batch finished processing + // while we are still finding the reorg depth + // Do nothing here, just wait for reorg depth to be found + | {rollbackState: FindingReorgDepth} => () + | {rollbackState: FoundReorgDepth(_), currentlyProcessingBatch: true} => + Logging.info("Waiting for batch to finish processing before executing rollback") + | {rollbackState: FoundReorgDepth({chain: reorgChain, rollbackTargetBlockNumber})} => + let startTime = Hrtime.makeTimer() + + let chainFetcher = state.chainManager.chainFetchers->ChainMap.get(reorgChain) + + let logger = Logging.createChildFrom( + ~logger=chainFetcher.logger, + ~params={ + "action": "Rollback", + "reorgChain": reorgChain, + "targetBlockNumber": rollbackTargetBlockNumber, + }, + ) + logger->Logging.childInfo("Started rollback on reorg") + Prometheus.RollbackTargetBlockNumber.set( + ~blockNumber=rollbackTargetBlockNumber, + ~chain=reorgChain, + ) + + let 
reorgChainId = reorgChain->ChainMap.Chain.toChainId + + let rollbackTargetCheckpointId = { + switch await state.indexer.persistence.sql->InternalTable.Checkpoints.getRollbackTargetCheckpoint( + ~pgSchema=Env.Db.publicSchema, + ~reorgChainId, + ~lastKnownValidBlockNumber=rollbackTargetBlockNumber, + ) { + | [checkpoint] => checkpoint["id"] + | _ => 0 + } + } + + let eventsProcessedDiffByChain = Js.Dict.empty() + let newProgressBlockNumberPerChain = Js.Dict.empty() + let rollbackedProcessedEvents = ref(0) + + { + let rollbackProgressDiff = + await state.indexer.persistence.sql->InternalTable.Checkpoints.getRollbackProgressDiff( + ~pgSchema=Env.Db.publicSchema, + ~rollbackTargetCheckpointId, + ) + for idx in 0 to rollbackProgressDiff->Js.Array2.length - 1 { + let diff = rollbackProgressDiff->Js.Array2.unsafe_get(idx) + eventsProcessedDiffByChain->Utils.Dict.setByInt( + diff["chain_id"], + switch diff["events_processed_diff"]->Int.fromString { + | Some(eventsProcessedDiff) => { + rollbackedProcessedEvents := + rollbackedProcessedEvents.contents + eventsProcessedDiff + eventsProcessedDiff + } + | None => + Js.Exn.raiseError( + `Unexpected case: Invalid events processed diff ${diff["events_processed_diff"]}`, + ) + }, + ) + newProgressBlockNumberPerChain->Utils.Dict.setByInt( + diff["chain_id"], + if rollbackTargetCheckpointId === 0 && diff["chain_id"] === reorgChainId { + Pervasives.min(diff["new_progress_block_number"], rollbackTargetBlockNumber) + } else { + diff["new_progress_block_number"] + }, + ) + } + } + + let chainFetchers = state.chainManager.chainFetchers->ChainMap.mapWithKey((chain, cf) => { + switch newProgressBlockNumberPerChain->Utils.Dict.dangerouslyGetByIntNonOption( + chain->ChainMap.Chain.toChainId, + ) { + | Some(newProgressBlockNumber) => + let fetchState = + cf.fetchState->FetchState.rollback(~targetBlockNumber=newProgressBlockNumber) + let newTotalEventsProcessed = + cf.numEventsProcessed - + eventsProcessedDiffByChain + 
->Utils.Dict.dangerouslyGetByIntNonOption(chain->ChainMap.Chain.toChainId) + ->Option.getUnsafe + + if cf.committedProgressBlockNumber !== newProgressBlockNumber { + Prometheus.ProgressBlockNumber.set( + ~blockNumber=newProgressBlockNumber, + ~chainId=chain->ChainMap.Chain.toChainId, + ) + } + if cf.numEventsProcessed !== newTotalEventsProcessed { + Prometheus.ProgressEventsCount.set( + ~processedCount=newTotalEventsProcessed, + ~chainId=chain->ChainMap.Chain.toChainId, + ) + } + + { + ...cf, + reorgDetection: chain == reorgChain + ? cf.reorgDetection->ReorgDetection.rollbackToValidBlockNumber( + ~blockNumber=rollbackTargetBlockNumber, + ) + : cf.reorgDetection, + safeCheckpointTracking: switch cf.safeCheckpointTracking { + | Some(safeCheckpointTracking) => + Some( + safeCheckpointTracking->SafeCheckpointTracking.rollback( + ~targetBlockNumber=newProgressBlockNumber, + ), + ) + | None => None + }, + fetchState, + committedProgressBlockNumber: newProgressBlockNumber, + numEventsProcessed: newTotalEventsProcessed, + } + + | None => + // Even without a progress diff entry, the reorg chain must have its + // reorgDetection and fetchState rolled back. Otherwise the stale block hash + // stays in dataByBlockNumber and the same reorg is re-detected on the next + // fetch, causing an infinite reorg→rollback loop. 
+ if chain == reorgChain { + { + ...cf, + reorgDetection: cf.reorgDetection->ReorgDetection.rollbackToValidBlockNumber( + ~blockNumber=rollbackTargetBlockNumber, + ), + fetchState: cf.fetchState->FetchState.rollback( + ~targetBlockNumber=rollbackTargetBlockNumber, + ), + } + } else { + cf + } + } + }) + + // Construct in Memory store with rollback diff + let diff = await IO.prepareRollbackDiff( + ~rollbackTargetCheckpointId, + ~persistence=state.indexer.persistence, + ) + + let chainManager = { + ...state.chainManager, + committedCheckpointId: rollbackTargetCheckpointId, + chainFetchers, + } + + logger->Logging.childTrace({ + "msg": "Finished rollback on reorg", + "entityChanges": { + "deleted": diff["deletedEntities"], + "upserted": diff["setEntities"], + }, + "rollbackedEvents": rollbackedProcessedEvents.contents, + "beforeCheckpointId": state.chainManager.committedCheckpointId, + "targetCheckpointId": rollbackTargetCheckpointId, + }) + Prometheus.RollbackSuccess.increment( + ~timeMillis=Hrtime.timeSince(startTime)->Hrtime.toMillis, + ~rollbackedProcessedEvents=rollbackedProcessedEvents.contents, + ) + + dispatchAction( + SetRollbackState({ + diffInMemoryStore: diff["inMemStore"], + rollbackedChainManager: chainManager, + eventsProcessedDiffByChain, + }), + ) + } + } +} + +let taskReducer = injectedTaskReducer( + ~waitForNewBlock=SourceManager.waitForNewBlock, + ~executeQuery=SourceManager.executeQuery, + ~getLastKnownValidBlock=(chainFetcher, ~reorgBlockNumber) => + chainFetcher->ChainFetcher.getLastKnownValidBlock(~reorgBlockNumber), +) diff --git a/apps/hypersync-indexer/generated/src/globalState/GlobalStateManager.res b/apps/hypersync-indexer/generated/src/globalState/GlobalStateManager.res new file mode 100644 index 000000000..11fcc7880 --- /dev/null +++ b/apps/hypersync-indexer/generated/src/globalState/GlobalStateManager.res @@ -0,0 +1,76 @@ +open Belt +module type State = { + type t + type action + type task + + let taskReducer: (t, task, ~dispatchAction: 
action => unit) => promise + let actionReducer: (t, action) => (t, array) + let invalidatedActionReducer: (t, action) => (t, array) + let getId: t => int +} + +module MakeManager = (S: State) => { + type t = {mutable state: S.t, stateUpdatedHook: option unit>, onError: exn => unit} + + let make = ( + state: S.t, + ~stateUpdatedHook: option unit>=?, + ~onError=e => { + e->ErrorHandling.make(~msg="Indexer has failed with an unexpected error")->ErrorHandling.log + NodeJs.process->NodeJs.exitWithCode(Failure) + }, + ) => { + state, + stateUpdatedHook, + onError, + } + + let rec dispatchAction = (~stateId=0, self: t, action: S.action) => { + try { + let reducer = if stateId == self.state->S.getId { + S.actionReducer + } else { + S.invalidatedActionReducer + } + let (nextState, nextTasks) = reducer(self.state, action) + switch self.stateUpdatedHook { + // In ReScript `!==` is shallow equality check rather than `!=` + // This is just a check to see if a new object reference was returned + | Some(hook) if self.state !== nextState => hook(nextState) + | _ => () + } + self.state = nextState + nextTasks->Array.forEach(task => dispatchTask(self, task)) + } catch { + | e => e->self.onError + } + } + and dispatchTask = (self, task: S.task) => { + let stateId = self.state->S.getId + Js.Global.setTimeout(() => { + if stateId !== self.state->S.getId { + Logging.info("Invalidated task discarded") + } else { + try { + S.taskReducer(self.state, task, ~dispatchAction=action => + dispatchAction(~stateId, self, action) + ) + ->Promise.catch(e => { + e->self.onError + Promise.resolve() + }) + ->ignore + } catch { + | e => e->self.onError + } + } + }, 0)->ignore + } + + let getState = self => self.state + let setState = (self: t, state: S.t) => self.state = state +} + +module Manager = MakeManager(GlobalState) +include Manager diff --git a/apps/hypersync-indexer/generated/src/globalState/GlobalStateManager.resi b/apps/hypersync-indexer/generated/src/globalState/GlobalStateManager.resi new 
file mode 100644 index 000000000..d9ff6d8c9 --- /dev/null +++ b/apps/hypersync-indexer/generated/src/globalState/GlobalStateManager.resi @@ -0,0 +1,7 @@ +type t + +let make: (GlobalState.t, ~stateUpdatedHook: GlobalState.t => unit=?, ~onError: exn => unit=?) => t +let dispatchAction: (~stateId: int=?, t, GlobalState.action) => unit +let dispatchTask: (t, GlobalState.task) => unit +let getState: t => GlobalState.t +let setState: (t, GlobalState.t) => unit diff --git a/apps/hypersync-indexer/generated/src/ink/EnvioInkApp.res b/apps/hypersync-indexer/generated/src/ink/EnvioInkApp.res new file mode 100644 index 000000000..926dd288d --- /dev/null +++ b/apps/hypersync-indexer/generated/src/ink/EnvioInkApp.res @@ -0,0 +1,67 @@ +open Ink +open Belt + +type chainData = ChainData.chainData +type appState = { + chains: array, + indexerStartTime: Js.Date.t, + config: Config.t, +} + +let getTotalNumEventsProcessed = (~chains: array) => { + chains->Array.reduce(0, (acc, chain) => { + acc + chain.progress->ChainData.getNumberOfEventsProccessed + }) +} + +module TotalEventsProcessed = { + @react.component + let make = (~totalEventsProcessed) => { + let label = "Total Events Processed: " + + {label->React.string} + + {`${totalEventsProcessed->ChainData.formatLocaleString}`->React.string} + + + } +} + +module App = { + @react.component + let make = (~appState: appState) => { + let {chains, indexerStartTime, config} = appState + let totalEventsProcessed = getTotalNumEventsProcessed(~chains) + + + {chains + ->Array.mapWithIndex((i, chainData) => { + Int.toString} chainData /> + }) + ->React.array} + + + + + + {"Development Console: "->React.string} + + {`${Env.envioAppUrl}/console`->React.string} + + + + {"GraphQL Endpoint: "->React.string} + + {`${Env.Hasura.url}/v1/graphql`->React.string} + + + + } +} + +let startApp = appState => { + let {rerender} = render() + appState => { + rerender() + } +} diff --git a/apps/hypersync-indexer/generated/src/ink/bindings/DateFns.res 
b/apps/hypersync-indexer/generated/src/ink/bindings/DateFns.res new file mode 100644 index 000000000..a628d1447 --- /dev/null +++ b/apps/hypersync-indexer/generated/src/ink/bindings/DateFns.res @@ -0,0 +1,71 @@ +/** +Formats: +hh:mm:ss | 00:00:00 +do MMM ''yy | 1st Jan '21 +ha do MMM ''yy | 8PM 1st Jan '21 +ha | 8PM +iii | Tues +iii MMM | Tues Jan +MMM | Jan +`) +*/ +type dateFormats = + | @as("HH:mm:ss") HoursMinSec + | @as("ha") Hour + | @as("do MMM ''yy") DayMonthYear + | @as("ha do MMM ''yy") HourDayMonthYear + | @as("h:mma do MMM ''yy") HourMinDayMonthYear + | @as("iii") DayName + | @as("iii MMM") DayNameMonth + | @as("do MMM") DayMonth + | @as("MMM") Month + | @as("h:mma") HourMin + +@module("date-fns") external format: (Js.Date.t, dateFormats) => string = "format" + +type formatDistanceToNowOptions = {includeSeconds: bool} +@module("date-fns") +external formatDistanceToNow: Js.Date.t => string = "formatDistanceToNow" + +@module("date-fns") +external formatDistance: (Js.Date.t, Js.Date.t) => string = "formatDistance" + +@module("date-fns") +external formatDistanceWithOptions: (Js.Date.t, Js.Date.t, formatDistanceToNowOptions) => string = + "formatDistance" + +@module("date-fns") +external formatDistanceToNowWithOptions: (Js.Date.t, formatDistanceToNowOptions) => string = + "formatDistanceToNow" + +let formatDistanceToNowWithSeconds = (date: Js.Date.t) => + date->formatDistanceToNowWithOptions({includeSeconds: true}) + +type durationTimeFormat = { + years: int, + months: int, + weeks: int, + days: int, + hours: int, + minutes: int, + seconds: int, +} + +@module("date-fns") +external formatRelative: (Js.Date.t, Js.Date.t) => string = "formatRelative" + +type durationFormatOutput = {format: array} + +@module("date-fns") +external formatDuration: (durationTimeFormat, durationFormatOutput) => string = "formatDuration" + +type interval = {start: Js_date.t, end: Js_date.t} + +@module("date-fns") +external intervalToDuration: interval => durationTimeFormat = 
"intervalToDuration" + +//helper to convert millis elapsed to duration object +let durationFromMillis = (millis: int) => + intervalToDuration({start: 0->Utils.magic, end: millis->Utils.magic}) + +@module("date-fns") external fromUnixTime: float => Js.Date.t = "fromUnixTime" diff --git a/apps/hypersync-indexer/generated/src/ink/bindings/Ink.res b/apps/hypersync-indexer/generated/src/ink/bindings/Ink.res new file mode 100644 index 000000000..6dad99675 --- /dev/null +++ b/apps/hypersync-indexer/generated/src/ink/bindings/Ink.res @@ -0,0 +1,355 @@ +open Style + +type instance = { + rerender: React.element => unit, + unmount: unit => unit, + waitUntilExit: unit => promise, + clear: unit => unit, +} +type readableStream +type writableStream +type options = { + stdout?: writableStream, + stdin?: readableStream, + exitOnCtrlC?: bool, + patchConsole?: bool, + debug?: bool, +} +@module("ink") +external renderInternal: (React.element, ~options: option) => instance = "render" + +let render = (~options=?, element) => { + renderInternal(element, ~options) +} +type measurement = {width: int, height: int} + +@module("ink") +external measureElement: React.ref<'a> => measurement = "measureElement" + +module Text = { + type wrapOptions = + | @as("wrap") Wrap + | @as("truncate") Truncate + | @as("truncate-start") TruncateStart + | @as("truncate-middle") TruncateMiddle + | @as("truncate-end") TruncateEnd + @module("ink") @react.component + external make: ( + ~children: React.element, + ~color: chalkTheme=?, + ~backgroundColor: chalkTheme=?, + ~dimColor: bool=?, + ~bold: bool=?, + ~italic: bool=?, + ~underline: bool=?, + ~strikethrough: bool=?, + ~inverse: bool=?, + ~wrap: wrapOptions=?, + ) => React.element = "Text" +} + +module Box = { + @module("ink") @react.component + external make: ( + ~children: React.element=?, + ~width: numOrStr=?, + ~height: numOrStr=?, + ~minWidth: int=?, + ~minHeight: int=?, + ~padding: int=?, + ~paddingTop: int=?, + ~paddingBottom: int=?, + ~paddingLeft: 
int=?, + ~paddingRight: int=?, + ~paddingX: int=?, + ~paddingY: int=?, + ~margin: int=?, + ~marginTop: int=?, + ~marginBottom: int=?, + ~marginLeft: int=?, + ~marginRight: int=?, + ~marginX: int=?, + ~marginY: int=?, + ~gap: int=?, + ~rowGap: int=?, + ~flexGrow: int=?, + ~flexShrink: int=?, + ~flexBasis: numOrStr=?, + ~flexDirection: flexDirection=?, + ~flexWrap: flexDirection=?, + ~alignItems: alignItems=?, + ~alignSelf: alignSelf=?, + ~justifyContent: justifyContent=?, + ~display: display=?, + ~overflow: overflow=?, + ~overflowX: overflow=?, + ~overflowY: overflow=?, + ~borderStyle: borderStyle=?, + ~borderColor: chalkTheme=?, + ~borderTopColor: chalkTheme=?, + ~borderRightColor: chalkTheme=?, + ~borderBottomColor: chalkTheme=?, + ~borderLeftColor: chalkTheme=?, + ~borderDimColor: bool=?, + ~borderTopDimColor: bool=?, + ~borderRightDimColor: bool=?, + ~borderBottomDimColor: bool=?, + ~borderLeftDimColor: bool=?, + ~borderTop: bool=?, + ~borderRight: bool=?, + ~borderBottom: bool=?, + ~borderLeft: bool=?, + ) => React.element = "Box" +} + +module Newline = { + /** + Adds one or more newline characters. Must be used within components. + + */ + @module("ink") + @react.component + external make: (~count: int=?) => React.element = "Newline" +} + +module Spacer = { + /** + A flexible space that expands along the major axis of its containing layout. It's useful as a shortcut for filling all the available spaces between elements. + + For example, using in a with default flex direction (row) will position "Left" on the left side and will push "Right" to the right side. + */ + @module("ink") + @react.component + external make: unit => React.element = "Spacer" +} + +module Static = { + /** + component permanently renders its output above everything else. It's useful for displaying activity like completed tasks or logs - things that are not changing after they're rendered (hence the name "Static"). 
+ + It's preferred to use for use cases like these, when you can't know or control the amount of items that need to be rendered. + */ + @module("ink") + @react.component + external make: ( + ~children: ('a, int) => React.element, + ~items: array<'a>, + ~style: styles=?, + ) => React.element = "Static" +} + +module Transform = { + /** + Transform a string representation of React components before they are written to output. For example, you might want to apply a gradient to text, add a clickable link or create some text effects. These use cases can't accept React nodes as input, they are expecting a string. That's what component does, it gives you an output string of its child components and lets you transform it in any way. + + Note: must be applied only to children components and shouldn't change the dimensions of the output, otherwise layout will be incorrect. + */ + type outputLine = string + type index = int + @module("ink") @react.component + external make: ( + ~children: string, + ~tranform: (outputLine, index) => string, + ~index: int=?, + ) => React.element = "Transform" +} + +module Hooks = { + type key = { + leftArrow: bool, + rightArrow: bool, + upArrow: bool, + downArrow: bool, + return: bool, + escape: bool, + ctrl: bool, + shift: bool, + tab: bool, + backspace: bool, + delete: bool, + pageDown: bool, + pageUp: bool, + meta: bool, + enter: bool, + } + type input = string + type inputHandler = (input, key) => unit + type options = {isActive?: bool} + + @module("ink") external useInput: (inputHandler, ~options: options=?) => unit = "useInput" + + type app = {exit: (~err: exn=?) 
=> unit} + @module("ink") external useApp: unit => app = "useApp" + + type stdin = { + stdin: readableStream, + isRawModeSupported: bool, + setRawMode: bool => unit, + } + + @module("ink") external useStdin: unit => stdin = "useStdin" + + type stdout = { + stdout: writableStream, + write: string => unit, + } + + @module("ink") external useStdout: unit => stdout = "useStdout" + + type stderr = { + stderr: writableStream, + write: string => unit, + } + + @module("ink") external useStderr: unit => stderr = "useStderr" + + type focusOptions = {autoFocus?: bool, isActive?: bool, id?: string} + type focus = {isFocused: bool} + @module("ink") external useFocus: (~options: focusOptions=?) => focus = "useFocus" + + type focusManager = { + enableFocus: unit => unit, + disableFocus: unit => unit, + focusNext: unit => unit, + focusPrevious: unit => unit, + focusId: string => unit, + } + @module("ink") + external useFocusManager: unit => focusManager = "useFocusManager" +} + +module BigText = { + type font = + | @as("block") Block + | @as("slick") Slick + | @as("tiny") Tiny + | @as("grid") Grid + | @as("pallet") Pallet + | @as("shade") Shade + | @as("simple") Simple + | @as("simpleBlock") SimpleBlock + | @as("3d") D3 + | @as("simple3d") Simple3D + | @as("chrome") Chrome + | @as("huge") Huge + type align = + | @as("left") Left + | @as("center") Center + | @as("right") Right + type backgroundColor = + | @as("transparent") Transparent + | @as("black") Black + | @as("red") Red + | @as("green") Green + | @as("yellow") Yellow + | @as("blue") Blue + | @as("magenta") Magenta + | @as("cyan") Cyan + | @as("white") White + + type color = | ...chalkTheme | @as("system") System + @module @react.component + external make: ( + ~text: string, + ~font: font=?, //default block + ~align: align=?, //default left + ~colors: array=?, //default [system] + ~backgroundColor: backgroundColor=?, //default transparent + ~letterSpacing: int=?, //default 1 + ~lineHeight: int=?, //default 1 + ~space: bool=?, 
//default true + ~maxLength: int=?, + ) => React.element = "ink-big-text" +} + +module Spinner = { + type typeOption = + | @as("dots") Dots + | @as("dots2") Dots2 + | @as("dots3") Dots3 + | @as("dots4") Dots4 + | @as("dots5") Dots5 + | @as("dots6") Dots6 + | @as("dots7") Dots7 + | @as("dots8") Dots8 + | @as("dots9") Dots9 + | @as("dots10") Dots10 + | @as("dots11") Dots11 + | @as("dots12") Dots12 + | @as("dots13") Dots13 + | @as("dots8Bit") Dots8Bit + | @as("sand") Sand + | @as("line") Line + | @as("line2") Line2 + | @as("pipe") Pipe + | @as("simpleDots") SimpleDots + | @as("simpleDotsScrolling") SimpleDotsScrolling + | @as("star") Star + | @as("star2") Star2 + | @as("flip") Flip + | @as("hamburger") Hamburger + | @as("growVertical") GrowVertical + | @as("growHorizontal") GrowHorizontal + | @as("balloon") Balloon + | @as("balloon2") Balloon2 + | @as("noise") Noise + | @as("bounce") Bounce + | @as("boxBounce") BoxBounce + | @as("boxBounce2") BoxBounce2 + | @as("triangle") Triangle + | @as("binary") Binary + | @as("arc") Arc + | @as("circle") Circle + | @as("squareCorners") SquareCorners + | @as("circleQuarters") CircleQuarters + | @as("circleHalves") CircleHalves + | @as("squish") Squish + | @as("toggle") Toggle + | @as("toggle2") Toggle2 + | @as("toggle3") Toggle3 + | @as("toggle4") Toggle4 + | @as("toggle5") Toggle5 + | @as("toggle6") Toggle6 + | @as("toggle7") Toggle7 + | @as("toggle8") Toggle8 + | @as("toggle9") Toggle9 + | @as("toggle10") Toggle10 + | @as("toggle11") Toggle11 + | @as("toggle12") Toggle12 + | @as("toggle13") Toggle13 + | @as("arrow") Arrow + | @as("arrow2") Arrow2 + | @as("arrow3") Arrow3 + | @as("bouncingBar") BouncingBar + | @as("bouncingBall") BouncingBall + | @as("smiley") Smiley + | @as("monkey") Monkey + | @as("hearts") Hearts + | @as("clock") Clock + | @as("earth") Earth + | @as("material") Material + | @as("moon") Moon + | @as("runner") Runner + | @as("pong") Pong + | @as("shark") Shark + | @as("dqpb") Dqpb + | @as("weather") Weather + | 
@as("christmas") Christmas + | @as("grenade") Grenade + | @as("point") Point + | @as("layer") Layer + | @as("betaWave") BetaWave + | @as("fingerDance") FingerDance + | @as("fistBump") FistBump + | @as("soccerHeader") SoccerHeader + | @as("mindblown") Mindblown + | @as("speaker") Speaker + | @as("orangePulse") OrangePulse + | @as("bluePulse") BluePulse + | @as("orangeBluePulse") OrangeBluePulse + | @as("timeTravel") TimeTravel + | @as("aesthetic") Aesthetic + | @as("dwarfFortress") DwarfFortress + @module("ink-spinner") @react.component + external make: (@as("type") ~type_: typeOption=?) => React.element = "default" +} diff --git a/apps/hypersync-indexer/generated/src/ink/bindings/Style.res b/apps/hypersync-indexer/generated/src/ink/bindings/Style.res new file mode 100644 index 000000000..8b3fc30f3 --- /dev/null +++ b/apps/hypersync-indexer/generated/src/ink/bindings/Style.res @@ -0,0 +1,123 @@ +type chalkTheme = + | @as("#9860E5") Primary + | @as("#FFBB2F") Secondary + | @as("#6CBFEE") Info + | @as("#FF8269") Danger + | @as("#3B8C3D") Success + | @as("white") White + | @as("gray") Gray + +@unboxed type numOrStr = Num(int) | Str(string) + +type textWrap = + | @as("wrap") Wrap + | @as("end") End + | @as("middle") Middle + | @as("truncate-end") TruncateEnd + | @as("truncate") Truncate + | @as("truncate-middle") TruncateMiddle + | @as("truncate-start") TruncateStart + +type position = + | @as("absolute") Absolute + | @as("relative") Relative + +type flexDirection = + | @as("row") Row + | @as("column") Column + | @as("row-reverse") RowReverse + | @as("column-reverse") ColumnReverse + +type flexWrap = + | @as("nowrap") NoWrap + | @as("wrap") Wrap + | @as("wrap-reverse") WrapReverse + +type alignItems = + | @as("flex-start") FlexStart + | @as("center") Center + | @as("flex-end") FlexEnd + | @as("stretch") Stretch + +type alignSelf = + | @as("flex-start") FlexStartSelf + | @as("center") CenterSelf + | @as("flex-end") FlexEndSelf + | @as("auto") Auto + +type justifyContent 
= + | @as("flex-start") JustifyFlexStart + | @as("flex-end") JustifyFlexEnd + | @as("space-between") SpaceBetween + | @as("space-around") SpaceAround + | @as("center") JustifyCenter + +type display = + | @as("flex") Flex + | @as("none") None + +type overflow = + | @as("visible") Visible + | @as("hidden") Hidden + +type borderStyle = + | @as("single") Single + | @as("double") Double + | @as("round") Round + | @as("bold") Bold + | @as("singleDouble") SingleDouble + | @as("doubleSingle") DoubleSingle + | @as("classic") Classic + +type styles = { + textWrap?: textWrap, + position?: position, + columnGap?: int, + rowGap?: int, + gap?: int, + margin?: int, + marginX?: int, + marginY?: int, + marginTop?: int, + marginBottom?: int, + marginLeft?: int, + marginRight?: int, + padding?: int, + paddingX?: int, + paddingY?: int, + paddingTop?: int, + paddingBottom?: int, + paddingLeft?: int, + paddingRight?: int, + flexGrow?: int, + flexShrink?: int, + flexDirection?: flexDirection, + flexBasis?: numOrStr, + flexWrap?: flexWrap, + alignItems?: alignItems, + alignSelf?: alignSelf, + justifyContent?: justifyContent, + width?: numOrStr, + height?: numOrStr, + minWidth?: numOrStr, + minHeight?: numOrStr, + display?: display, + borderStyle?: borderStyle, + borderTop?: bool, + borderBottom?: bool, + borderLeft?: bool, + borderRight?: bool, + borderColor?: chalkTheme, + borderTopColor?: chalkTheme, + borderBottomColor?: chalkTheme, + borderLeftColor?: chalkTheme, + borderRightColor?: chalkTheme, + borderDimColor?: bool, + borderTopDimColor?: bool, + borderBottomDimColor?: bool, + borderLeftDimColor?: bool, + borderRightDimColor?: bool, + overflow?: overflow, + overflowX?: overflow, + overflowY?: overflow, +} diff --git a/apps/hypersync-indexer/generated/src/ink/components/BufferedProgressBar.res b/apps/hypersync-indexer/generated/src/ink/components/BufferedProgressBar.res new file mode 100644 index 000000000..72aed5314 --- /dev/null +++ 
b/apps/hypersync-indexer/generated/src/ink/components/BufferedProgressBar.res @@ -0,0 +1,40 @@ +open Ink +open Belt +@react.component +let make = (~loaded, ~buffered=?, ~outOf, ~barWidth=36, ~loadingColor=Style.Secondary) => { + let maxCount = barWidth + + let loadedFraction = loaded->Int.toFloat /. outOf->Int.toFloat + let loadedCount = Pervasives.min( + Js.Math.floor_float(maxCount->Js.Int.toFloat *. loadedFraction)->Belt.Float.toInt, + maxCount, + ) + + let bufferedCount = buffered->Option.mapWithDefault(loadedCount, buffered => { + let bufferedFraction = buffered->Int.toFloat /. outOf->Int.toFloat + Pervasives.min( + Js.Math.floor_float(maxCount->Js.Int.toFloat *. bufferedFraction)->Belt.Float.toInt, + maxCount, + ) + }) + let loadedFraction = loadedFraction > 0.0 ? loadedFraction : 0.0 + let loadedPercentageStr = (loadedFraction *. 100.)->Int.fromFloat->Int.toString ++ "% " + + let loadedPercentageStrCount = loadedPercentageStr->String.length + let loadedSpaces = Pervasives.max(loadedCount - loadedPercentageStrCount, 0) + let loadedCount = Pervasives.max(loadedCount, loadedPercentageStrCount) + let bufferedCount = Pervasives.max(bufferedCount, loadedCount) + + + + {" "->Js.String2.repeat(loadedSpaces)->React.string} + {loadedPercentageStr->React.string} + + + {" "->Js.String2.repeat(bufferedCount - loadedCount)->React.string} + + + {" "->Js.String2.repeat(maxCount - bufferedCount)->React.string} + + +} diff --git a/apps/hypersync-indexer/generated/src/ink/components/ChainData.res b/apps/hypersync-indexer/generated/src/ink/components/ChainData.res new file mode 100644 index 000000000..946e9a4e3 --- /dev/null +++ b/apps/hypersync-indexer/generated/src/ink/components/ChainData.res @@ -0,0 +1,161 @@ +open Ink + +type syncing = { + firstEventBlockNumber: int, + latestProcessedBlock: int, + numEventsProcessed: int, +} +type synced = { + ...syncing, + timestampCaughtUpToHeadOrEndblock: Js.Date.t, +} + +type progress = SearchingForEvents | Syncing(syncing) | 
Synced(synced) + +let getNumberOfEventsProccessed = (progress: progress) => { + switch progress { + | SearchingForEvents => 0 + | Syncing(syncing) => syncing.numEventsProcessed + | Synced(synced) => synced.numEventsProcessed + } +} +type chainData = { + chain: ChainMap.Chain.t, + poweredByHyperSync: bool, + progress: progress, + latestFetchedBlockNumber: int, + currentBlockHeight: int, + numBatchesFetched: int, + endBlock: option, +} + +let minOfOption: (int, option) => int = (a: int, b: option) => { + switch (a, b) { + | (a, Some(b)) => min(a, b) + | (a, None) => a + } +} + +type number +@val external number: int => number = "Number" +@send external toLocaleString: number => string = "toLocaleString" +let formatLocaleString = n => n->number->toLocaleString + +module BlocksDisplay = { + @react.component + let make = (~latestProcessedBlock, ~currentBlockHeight) => { + + {"blocks: "->React.string} + + + {latestProcessedBlock->formatLocaleString->React.string} + + + {"/"->React.string} + {currentBlockHeight->formatLocaleString->React.string} + + + + } +} + +module SyncBar = { + @react.component + let make = ( + ~chainId, + ~loaded, + ~buffered=?, + ~outOf, + ~loadingColor, + ~poweredByHyperSync=true, + ~isSearching=false, + ) => { + + + {poweredByHyperSync ? {"⚡"->React.string} : React.null} + {"Chain ID: "->React.string} + {chainId->React.int} + {" "->React.string} + + {isSearching + ? 
+ + + : } + + } +} + +@react.component +let make = (~chainData: chainData) => { + let { + chain, + progress, + poweredByHyperSync, + latestFetchedBlockNumber, + currentBlockHeight, + endBlock, + } = chainData + let chainId = chain->ChainMap.Chain.toChainId + + let toBlock = minOfOption(currentBlockHeight, endBlock) + + switch progress { + | SearchingForEvents => + + + {"Searching for events..."->React.string} + + + + + + | Syncing({firstEventBlockNumber, latestProcessedBlock, numEventsProcessed}) => + + + + + {"Events Processed: "->React.string} + + {numEventsProcessed->formatLocaleString->React.string} + + + + + + + | Synced({firstEventBlockNumber, latestProcessedBlock, numEventsProcessed}) => + + + + {"Events Processed: "->React.string} + {numEventsProcessed->React.int} + + + + + + + } +} diff --git a/apps/hypersync-indexer/generated/src/ink/components/CustomHooks.res b/apps/hypersync-indexer/generated/src/ink/components/CustomHooks.res new file mode 100644 index 000000000..335edbe06 --- /dev/null +++ b/apps/hypersync-indexer/generated/src/ink/components/CustomHooks.res @@ -0,0 +1,114 @@ +open Belt +module InitApi = { + type ecosystem = | @as("evm") Evm | @as("fuel") Fuel + type body = { + envioVersion: string, + envioApiToken: option, + ecosystem: ecosystem, + hyperSyncNetworks: array, + rpcNetworks: array, + } + + let bodySchema = S.object(s => { + envioVersion: s.field("envioVersion", S.string), + envioApiToken: s.field("envioApiToken", S.option(S.string)), + ecosystem: s.field("ecosystem", S.enum([Evm, Fuel])), + hyperSyncNetworks: s.field("hyperSyncNetworks", S.array(S.int)), + rpcNetworks: s.field("rpcNetworks", S.array(S.int)), + }) + + let makeBody = (~envioVersion, ~envioApiToken, ~config: Config.t) => { + let hyperSyncNetworks = [] + let rpcNetworks = [] + config.chainMap + ->ChainMap.values + ->Array.forEach(({sources, id}) => { + switch sources->Js.Array2.some(s => s.poweredByHyperSync) { + | true => hyperSyncNetworks + | false => rpcNetworks + } + 
->Js.Array2.push(id) + ->ignore + }) + + { + envioVersion, + envioApiToken, + ecosystem: (config.ecosystem :> ecosystem), + hyperSyncNetworks, + rpcNetworks, + } + } + + type messageColor = + | @as("primary") Primary + | @as("secondary") Secondary + | @as("info") Info + | @as("danger") Danger + | @as("success") Success + | @as("white") White + | @as("gray") Gray + + let toTheme = (color: messageColor): Style.chalkTheme => + switch color { + | Primary => Primary + | Secondary => Secondary + | Info => Info + | Danger => Danger + | Success => Success + | White => White + | Gray => Gray + } + + type message = { + color: messageColor, + content: string, + } + + let messageSchema = S.object(s => { + color: s.field("color", S.enum([Primary, Secondary, Info, Danger, Success, White, Gray])), + content: s.field("content", S.string), + }) + + let client = Rest.client(Env.envioAppUrl ++ "/api") + + let route = Rest.route(() => { + method: Post, + path: "/hyperindex/init", + input: s => s.body(bodySchema), + responses: [s => s.field("messages", S.array(messageSchema))], + }) + + let getMessages = async (~config) => { + let envioVersion = Utils.EnvioPackage.json.version + let body = makeBody(~envioVersion, ~envioApiToken=Env.envioApiToken, ~config) + + switch await route->Rest.fetch(body, ~client) { + | exception exn => Error(exn->Obj.magic) + | messages => Ok(messages) + } + } +} + +type request<'ok, 'err> = Data('ok) | Loading | Err('err) + +let useMessages = (~config) => { + let (request, setRequest) = React.useState(_ => Loading) + React.useEffect0(() => { + InitApi.getMessages(~config) + ->Promise.thenResolve(res => + switch res { + | Ok(data) => setRequest(_ => Data(data)) + | Error(e) => + Logging.error({ + "msg": "Failed to load messages from envio server", + "err": e->Utils.prettifyExn, + }) + setRequest(_ => Err(e)) + } + ) + ->ignore + None + }) + request +} diff --git a/apps/hypersync-indexer/generated/src/ink/components/Messages.res 
b/apps/hypersync-indexer/generated/src/ink/components/Messages.res new file mode 100644 index 000000000..b7df3ff25 --- /dev/null +++ b/apps/hypersync-indexer/generated/src/ink/components/Messages.res @@ -0,0 +1,41 @@ +open Belt +open Ink +module Message = { + @react.component + let make = (~message: CustomHooks.InitApi.message) => { + CustomHooks.InitApi.toTheme}> + {message.content->React.string} + + } +} + +module Notifications = { + @react.component + let make = (~children) => { + <> + + {"Notifications:"->React.string} + {children} + + } +} + +@react.component +let make = (~config) => { + let messages = CustomHooks.useMessages(~config) + <> + {switch messages { + | Data([]) | Loading => React.null //Don't show anything while loading or no messages + | Data(messages) => + + {messages + ->Array.mapWithIndex((i, message) => {Int.toString} message />}) + ->React.array} + + | Err(_) => + + + + }} + +} diff --git a/apps/hypersync-indexer/generated/src/ink/components/SyncETA.res b/apps/hypersync-indexer/generated/src/ink/components/SyncETA.res new file mode 100644 index 000000000..f3f83e09b --- /dev/null +++ b/apps/hypersync-indexer/generated/src/ink/components/SyncETA.res @@ -0,0 +1,198 @@ +open Ink +open Belt + +let isIndexerFullySynced = (chains: array) => { + chains->Array.reduce(true, (accum, current) => { + switch current.progress { + | Synced(_) => accum + | _ => false + } + }) +} + +let getTotalRemainingBlocks = (chains: array) => { + chains->Array.reduce(0, (accum, {progress, currentBlockHeight, latestFetchedBlockNumber, endBlock}) => { + let finalBlock = switch endBlock { + | Some(endBlock) => endBlock + | None => currentBlockHeight + } + switch progress { + | Syncing({latestProcessedBlock}) + | Synced({latestProcessedBlock}) => + finalBlock - latestProcessedBlock + accum + | SearchingForEvents => finalBlock - latestFetchedBlockNumber + accum + } + }) +} + +let getLatestTimeCaughtUpToHead = ( + chains: array, + indexerStartTime: Js.Date.t, +) => { + let 
latesttimestampCaughtUpToHeadOrEndblockFloat = chains->Array.reduce(0.0, (accum, current) => { + switch current.progress { + | Synced({timestampCaughtUpToHeadOrEndblock}) => + timestampCaughtUpToHeadOrEndblock->Js.Date.valueOf > accum + ? timestampCaughtUpToHeadOrEndblock->Js.Date.valueOf + : accum + | Syncing(_) + | SearchingForEvents => accum + } + }) + + DateFns.formatDistanceWithOptions( + indexerStartTime, + latesttimestampCaughtUpToHeadOrEndblockFloat->Js.Date.fromFloat, + {includeSeconds: true}, + ) +} + +let getTotalBlocksProcessed = (chains: array) => { + chains->Array.reduce(0, (accum, {progress, latestFetchedBlockNumber}) => { + switch progress { + | Syncing({latestProcessedBlock, firstEventBlockNumber}) + | Synced({latestProcessedBlock, firstEventBlockNumber}) => + latestProcessedBlock - firstEventBlockNumber + accum + | SearchingForEvents => latestFetchedBlockNumber + accum + } + }) +} + +let useShouldDisplayEta = (~chains: array) => { + let (shouldDisplayEta, setShouldDisplayEta) = React.useState(_ => false) + React.useEffect(() => { + //Only compute this while it is not displaying eta + if !shouldDisplayEta { + //Each chain should have fetched at least one batch + let (allChainsHaveFetchedABatch, totalNumBatchesFetched) = chains->Array.reduce((true, 0), ( + (allChainsHaveFetchedABatch, totalNumBatchesFetched), + chain, + ) => { + ( + allChainsHaveFetchedABatch && chain.numBatchesFetched >= 1, + totalNumBatchesFetched + chain.numBatchesFetched, + ) + }) + + //Min num fetched batches is num of chains + 2. All + // Chains should have fetched at least 1 batch. 
(They + // could then be blocked from fetching if they are past + //the max queue size on first batch) + // Only display once an additinal 2 batches have been fetched to allow + // eta to realistically stabalize + let numChains = chains->Array.length + let minTotalBatches = numChains + 2 + let hasMinNumBatches = totalNumBatchesFetched >= minTotalBatches + + let shouldDisplayEta = allChainsHaveFetchedABatch && hasMinNumBatches + + if shouldDisplayEta { + setShouldDisplayEta(_ => true) + } + } + + None + }, [chains]) + + shouldDisplayEta +} + +let useEta = (~chains, ~indexerStartTime) => { + let shouldDisplayEta = useShouldDisplayEta(~chains) + let (secondsToSub, setSecondsToSub) = React.useState(_ => 0.) + let (timeSinceStart, setTimeSinceStart) = React.useState(_ => 0.) + + React.useEffect2(() => { + setTimeSinceStart(_ => Js.Date.now() -. indexerStartTime->Js.Date.valueOf) + setSecondsToSub(_ => 0.) + + let intervalId = Js.Global.setInterval(() => { + setSecondsToSub(prev => prev +. 1.) + }, 1000) + + Some(() => Js.Global.clearInterval(intervalId)) + }, (chains, indexerStartTime)) + + //blocksProcessed/remainingBlocks = timeSoFar/eta + //eta = (timeSoFar/blocksProcessed) * remainingBlocks + + let blocksProcessed = getTotalBlocksProcessed(chains)->Int.toFloat + if shouldDisplayEta && blocksProcessed > 0. { + let nowDate = Js.Date.now() + let remainingBlocks = getTotalRemainingBlocks(chains)->Int.toFloat + let etaFloat = timeSinceStart /. blocksProcessed *. remainingBlocks + let millisToSub = secondsToSub *. 1000. + let etaFloat = Pervasives.max(etaFloat -. millisToSub, 0.0) //template this + let eta = (etaFloat +. 
nowDate)->Js.Date.fromFloat + let interval: DateFns.interval = {start: nowDate->Js.Date.fromFloat, end: eta} + let duration = DateFns.intervalToDuration(interval) + let formattedDuration = DateFns.formatDuration( + duration, + {format: ["hours", "minutes", "seconds"]}, + ) + let outputString = switch formattedDuration { + | "" => "less than 1 second" + | formattedDuration => formattedDuration + } + Some(outputString) + } else { + None + } +} + +module Syncing = { + @react.component + let make = (~etaStr) => { + + + {"Sync Time ETA: "->React.string} + + {etaStr->React.string} + {" ("->React.string} + + + + {" in progress"->React.string} + {")"->React.string} + + } +} + +module Synced = { + @react.component + let make = (~latestTimeCaughtUpToHeadStr) => { + + {"Time Synced: "->React.string} + {`${latestTimeCaughtUpToHeadStr}`->React.string} + {" ("->React.string} + {"synced"->React.string} + {")"->React.string} + + } +} + +module Calculating = { + @react.component + let make = () => { + + + + + {" Calculating ETA..."->React.string} + + } +} + +@react.component +let make = (~chains, ~indexerStartTime) => { + let optEta = useEta(~chains, ~indexerStartTime) + if isIndexerFullySynced(chains) { + let latestTimeCaughtUpToHeadStr = getLatestTimeCaughtUpToHead(chains, indexerStartTime) + //TODO add real time + } else { + switch optEta { + | Some(etaStr) => + | None => + } + } +} diff --git a/apps/hypersync-indexer/package.json b/apps/hypersync-indexer/package.json new file mode 100644 index 000000000..0ee592b5d --- /dev/null +++ b/apps/hypersync-indexer/package.json @@ -0,0 +1,30 @@ +{ + "name": "@anticapture/hypersync-indexer", + "version": "0.1.0", + "private": true, + "scripts": { + "dev": "envio dev", + "start": "envio start", + "codegen": "envio codegen", + "lint": "eslint .", + "lint:fix": "eslint . 
--fix", + "typecheck": "tsc --noEmit", + "clean": "rm -rf node_modules generated *.tsbuildinfo" + }, + "dependencies": { + "viem": "^2.37.11" + }, + "devDependencies": { + "@types/node": "^20.16.5", + "dotenv": "^16.5.0", + "envio": "^2.32.12", + "eslint": "^9", + "eslint-config-prettier": "^9.1.0", + "eslint-plugin-prettier": "^5.2.1", + "prettier": "^3.5.3", + "typescript": "^5.8.3" + }, + "engines": { + "node": ">=18.14" + } +} diff --git a/apps/indexer/schema.graphql b/apps/hypersync-indexer/schema.graphql similarity index 100% rename from apps/indexer/schema.graphql rename to apps/hypersync-indexer/schema.graphql diff --git a/apps/indexer/src/eventHandlers/ENSGovernor.ts b/apps/hypersync-indexer/src/eventHandlers/ENSGovernor.ts similarity index 100% rename from apps/indexer/src/eventHandlers/ENSGovernor.ts rename to apps/hypersync-indexer/src/eventHandlers/ENSGovernor.ts diff --git a/apps/indexer/src/eventHandlers/ENSToken.ts b/apps/hypersync-indexer/src/eventHandlers/ENSToken.ts similarity index 100% rename from apps/indexer/src/eventHandlers/ENSToken.ts rename to apps/hypersync-indexer/src/eventHandlers/ENSToken.ts diff --git a/apps/hypersync-indexer/src/eventHandlers/delegation.ts b/apps/hypersync-indexer/src/eventHandlers/delegation.ts new file mode 100644 index 000000000..142c1b215 --- /dev/null +++ b/apps/hypersync-indexer/src/eventHandlers/delegation.ts @@ -0,0 +1,212 @@ +import type { handlerContext } from "../../generated/index.js"; +import type { EventType_t } from "../../generated/src/db/Enums.gen.ts"; +import type { Address, Hex } from "viem"; +import { getAddress, zeroAddress } from "viem"; + +import { + BurningAddresses, + CEXAddresses, + DEXAddresses, + LendingAddresses, +} from "../lib/constants.ts"; +import { DaoIdEnum } from "../lib/enums.ts"; + +import { + createAddressSet, + ensureAccountExists, + ensureAccountsExist, +} from "./shared.ts"; + +type DelegationAddressSets = { + cex: ReadonlySet
; + dex: ReadonlySet
; + lending: ReadonlySet
; + burning: ReadonlySet
; +}; + +export const delegateChanged = async ( + context: handlerContext, + daoId: DaoIdEnum, + args: { + delegator: Address; + delegate: Address; + tokenId: Address; + previousDelegate: Address; + txHash: Hex; + timestamp: bigint; + logIndex: number; + delegatorBalance?: bigint; + }, + addressSets?: DelegationAddressSets, +) => { + const { + delegator, + delegate, + tokenId, + txHash, + previousDelegate, + timestamp, + logIndex, + delegatorBalance: _delegatorBalance, + } = args; + + const normalizedDelegator = getAddress(delegator); + const normalizedDelegate = getAddress(delegate); + + await ensureAccountsExist(context, [delegator, delegate]); + + const delegatorBalanceId = `${normalizedDelegator}-${getAddress(tokenId)}`; + const storedBalance = _delegatorBalance + ? { balance: _delegatorBalance } + : await context.AccountBalance.get(delegatorBalanceId); + const delegatedValue = storedBalance?.balance ?? 0n; + + const { cex, dex, lending, burning } = addressSets ?? { + cex: createAddressSet(Object.values(CEXAddresses[daoId] || {})), + dex: createAddressSet(Object.values(DEXAddresses[daoId] || {})), + lending: createAddressSet(Object.values(LendingAddresses[daoId] || {})), + burning: createAddressSet(Object.values(BurningAddresses[daoId] || {})), + }; + + const isCex = cex.has(normalizedDelegator) || cex.has(normalizedDelegate); + const isDex = dex.has(normalizedDelegator) || dex.has(normalizedDelegate); + const isLending = + lending.has(normalizedDelegator) || lending.has(normalizedDelegate); + const isTotal = + burning.has(normalizedDelegator) || burning.has(normalizedDelegate); + + const delegationId = `${txHash}-${normalizedDelegator}-${normalizedDelegate}`; + const existingDelegation = await context.Delegation.get(delegationId); + context.Delegation.set({ + id: delegationId, + transactionHash: txHash, + daoId, + delegateAccountId: normalizedDelegate, + delegatorAccountId: normalizedDelegator, + delegatedValue: (existingDelegation?.delegatedValue ?? 
0n) + delegatedValue, + previousDelegate: getAddress(previousDelegate), + timestamp, + logIndex, + isCex, + isDex, + isLending, + isTotal, + delegationType: undefined, + }); + + // Update delegator's balance record to point to new delegate + const existingBalance = await context.AccountBalance.get(delegatorBalanceId); + context.AccountBalance.set({ + id: delegatorBalanceId, + accountId: normalizedDelegator, + tokenId: getAddress(tokenId), + balance: existingBalance?.balance ?? 0n, + delegate: normalizedDelegate, + }); + + // Decrement previous delegate's count + if (previousDelegate !== zeroAddress) { + const prevPowerId = getAddress(previousDelegate); + const prevPower = await context.AccountPower.get(prevPowerId); + context.AccountPower.set({ + id: prevPowerId, + accountId: prevPowerId, + daoId, + votingPower: prevPower?.votingPower ?? 0n, + votesCount: prevPower?.votesCount ?? 0, + proposalsCount: prevPower?.proposalsCount ?? 0, + delegationsCount: Math.max(0, (prevPower?.delegationsCount ?? 0) - 1), + lastVoteTimestamp: prevPower?.lastVoteTimestamp ?? 0n, + }); + } + + // Increment new delegate's count + const delegatePowerId = normalizedDelegate; + const delegatePower = await context.AccountPower.get(delegatePowerId); + context.AccountPower.set({ + id: delegatePowerId, + accountId: normalizedDelegate, + daoId, + votingPower: delegatePower?.votingPower ?? 0n, + votesCount: delegatePower?.votesCount ?? 0, + proposalsCount: delegatePower?.proposalsCount ?? 0, + delegationsCount: (delegatePower?.delegationsCount ?? 0) + 1, + lastVoteTimestamp: delegatePower?.lastVoteTimestamp ?? 
0n, + }); + + context.FeedEvent.set({ + id: `${txHash}-${logIndex}`, + txHash, + logIndex, + eventType: "DELEGATION" as EventType_t, + value: delegatedValue, + timestamp, + metadata: { + delegator: normalizedDelegator, + delegate: normalizedDelegate, + previousDelegate: getAddress(previousDelegate), + amount: delegatedValue.toString(), + }, + }); +}; + +export const delegatedVotesChanged = async ( + context: handlerContext, + daoId: DaoIdEnum, + args: { + delegate: Address; + txHash: Hex; + newBalance: bigint; + oldBalance: bigint; + timestamp: bigint; + logIndex: number; + }, +) => { + const { delegate, txHash, newBalance, oldBalance, timestamp, logIndex } = + args; + + const normalizedDelegate = getAddress(delegate); + + await ensureAccountExists(context, delegate); + + const diff = newBalance - oldBalance; + const deltaMod = diff > 0n ? diff : -diff; + + context.VotingPowerHistory.set({ + id: `${txHash}-${normalizedDelegate}-${logIndex}`, + daoId, + transactionHash: txHash, + accountId: normalizedDelegate, + votingPower: newBalance, + delta: diff, + deltaMod, + timestamp, + logIndex, + }); + + const existingPower = await context.AccountPower.get(normalizedDelegate); + context.AccountPower.set({ + id: normalizedDelegate, + accountId: normalizedDelegate, + daoId, + votingPower: newBalance, + votesCount: existingPower?.votesCount ?? 0, + proposalsCount: existingPower?.proposalsCount ?? 0, + delegationsCount: existingPower?.delegationsCount ?? 0, + lastVoteTimestamp: existingPower?.lastVoteTimestamp ?? 
0n, + }); + + context.FeedEvent.set({ + id: `${txHash}-${logIndex}`, + txHash, + logIndex, + eventType: "DELEGATION_VOTES_CHANGED" as EventType_t, + value: deltaMod, + timestamp, + metadata: { + delta: diff.toString(), + deltaMod: deltaMod.toString(), + delegate: normalizedDelegate, + }, + }); +}; diff --git a/apps/hypersync-indexer/src/eventHandlers/index.ts b/apps/hypersync-indexer/src/eventHandlers/index.ts new file mode 100644 index 000000000..a5e8d7c14 --- /dev/null +++ b/apps/hypersync-indexer/src/eventHandlers/index.ts @@ -0,0 +1,3 @@ +export * from "./transfer.ts"; +export * from "./delegation.ts"; +export * from "./voting.ts"; diff --git a/apps/hypersync-indexer/src/eventHandlers/metrics/circulating.ts b/apps/hypersync-indexer/src/eventHandlers/metrics/circulating.ts new file mode 100644 index 000000000..24c7b2479 --- /dev/null +++ b/apps/hypersync-indexer/src/eventHandlers/metrics/circulating.ts @@ -0,0 +1,37 @@ +import type { Address } from "viem"; +import { getAddress } from "viem"; +import type { handlerContext } from "../../../generated/index.js"; + +import { storeDailyBucket } from "../shared.ts"; +import { MetricTypesEnum } from "../../lib/constants.ts"; + +export const updateCirculatingSupply = async ( + context: handlerContext, + daoId: string, + tokenAddress: Address, + timestamp: bigint, +) => { + const tokenId = getAddress(tokenAddress); + const token = await context.Token.get(tokenId); + if (!token) return false; + + const currentCirculatingSupply = token.circulatingSupply; + const newCirculatingSupply = + token.totalSupply - token.treasury - token.nonCirculatingSupply; + + if (currentCirculatingSupply === newCirculatingSupply) return false; + + context.Token.set({ ...token, circulatingSupply: newCirculatingSupply }); + + await storeDailyBucket( + context, + MetricTypesEnum.CIRCULATING_SUPPLY, + currentCirculatingSupply, + newCirculatingSupply, + daoId, + timestamp, + tokenAddress, + ); + + return true; +}; diff --git 
a/apps/hypersync-indexer/src/eventHandlers/metrics/delegated.ts b/apps/hypersync-indexer/src/eventHandlers/metrics/delegated.ts new file mode 100644 index 000000000..675d39025 --- /dev/null +++ b/apps/hypersync-indexer/src/eventHandlers/metrics/delegated.ts @@ -0,0 +1,34 @@ +import type { Address } from "viem"; +import { getAddress } from "viem"; +import type { handlerContext } from "../../../generated/index.js"; + +import { DaoIdEnum } from "../../lib/enums.ts"; +import { MetricTypesEnum } from "../../lib/constants.ts"; +import { storeDailyBucket } from "../shared.ts"; + +export const updateDelegatedSupply = async ( + context: handlerContext, + daoId: DaoIdEnum, + tokenId: Address, + amount: bigint, + timestamp: bigint, +) => { + const normalizedId = getAddress(tokenId); + const token = await context.Token.get(normalizedId); + if (!token) return; + + const currentDelegatedSupply = token.delegatedSupply; + const newDelegatedSupply = currentDelegatedSupply + amount; + + context.Token.set({ ...token, delegatedSupply: newDelegatedSupply }); + + await storeDailyBucket( + context, + MetricTypesEnum.DELEGATED_SUPPLY, + currentDelegatedSupply, + newDelegatedSupply, + daoId, + timestamp, + tokenId, + ); +}; diff --git a/apps/hypersync-indexer/src/eventHandlers/metrics/index.ts b/apps/hypersync-indexer/src/eventHandlers/metrics/index.ts new file mode 100644 index 000000000..a4126ee00 --- /dev/null +++ b/apps/hypersync-indexer/src/eventHandlers/metrics/index.ts @@ -0,0 +1,4 @@ +export * from "./delegated.ts"; +export * from "./total.ts"; +export * from "./supply.ts"; +export * from "./circulating.ts"; diff --git a/apps/hypersync-indexer/src/eventHandlers/metrics/supply.ts b/apps/hypersync-indexer/src/eventHandlers/metrics/supply.ts new file mode 100644 index 000000000..ecac65839 --- /dev/null +++ b/apps/hypersync-indexer/src/eventHandlers/metrics/supply.ts @@ -0,0 +1,59 @@ +import type { Address } from "viem"; +import { getAddress } from "viem"; +import type { handlerContext 
} from "../../../generated/index.js"; + +import { + AddressCollection, + storeDailyBucket, + toAddressSet, +} from "../shared.ts"; +import { MetricTypesEnum } from "../../lib/constants.ts"; + +export const updateSupplyMetric = async ( + context: handlerContext, + supplyField: + | "lendingSupply" + | "cexSupply" + | "dexSupply" + | "treasury" + | "nonCirculatingSupply", + addressList: AddressCollection, + metricType: MetricTypesEnum, + from: Address, + to: Address, + value: bigint, + daoId: string, + tokenAddress: Address, + timestamp: bigint, +) => { + const normalizedAddressList = toAddressSet(addressList); + const isToRelevant = normalizedAddressList.has(getAddress(to)); + const isFromRelevant = normalizedAddressList.has(getAddress(from)); + + if ((isToRelevant || isFromRelevant) && !(isToRelevant && isFromRelevant)) { + const tokenId = getAddress(tokenAddress); + const token = await context.Token.get(tokenId); + if (!token) return false; + + const currentSupply = token[supplyField]; + const newSupply = isToRelevant + ? 
currentSupply + value + : currentSupply - value; + + context.Token.set({ ...token, [supplyField]: newSupply }); + + await storeDailyBucket( + context, + metricType, + currentSupply, + newSupply, + daoId, + timestamp, + tokenAddress, + ); + + return true; + } + + return false; +}; diff --git a/apps/hypersync-indexer/src/eventHandlers/metrics/total.ts b/apps/hypersync-indexer/src/eventHandlers/metrics/total.ts new file mode 100644 index 000000000..d80f01a61 --- /dev/null +++ b/apps/hypersync-indexer/src/eventHandlers/metrics/total.ts @@ -0,0 +1,58 @@ +import type { Address } from "viem"; +import { getAddress } from "viem"; +import type { handlerContext } from "../../../generated/index.js"; + +import { DaoIdEnum } from "../../lib/enums.ts"; +import { MetricTypesEnum } from "../../lib/constants.ts"; +import { + AddressCollection, + storeDailyBucket, + toAddressSet, +} from "../shared.ts"; + +export const updateTotalSupply = async ( + context: handlerContext, + addressList: AddressCollection, + metricType: MetricTypesEnum, + from: Address, + to: Address, + value: bigint, + daoId: DaoIdEnum, + tokenAddress: Address, + timestamp: bigint, +) => { + const normalizedAddressList = toAddressSet(addressList); + const isToBurningAddress = normalizedAddressList.has(getAddress(to)); + const isFromBurningAddress = normalizedAddressList.has(getAddress(from)); + const isTotalSupplyTransaction = + (isToBurningAddress || isFromBurningAddress) && + !(isToBurningAddress && isFromBurningAddress); + + if (isTotalSupplyTransaction) { + const isBurningTokens = normalizedAddressList.has(getAddress(to)); + const tokenId = getAddress(tokenAddress); + const token = await context.Token.get(tokenId); + if (!token) return false; + + const currentTotalSupply = token.totalSupply; + const newTotalSupply = isBurningTokens + ? 
currentTotalSupply - value + : currentTotalSupply + value; + + context.Token.set({ ...token, totalSupply: newTotalSupply }); + + await storeDailyBucket( + context, + metricType, + currentTotalSupply, + newTotalSupply, + daoId, + timestamp, + tokenAddress, + ); + + return true; + } + + return false; +}; diff --git a/apps/hypersync-indexer/src/eventHandlers/shared.ts b/apps/hypersync-indexer/src/eventHandlers/shared.ts new file mode 100644 index 000000000..5771e329b --- /dev/null +++ b/apps/hypersync-indexer/src/eventHandlers/shared.ts @@ -0,0 +1,166 @@ +import type { Address } from "viem"; +import { getAddress } from "viem"; +import type { handlerContext } from "../../generated/index.js"; +import type { MetricType_t } from "../../generated/src/db/Enums.gen.ts"; + +import { MetricTypesEnum } from "../lib/constants.ts"; +import { delta, max, min } from "../lib/utils.ts"; +import { truncateTimestampToMidnight } from "../lib/date-helpers.ts"; + +const METRIC_TYPE_MAP: Record = { + [MetricTypesEnum.TOTAL_SUPPLY]: "total", + [MetricTypesEnum.DELEGATED_SUPPLY]: "delegated", + [MetricTypesEnum.CEX_SUPPLY]: "cex", + [MetricTypesEnum.DEX_SUPPLY]: "dex", + [MetricTypesEnum.LENDING_SUPPLY]: "lending", + [MetricTypesEnum.CIRCULATING_SUPPLY]: "circulating", + [MetricTypesEnum.TREASURY]: "treasury", + [MetricTypesEnum.NON_CIRCULATING_SUPPLY]: "non_circulating", +}; + +export type AddressCollection = readonly Address[] | ReadonlySet
; + +const normalizeAddressCollection = ( + addresses: AddressCollection, +): Address[] => { + if (Array.isArray(addresses)) { + return [...new Set(addresses.map((address) => getAddress(address)))]; + } + + return [...(addresses as ReadonlySet
)]; +}; + +export const createAddressSet = ( + addresses: readonly Address[], +): ReadonlySet
=> + new Set(addresses.map((address) => getAddress(address))); + +export const toAddressSet = ( + addresses: AddressCollection, +): ReadonlySet
=> { + if (Array.isArray(addresses)) { + return new Set(addresses.map((address) => getAddress(address))); + } + + return addresses as ReadonlySet
; +}; + +export const ensureAccountExists = async ( + context: handlerContext, + address: Address, +): Promise => { + await context.Account.getOrCreate({ id: getAddress(address) }); +}; + +/** + * Helper function to ensure multiple accounts exist + */ +export const ensureAccountsExist = async ( + context: handlerContext, + addresses: Address[], +): Promise => { + const normalized = normalizeAddressCollection(addresses); + if (normalized.length === 0) return; + await Promise.all( + normalized.map((id) => context.Account.getOrCreate({ id })), + ); +}; + +export const storeDailyBucket = async ( + context: handlerContext, + metricType: MetricTypesEnum, + currentValue: bigint, + newValue: bigint, + daoId: string, + timestamp: bigint, + tokenAddress: Address, +) => { + const vol = delta(newValue, currentValue); + const date = BigInt(truncateTimestampToMidnight(Number(timestamp))); + const tokenId = getAddress(tokenAddress); + const id = `${date}-${tokenId}-${metricType}`; + + const existing = await context.DaoMetricsDayBucket.get(id); + if (existing) { + context.DaoMetricsDayBucket.set({ + ...existing, + average: + (existing.average * BigInt(existing.count) + newValue) / + BigInt(existing.count + 1), + high: max(newValue, existing.high), + low: min(newValue, existing.low), + closeValue: newValue, + volume: existing.volume + vol, + count: existing.count + 1, + lastUpdate: timestamp, + }); + } else { + context.DaoMetricsDayBucket.set({ + id, + date, + tokenId, + metricType: METRIC_TYPE_MAP[metricType], + daoId, + average: newValue, + openValue: newValue, + high: newValue, + low: newValue, + closeValue: newValue, + volume: vol, + count: 1, + lastUpdate: timestamp, + }); + } +}; + +export const handleTransaction = async ( + context: handlerContext, + transactionHash: string, + from: Address, + to: Address, + timestamp: bigint, + addresses: AddressCollection, + { + cex = [], + dex = [], + lending = [], + burning = [], + }: { + cex?: AddressCollection; + dex?: 
AddressCollection; + lending?: AddressCollection; + burning?: AddressCollection; + } = {}, +) => { + const normalizedAddresses = normalizeAddressCollection(addresses); + const normalizedCex = toAddressSet(cex); + const normalizedDex = toAddressSet(dex); + const normalizedLending = toAddressSet(lending); + const normalizedBurning = toAddressSet(burning); + + const isCex = normalizedAddresses.some((addr) => normalizedCex.has(addr)); + const isDex = normalizedAddresses.some((addr) => normalizedDex.has(addr)); + const isLending = normalizedAddresses.some((addr) => + normalizedLending.has(addr), + ); + const isTotal = normalizedAddresses.some((addr) => + normalizedBurning.has(addr), + ); + + if (!(isCex || isDex || isLending || isTotal)) { + return; + } + + const existing = await context.Transaction.get(transactionHash); + context.Transaction.set({ + id: transactionHash, + transactionHash, + fromAddress: getAddress(from), + toAddress: getAddress(to), + timestamp, + isCex: (existing?.isCex ?? false) || isCex, + isDex: (existing?.isDex ?? false) || isDex, + isLending: (existing?.isLending ?? false) || isLending, + isTotal: (existing?.isTotal ?? 
false) || isTotal, + }); +}; diff --git a/apps/hypersync-indexer/src/eventHandlers/transfer.ts b/apps/hypersync-indexer/src/eventHandlers/transfer.ts new file mode 100644 index 000000000..f25566823 --- /dev/null +++ b/apps/hypersync-indexer/src/eventHandlers/transfer.ts @@ -0,0 +1,149 @@ +import type { handlerContext } from "../../generated/index.js"; +import type { EventType_t } from "../../generated/src/db/Enums.gen.ts"; +import type { Address, Hex } from "viem"; +import { getAddress, zeroAddress } from "viem"; + +import { DaoIdEnum } from "../lib/enums.ts"; + +import { + AddressCollection, + ensureAccountsExist, + toAddressSet, +} from "./shared.ts"; + +export const tokenTransfer = async ( + context: handlerContext, + daoId: DaoIdEnum, + args: { + from: Address; + to: Address; + token: Address; + transactionHash: Hex; + value: bigint; + timestamp: bigint; + logIndex: number; + }, + { + cex = [], + dex = [], + lending = [], + burning = [], + }: { + cex?: AddressCollection; + dex?: AddressCollection; + lending?: AddressCollection; + burning?: AddressCollection; + }, +) => { + const { + from, + to, + token: tokenId, + transactionHash, + value, + timestamp, + logIndex, + } = args; + + const normalizedFrom = getAddress(from); + const normalizedTo = getAddress(to); + const normalizedTokenId = getAddress(tokenId); + + await ensureAccountsExist(context, [from, to]); + + // Upsert receiver balance and track current balance for history + const receiverBalanceId = `${normalizedTo}-${normalizedTokenId}`; + const existingReceiverBalance = + await context.AccountBalance.get(receiverBalanceId); + const currentReceiverBalance = existingReceiverBalance + ? existingReceiverBalance.balance + value + : value; + context.AccountBalance.set({ + id: receiverBalanceId, + accountId: normalizedTo, + tokenId: normalizedTokenId, + balance: currentReceiverBalance, + delegate: existingReceiverBalance?.delegate ?? 
zeroAddress, + }); + + context.BalanceHistory.set({ + id: `${transactionHash}-${normalizedTo}-${logIndex}`, + daoId, + transactionHash, + accountId: normalizedTo, + balance: currentReceiverBalance, + delta: value, + deltaMod: value > 0n ? value : -value, + timestamp, + logIndex, + }); + + if (from !== zeroAddress) { + const senderBalanceId = `${normalizedFrom}-${normalizedTokenId}`; + const existingSenderBalance = + await context.AccountBalance.get(senderBalanceId); + const currentSenderBalance = existingSenderBalance + ? existingSenderBalance.balance - value + : -value; + context.AccountBalance.set({ + id: senderBalanceId, + accountId: normalizedFrom, + tokenId: normalizedTokenId, + balance: currentSenderBalance, + delegate: existingSenderBalance?.delegate ?? zeroAddress, + }); + + context.BalanceHistory.set({ + id: `${transactionHash}-${normalizedFrom}-${logIndex}`, + daoId, + transactionHash, + accountId: normalizedFrom, + balance: currentSenderBalance, + delta: -value, + deltaMod: value > 0n ? value : -value, + timestamp, + logIndex, + }); + } + + const normalizedCex = toAddressSet(cex); + const normalizedDex = toAddressSet(dex); + const normalizedLending = toAddressSet(lending); + const normalizedBurning = toAddressSet(burning); + + const transferId = `${transactionHash}-${normalizedFrom}-${normalizedTo}`; + const existingTransfer = await context.Transfer.get(transferId); + context.Transfer.set({ + id: transferId, + transactionHash, + daoId, + tokenId: normalizedTokenId, + amount: (existingTransfer?.amount ?? 
0n) + value, + fromAccountId: normalizedFrom, + toAccountId: normalizedTo, + timestamp, + logIndex, + isCex: normalizedCex.has(normalizedFrom) || normalizedCex.has(normalizedTo), + isDex: normalizedDex.has(normalizedFrom) || normalizedDex.has(normalizedTo), + isLending: + normalizedLending.has(normalizedFrom) || + normalizedLending.has(normalizedTo), + isTotal: + normalizedBurning.has(normalizedFrom) || + normalizedBurning.has(normalizedTo), + }); + + context.FeedEvent.set({ + id: `${transactionHash}-${logIndex}`, + txHash: transactionHash, + logIndex, + eventType: "TRANSFER" as EventType_t, + value, + timestamp, + metadata: { + from: normalizedFrom, + to: normalizedTo, + amount: value.toString(), + }, + }); +}; diff --git a/apps/hypersync-indexer/src/eventHandlers/voting.ts b/apps/hypersync-indexer/src/eventHandlers/voting.ts new file mode 100644 index 000000000..d3b82ad26 --- /dev/null +++ b/apps/hypersync-indexer/src/eventHandlers/voting.ts @@ -0,0 +1,261 @@ +import type { handlerContext } from "../../generated/index.js"; +import type { EventType_t } from "../../generated/src/db/Enums.gen.ts"; +import type { Address, Hex } from "viem"; +import { getAddress } from "viem"; + +import { ProposalStatus } from "../lib/constants.ts"; + +import { ensureAccountExists } from "./shared.ts"; + +export const voteCast = async ( + context: handlerContext, + daoId: string, + args: { + proposalId: string; + voter: Address; + reason: string; + support: number; + timestamp: bigint; + txHash: Hex; + votingPower: bigint; + logIndex: number; + }, +) => { + const { + voter, + timestamp, + txHash, + proposalId, + support, + votingPower, + reason, + logIndex, + } = args; + + await ensureAccountExists(context, voter); + + const normalizedVoter = getAddress(voter); + const powerId = normalizedVoter; + const existingPower = await context.AccountPower.get(powerId); + context.AccountPower.set({ + id: powerId, + accountId: normalizedVoter, + daoId, + votingPower: existingPower?.votingPower ?? 
0n, + votesCount: (existingPower?.votesCount ?? 0) + 1, + proposalsCount: existingPower?.proposalsCount ?? 0, + delegationsCount: existingPower?.delegationsCount ?? 0, + lastVoteTimestamp: timestamp, + }); + + context.VoteOnchain.set({ + id: `${normalizedVoter}-${proposalId}`, + txHash, + daoId, + proposalId, + voterAccountId: normalizedVoter, + support: support.toString(), + votingPower, + reason, + timestamp, + }); + + // Update proposal vote totals + const proposal = await context.ProposalOnchain.get(proposalId); + if (proposal) { + context.ProposalOnchain.set({ + ...proposal, + againstVotes: proposal.againstVotes + (support === 0 ? votingPower : 0n), + forVotes: proposal.forVotes + (support === 1 ? votingPower : 0n), + abstainVotes: proposal.abstainVotes + (support === 2 ? votingPower : 0n), + }); + } + + context.FeedEvent.set({ + id: `${txHash}-${logIndex}`, + txHash, + logIndex, + eventType: "VOTE" as EventType_t, + value: votingPower, + timestamp, + metadata: { + voter: normalizedVoter, + reason, + support, + votingPower: votingPower.toString(), + proposalId, + title: proposal?.title ?? null, + }, + }); +}; + +const MAX_TITLE_LENGTH = 200; + +function parseProposalTitle(description: string): string { + const normalized = description.replace(/\\n/g, "\n"); + const lines = normalized.split("\n"); + + for (const line of lines) { + const trimmed = line.trim(); + if (!trimmed) continue; + if (/^# /.test(trimmed)) { + return trimmed.replace(/^# +/, ""); + } + break; + } + + for (const line of lines) { + const trimmed = line.trim(); + if (!trimmed || /^#{1,6}\s/.test(trimmed)) continue; + return trimmed.length > MAX_TITLE_LENGTH + ? trimmed.substring(0, MAX_TITLE_LENGTH) + "..." 
+ : trimmed; + } + + return ""; +} + +export const proposalCreated = async ( + context: handlerContext, + daoId: string, + blockTime: number, + args: { + proposalId: string; + txHash: Hex; + proposer: Address; + targets: Address[]; + values: bigint[]; + signatures: string[]; + calldatas: Hex[]; + startBlock: string; + endBlock: string; + description: string; + blockNumber: bigint; + timestamp: bigint; + proposalType?: number; + logIndex: number; + }, +) => { + const { + proposer, + proposalId, + txHash, + targets, + values, + signatures, + calldatas, + startBlock, + endBlock, + description, + blockNumber, + timestamp, + logIndex, + } = args; + + await ensureAccountExists(context, proposer); + + const title = parseProposalTitle(description); + const blockDelta = parseInt(endBlock) - Number(blockNumber); + + context.ProposalOnchain.set({ + id: proposalId, + txHash, + daoId, + proposerAccountId: getAddress(proposer), + targets: targets.map((a) => getAddress(a)), + values: values.map((v) => v.toString()), + signatures, + calldatas, + startBlock: parseInt(startBlock), + endBlock: parseInt(endBlock), + title, + description, + timestamp, + logIndex, + status: ProposalStatus.PENDING, + endTimestamp: timestamp + BigInt(blockDelta * blockTime), + proposalType: args.proposalType, + forVotes: 0n, + againstVotes: 0n, + abstainVotes: 0n, + }); + + const powerId = getAddress(proposer); + const existingPower = await context.AccountPower.get(powerId); + const proposerVotingPower = existingPower?.votingPower ?? 0n; + context.AccountPower.set({ + id: powerId, + accountId: powerId, + daoId, + votingPower: proposerVotingPower, + votesCount: existingPower?.votesCount ?? 0, + proposalsCount: (existingPower?.proposalsCount ?? 0) + 1, + delegationsCount: existingPower?.delegationsCount ?? 0, + lastVoteTimestamp: existingPower?.lastVoteTimestamp ?? 
0n, + }); + + context.FeedEvent.set({ + id: `${txHash}-${logIndex}`, + txHash, + logIndex, + eventType: "PROPOSAL" as EventType_t, + value: 0n, + timestamp, + metadata: { + id: proposalId, + proposer: getAddress(proposer), + votingPower: proposerVotingPower.toString(), + title, + }, + }); +}; + +export const updateProposalStatus = async ( + context: handlerContext, + proposalId: string, + status: string, +) => { + const proposal = await context.ProposalOnchain.get(proposalId); + if (proposal) { + context.ProposalOnchain.set({ ...proposal, status }); + } +}; + +export const proposalExtended = async ( + context: handlerContext, + proposalId: string, + blockTime: number, + extendedDeadline: bigint, + txHash: Hex, + logIndex: number, + timestamp: bigint, +) => { + const proposal = await context.ProposalOnchain.get(proposalId); + if (!proposal) return; + + const endTimestamp = + proposal.endTimestamp + + BigInt((Number(extendedDeadline) - proposal.endBlock) * blockTime); + + context.ProposalOnchain.set({ + ...proposal, + endBlock: Number(extendedDeadline), + endTimestamp, + }); + + context.FeedEvent.set({ + id: `${txHash}-${logIndex}`, + txHash, + logIndex, + eventType: "PROPOSAL_EXTENDED" as EventType_t, + value: 0n, + timestamp, + metadata: { + id: proposalId, + title: proposal.title, + endBlock: Number(extendedDeadline), + endTimestamp: endTimestamp.toString(), + proposer: getAddress(proposal.proposerAccountId), + }, + }); +}; diff --git a/apps/hypersync-indexer/src/lib/constants.ts b/apps/hypersync-indexer/src/lib/constants.ts new file mode 100644 index 000000000..a01a4773a --- /dev/null +++ b/apps/hypersync-indexer/src/lib/constants.ts @@ -0,0 +1,908 @@ +import type { Address } from "viem"; +import { zeroAddress } from "viem"; + +import { DaoIdEnum } from "./enums.ts"; + +export const CONTRACT_ADDRESSES = { + [DaoIdEnum.UNI]: { + blockTime: 12, + // https://etherscan.io/address/0x1f9840a85d5aF5bf1D1762F925BDADdC4201F984 + token: { + address: 
"0x1f9840a85d5aF5bf1D1762F925BDADdC4201F984", + decimals: 18, + startBlock: 10861674, + }, + // https://etherscan.io/address/0x408ED6354d4973f66138C91495F2f2FCbd8724C3 + governor: { + address: "0x408ED6354d4973f66138C91495F2f2FCbd8724C3", + startBlock: 13059157, + }, + }, + [DaoIdEnum.ENS]: { + blockTime: 12, + // https://etherscan.io/address/0xC18360217D8F7Ab5e7c516566761Ea12Ce7F9D72 + token: { + address: "0xC18360217D8F7Ab5e7c516566761Ea12Ce7F9D72", + decimals: 18, + startBlock: 9380410, + }, + // https://etherscan.io/address/0x323a76393544d5ecca80cd6ef2a560c6a395b7e3 + governor: { + address: "0x323a76393544d5ecca80cd6ef2a560c6a395b7e3", + startBlock: 13533772, + }, + }, + [DaoIdEnum.ARB]: { + blockTime: 0.25, + // https://arbiscan.io/address/0x912CE59144191C1204E64559FE8253a0e49E6548 + token: { + address: "0x912CE59144191C1204E64559FE8253a0e49E6548", + decimals: 18, + startBlock: 70398200, + }, + }, + [DaoIdEnum.OP]: { + blockTime: 2, + optimisticProposalType: 2, + // https://optimistic.etherscan.io/token/0x4200000000000000000000000000000000000042 + token: { + address: "0x4200000000000000000000000000000000000042", + decimals: 18, + startBlock: 6490467, + }, + // https://optimistic.etherscan.io/address/0xcDF27F107725988f2261Ce2256bDfCdE8B382B10 + governor: { + address: "0xcDF27F107725988f2261Ce2256bDfCdE8B382B10", + startBlock: 71801427, + }, + }, + [DaoIdEnum.TEST]: { + blockTime: 12, + token: { + address: "0x244dE6b06E7087110b94Cde88A42d9aBA17efa52", + decimals: 18, + startBlock: 22635098, + }, + governor: { + address: "0x7c28FC9709650D49c8d0aED2f6ece6b191F192a9", + startBlock: 22635098, + }, + }, + [DaoIdEnum.GTC]: { + blockTime: 12, + // https://etherscan.io/address/0xDe30da39c46104798bB5aA3fe8B9e0e1F348163F + token: { + address: "0xDe30da39c46104798bB5aA3fe8B9e0e1F348163F", + decimals: 18, + startBlock: 12422079, + }, + // https://etherscan.io/address/0x9D4C63565D5618310271bF3F3c01b2954C1D1639 + governor: { + address: 
"0x9D4C63565D5618310271bF3F3c01b2954C1D1639", + startBlock: 17813942, + }, + // https://etherscan.io/address/0xDbD27635A534A3d3169Ef0498beB56Fb9c937489 + governorAlpha: { + address: "0xDbD27635A534A3d3169Ef0498beB56Fb9c937489", + startBlock: 12497481, + }, + }, + [DaoIdEnum.NOUNS]: { + blockTime: 12, + token: { + // https://etherscan.io/token/0x9C8fF314C9Bc7F6e59A9d9225Fb22946427eDC03 + address: "0x9C8fF314C9Bc7F6e59A9d9225Fb22946427eDC03", + decimals: 0, + startBlock: 12985438, + }, + governor: { + // https://etherscan.io/address/0x6f3e6272a167e8accb32072d08e0957f9c79223d + address: "0x6f3e6272a167e8accb32072d08e0957f9c79223d", + startBlock: 12985453, + }, + auction: { + // https://etherscan.io/address/0x830BD73E4184ceF73443C15111a1DF14e495C706 + address: "0x830BD73E4184ceF73443C15111a1DF14e495C706", + startBlock: 12985451, + }, + }, + [DaoIdEnum.SCR]: { + blockTime: 1.5, + // https://scrollscan.com/address/0xd29687c813D741E2F938F4aC377128810E217b1b + token: { + address: "0xd29687c813D741E2F938F4aC377128810E217b1b", + decimals: 18, + startBlock: 8949006, + }, + // https://scrollscan.com/address/0x2f3f2054776bd3c2fc30d750734a8f539bb214f0 + governor: { + address: "0x2f3f2054776bd3c2fc30d750734a8f539bb214f0", + startBlock: 8963441, + }, + }, + [DaoIdEnum.COMP]: { + blockTime: 12, + // https://etherscan.io/address/0xc00e94Cb662C3520282E6f5717214004A7f26888 + token: { + address: "0xc00e94Cb662C3520282E6f5717214004A7f26888", + decimals: 18, + startBlock: 9601359, + }, + // https://etherscan.io/address/0x309a862bbC1A00e45506cB8A802D1ff10004c8C0 + governor: { + address: "0x309a862bbC1A00e45506cB8A802D1ff10004c8C0", + startBlock: 21688680, + }, + }, + [DaoIdEnum.OBOL]: { + blockTime: 12, + // https://etherscan.io/address/0x0B010000b7624eb9B3DfBC279673C76E9D29D5F7 + // Token created: Sep-19-2022 11:12:47 PM UTC + token: { + address: "0x0B010000b7624eb9B3DfBC279673C76E9D29D5F7", + decimals: 18, + startBlock: 15570746, + }, + // 
https://etherscan.io/address/0xcB1622185A0c62A80494bEde05Ba95ef29Fbf85c + // Governor created: Feb-19-2025 10:34:47 PM UTC + governor: { + address: "0xcB1622185A0c62A80494bEde05Ba95ef29Fbf85c", + startBlock: 21883431, + }, + }, + [DaoIdEnum.ZK]: { + blockTime: 1, + // https://explorer.zksync.io/address/0x5A7d6b2F92C77FAD6CCaBd7EE0624E64907Eaf3E + token: { + address: "0x5A7d6b2F92C77FAD6CCaBd7EE0624E64907Eaf3E", + decimals: 18, + startBlock: 34572100, + }, + // https://explorer.zksync.io/address/0xb83FF6501214ddF40C91C9565d095400f3F45746 + governor: { + address: "0xb83FF6501214ddF40C91C9565d095400f3F45746", + startBlock: 55519658, + }, + }, + [DaoIdEnum.SHU]: { + blockTime: 12, + tokenType: "ERC20", + // https://etherscan.io/address/0xe485E2f1bab389C08721B291f6b59780feC83Fd7 + token: { + address: "0xe485E2f1bab389C08721B291f6b59780feC83Fd7", + decimals: 18, + startBlock: 19021394, + }, + // https://etherscan.io/address/0xAA6BfA174d2f803b517026E93DBBEc1eBa26258e + azorius: { + address: "0xAA6BfA174d2f803b517026E93DBBEc1eBa26258e", + startBlock: 19021698, + }, + // https://etherscan.io/address/0x4b29d8B250B8b442ECfCd3a4e3D91933d2db720F + linearVotingStrategy: { + address: "0x4b29d8B250B8b442ECfCd3a4e3D91933d2db720F", + startBlock: 19021698, + }, + }, + [DaoIdEnum.FLUID]: { + blockTime: 12, + // https://etherscan.io/address/0x6f40d4A6237C257fff2dB00FA0510DeEECd303eb + token: { + address: "0x6f40d4A6237C257fff2dB00FA0510DeEECd303eb", + decimals: 18, + startBlock: 12183236, + }, + // https://etherscan.io/address/0x0204Cd037B2ec03605CFdFe482D8e257C765fA1B + governor: { + address: "0x0204Cd037B2ec03605CFdFe482D8e257C765fA1B", + startBlock: 12183245, + }, + }, + [DaoIdEnum.LIL_NOUNS]: { + blockTime: 12, + token: { + // https://etherscan.io/address/0x4b10701Bfd7BFEdc47d50562b76b436fbB5BdB3B + address: "0x4b10701Bfd7BFEdc47d50562b76b436fbB5BdB3B", + decimals: 0, + startBlock: 14736710, + }, + governor: { + // 
https://etherscan.io/address/0x5d2C31ce16924C2a71D317e5BbFd5ce387854039 + address: "0x5d2C31ce16924C2a71D317e5BbFd5ce387854039", + startBlock: 14736719, + }, + }, + [DaoIdEnum.AAVE]: { + blockTime: 1, + token: { + decimals: 18, + address: zeroAddress, + }, + aave: { + decimals: 18, + address: "0x7Fc66500c84A76Ad7e9c93437bFc5Ac33E2DDaE9", + }, + stkAAVE: { + decimals: 18, + address: "0x4da27a545c0c5B758a6BA100e3a049001de870f5", + }, + aAAVE: { + decimals: 18, + address: "0xA700b4eB416Be35b2911fd5Dee80678ff64fF6C9", + }, + }, +} as const; + +export const TreasuryAddresses: Record> = { + [DaoIdEnum.UNI]: { + timelock: "0x1a9C8182C09F50C8318d769245beA52c32BE35BC", + treasuryVester1: "0x4750c43867EF5F89869132ecCF19B9b6C4286E1a", + treasuryVester2: "0xe3953D9d317B834592aB58AB2c7A6aD22b54075D", + treasuryVester3: "0x4b4e140D1f131fdaD6fb59C13AF796fD194e4135", + treasuryVester4: "0x3D30B1aB88D487B0F3061F40De76845Bec3F1e94", + }, + [DaoIdEnum.ENS]: { + timelock: "0xFe89cc7aBB2C4183683ab71653C4cdc9B02D44b7", + endaoment: "0x4F2083f5fBede34C2714aFfb3105539775f7FE64", + oldEthRegistrarController: "0x283Af0B28c62C092C9727F1Ee09c02CA627EB7F5", + ethRegistrarController: "0x253553366Da8546fC250F225fe3d25d0C782303b", + }, + [DaoIdEnum.ARB]: { + // https://docs.arbitrum.foundation/deployment-addresses + "DAO Treasury": "0xF3FC178157fb3c87548bAA86F9d24BA38E649B58", + "L2 Treasury Timelock": "0xbFc1FECa8B09A5c5D3EFfE7429eBE24b9c09EF58", + "L2 Core Timelock": "0x34d45e99f7D8c45ed05B5cA72D54bbD1fb3F98f0", + "Foundation Vesting Wallet": "0x15533b77981cDa0F85c4F9a485237DF4285D6844", + }, + [DaoIdEnum.AAVE]: { + // https://github.com/bgd-labs/aave-address-book + Collector: "0x464C71f6c2F760DdA6093dCB91C24c39e5d6e18c", + "Ecosystem Reserve": "0x25F2226B597E8F9514B3F68F00f494cF4f286491", + }, + [DaoIdEnum.OP]: { + // https://gov.optimism.io/t/where-are-the-optimisms-main-treasury-addresses/8880 + "Unallocated Treasury": "0x2A82Ae142b2e62Cb7D10b55E323ACB1Cab663a26", + "Foundation Budget": 
"0x2501c477D0A35545a387Aa4A3EEe4292A9a8B3F0", + "Foundation Grants": "0x19793c7824Be70ec58BB673CA42D2779d12581BE", + "Foundation Locked Grants": "0xE4553b743E74dA3424Ac51f8C1E586fd43aE226F", + }, + [DaoIdEnum.NOUNS]: { + timelock: "0xb1a32fc9f9d8b2cf86c068cae13108809547ef71", + auction: "0x830BD73E4184ceF73443C15111a1DF14e495C706", + }, + [DaoIdEnum.LIL_NOUNS]: { + timelock: "0xd5f279ff9EB21c6D40C8f345a66f2751C4eeA1fB", + }, + [DaoIdEnum.TEST]: {}, + [DaoIdEnum.GTC]: { + "Gitcoin Timelock": "0x57a8865cfB1eCEf7253c27da6B4BC3dAEE5Be518", + "Gitcoin CSDO": "0x931896A8A9313F622a2AFCA76d1471B97955e551", + "Gitcoin Fraud Detection & Defense": + "0xD4567069C5a1c1fc8261d8Ff5C0B1d98f069Cf47", + "Gitcoin Grants Matching Pool": + "0xde21F729137C5Af1b01d73aF1dC21eFfa2B8a0d6", + "Gitcoin Merch, Memes and Marketing": + "0xC23DA3Ca9300571B9CF43298228353cbb3E1b4c0", + "Gitcoin Timelock Transfer 1": "0x6EEdE31a2A15340342B4BCb3039447d457aC7C4b", + "Gitcoin Timelock Transfer 2": "0xeD95D629c4Db80060C59432e81254D256AEc97E2", + "Vesting Address GTC 1": "0x2AA5d15Eb36E5960d056e8FeA6E7BB3e2a06A351", + "Staking contract GTC": "0x0E3efD5BE54CC0f4C64e0D186b0af4b7F2A0e95F", + "OKX Ventures": "0xe527BbDE3654E9ba824f9B72DFF495eEe60fD366", + "Protocol Labs 1": "0x154855f5522f6B04ce654175368F428852DCd55D", + "Matt Solomon": "0x7aD3d9819B06E800F8A65f3440D599A23D6A0BDf", + "Arbitrum Bridge": "0xa3A7B6F88361F48403514059F1F16C8E78d60EeC", + "Optimism Bridge": "0x99C9fc46f92E8a1c0deC1b1747d010903E884bE1", + "Radicle Timelock": "0x8dA8f82d2BbDd896822de723F55D6EdF416130ba", + "Vesting Address GTC 3": "0x2CDE9919e81b20B4B33DD562a48a84b54C48F00C", + "deltajuliet.eth 1": "0x5b1ddBEC956Ed39e1aC92AE3c3D99295ddD59865", + "deltajuliet.eth 2": "0x407466C56B8488c4d99558633Ff1AC5D84400B46", + "deltajuliet.eth 3": "0x14b9F70C3d4B367D496F3771EdA7EFA65282e55D", + "deltajuliet.eth 4": "0x0dcFc9323539A6eC47f9BC0A96882070540bf950", + "deltajuliet.eth 5": "0x08f3FB287AEc4E06EFF8de37410eaF52B05c7f56", + "Gitcoin 
Timelock Transfer 5": "0x9E75c3BFb82cf701AC0A74d6C1607461Ec65EfF9", + "Old Address, Large GTC Transfers 1": + "0xF5A7bA226CB94D87C29aDD2b59aC960904a163F3", + "Old Address, Large GTC Transfers 2": + "0xeD865C87c3509e3A908655777B13f7313b2fc196", + "Old Address, Large GTC Transfers 3": + "0xDD6a165B9e05549640149dF108AC0aF8579B7005", + "Old Address, Large GTC Transfers 4": + "0xaD467E6039F0Ca383b5FFd60F1C7a890acaB4bE3", + "Old Address, Large GTC Transfers 5": + "0x44d4d830788cc6D4d72C78203F5918a3E2761691", + "Old Address, Large GTC Transfers 6": + "0x38661187CfD779bEa00e14Bc5b986CF0C717A39B", + "Old Address, Large GTC Transfers 7": + "0x34237F91D2Ce322f3572376b82472C7FA56D7595", + "Old Address, Large GTC Transfers 8": + "0x2083e7B107347AE4F5Cb6Ff35EC5DAcf03391c57", + "Old Address, Large GTC Transfers 9": + "0x183a1CaF6750CF88E45812FCE0110D59d71833e4", + "Old Address, Large GTC Transfers 10": + "0x11e06eF6e42306dc40D2754Ef2629fB011d80aE9", + }, + [DaoIdEnum.SCR]: { + "DAO Treasury": "0x4cb06982dD097633426cf32038D9f1182a9aDA0c", + "Foundation Treasury": "0xfF120e015777E9AA9F1417a4009a65d2EdA78C13", + "Ecosystem Treasury": "0xeE198F4a91E5b05022dc90535729B2545D3b03DF", + }, + [DaoIdEnum.COMP]: { + Timelock: "0x6d903f6003cca6255D85CcA4D3B5E5146dC33925", + Comptroller: "0x3d9819210A31b4961b30EF54bE2aeD79B9c9Cd3B", + /// v2 markets + v2WBTC: "0xccF4429DB6322D5C611ee964527D42E5d685DD6a", + v2USDC: "0x39AA39c021dfbaE8faC545936693aC917d5E7563", + v2DAI: "0x5d3a536E4D6DbD6114cc1Ead35777bAB948E3643", + v2USDT: "0xf650C3d88D12dB855b8bf7D11Be6C55A4e07dCC9", + v2ETH: "0x4Ddc2D193948926D02f9B1fE9e1daa0718270ED5", + v2UNI: "0x35A18000230DA775CAc24873d00Ff85BccdeD550", + v2BAT: "0x6C8c6b02E7b2BE14d4fA6022Dfd6d75921D90E4E", + v2LINK: "0xFAce851a4921ce59e912d19329929CE6da6EB0c7", + v2TUSD: "0x12392F67bdf24faE0AF363c24aC620a2f67DAd86", + v2AAVE: "0xe65cdB6479BaC1e22340E4E755fAE7E509EcD06c", + v2COMP: "0x70e36f6BF80a52b3B46b3aF8e106CC0ed743E8e4", + ///v3 markets + //Ethereum markets + 
mainnetETH: "0xA17581A9E3356d9A858b789D68B4d866e593aE94", + mainnetstETH: "0x3D0bb1ccaB520A66e607822fC55BC921738fAFE3", + mainnetUSDT: "0x3Afdc9BCA9213A35503b077a6072F3D0d5AB0840", + mainnetUSDS: "0x5D409e56D886231aDAf00c8775665AD0f9897b56", + mainnetUSDC: "0xc3d688B66703497DAA19211EEdff47f25384cdc3", + mainnetWBTC: "0xe85Dc543813B8c2CFEaAc371517b925a166a9293", + // Optimism markets + opETH: "0xE36A30D249f7761327fd973001A32010b521b6Fd", + opUSDT: "0x995E394b8B2437aC8Ce61Ee0bC610D617962B214", + opUSDC: "0x2e44e174f7D53F0212823acC11C01A11d58c5bCB", + // Unichain markets + uniUSDC: "0x2c7118c4C88B9841FCF839074c26Ae8f035f2921", + uniETH: "0x6C987dDE50dB1dcDd32Cd4175778C2a291978E2a", + // Polygon markets + polyUSDT0: "0xaeB318360f27748Acb200CE616E389A6C9409a07", + polyUSDC: "0xF25212E676D1F7F89Cd72fFEe66158f541246445", + // Ronin markets + ronWETH: "0x4006ed4097ee51c09a04c3b0951d28ccf19e6dfe", + ronRON: "0xc0Afdbd1cEB621Ef576BA969ce9D4ceF78Dbc0c0", + // Mantle markets + manUSDe: "0x606174f62cd968d8e684c645080fa694c1D7786E", + // Base markets + manUSDbC: "0x9c4ec768c28520B50860ea7a15bd7213a9fF58bf", + manUSDC: "0xb125E6687d4313864e53df431d5425969c15Eb2F", + manAERO: "0x784efeB622244d2348d4F2522f8860B96fbEcE89", + manUSDS: "0x2c776041CCFe903071AF44aa147368a9c8EEA518", + manETH: "0x46e6b214b524310239732D51387075E0e70970bf", + // Arbitrum marketsVOTE + arbUSDT0: "0xd98Be00b5D27fc98112BdE293e487f8D4cA57d07", + arbUSDC: "0x9c4ec768c28520B50860ea7a15bd7213a9fF58bf", + "arbUSDC.e": "0xA5EDBDD9646f8dFF606d7448e414884C7d905dCA", + arbETH: "0x6f7D514bbD4aFf3BcD1140B7344b32f063dEe486", + // Linea markets + linUSDC: "0x8D38A3d6B3c3B7d96D6536DA7Eef94A9d7dbC991", + linETH: "0x60F2058379716A64a7A5d29219397e79bC552194", + // Scroll markets + scrUSDC: "0xB2f97c1Bd3bf02f5e74d13f02E3e26F93D77CE44", + }, + [DaoIdEnum.OBOL]: { + timelock: "0xCdBf527842Ab04Da548d33EB09d03DB831381Fb0", + "Ecosystem Treasury 1": "0x42D201CC4d9C1e31c032397F54caCE2f48C1FA72", + "Ecosystem Treasury 2": 
"0x54076088bE86943e27B99120c5905AAD8A1bD166", + "Staking Rewards Reserve": "0x33f3D61415784A5899b733976b0c1F9176051569", + "OBOL Incentives Reserve": "0xdc8A309111aB0574CA51cA9C7Dd0152738e4c374", + "Protocol Revenue": "0xDe5aE4De36c966747Ea7DF13BD9589642e2B1D0d", + "Grant Program": "0xa59f60A7684A69E63c07bEC087cEC3D0607cd5cE", + "DV Labs Treasury 2": "0x6BeFB6484AA10187947Dda81fC01e495f7168dB4", + }, + [DaoIdEnum.ZK]: { + timelock: "0xe5d21A9179CA2E1F0F327d598D464CcF60d89c3d", + }, + [DaoIdEnum.SHU]: { + timelock: "0x36bD3044ab68f600f6d3e081056F34f2a58432c4", + }, + [DaoIdEnum.FLUID]: { + "InstaDApp Treasury": "0x28849D2b63fA8D361e5fc15cB8aBB13019884d09", + "Fluid Liquidity": "0x52Aa899454998Be5b000Ad077a46Bbe360F4e497", + "Chainlink CCIP LockReleaseTokenPool": + "0x639f35C5E212D61Fe14Bd5CD8b66aAe4df11a50c", + InstaTimelock: "0xC7Cb1dE2721BFC0E0DA1b9D526bCdC54eF1C0eFC", + }, +}; + +export const CEXAddresses: Record> = { + [DaoIdEnum.UNI]: { + BinanceHotWallet: "0x5a52E96BAcdaBb82fd05763E25335261B270Efcb", + BinanceHotWallet2: "0x28C6c06298d514Db089934071355E5743bf21d60", + BinanceHotWallet3: "0x8894E0a0c962CB723c1976a4421c95949bE2D4E3", + BinanceHotWallet4: "0x43684D03D81d3a4C70da68feBDd61029d426F042", + BinanceHotWallet5: "0x21a31Ee1afC51d94C2eFcCAa2092aD1028285549", + BinanceHotWallet6: "0xDFd5293D8e347dFe59E90eFd55b2956a1343963d", + BinanceUSHotWallet: "0x21d45650db732cE5dF77685d6021d7D5d1da807f", + BinanceColdWallet: "0xF977814e90dA44bFA03b6295A0616a897441aceC", + BinancePegTokenFunds: "0x47ac0Fb4F2D84898e4D9E7b4DaB3C24507a6D503", + Robinhood: "0x73AF3bcf944a6559933396c1577B257e2054D935", + AnchorageDigital1: "0x985DE23260743c2c2f09BFdeC50b048C7a18c461", + AnchorageDigital2: "0xfad67fBdb7d4D8569671b8aa4A09F6a90d692Ed7", + BybitColdWallet1: "0x88a1493366D48225fc3cEFbdae9eBb23E323Ade3", + UpbitDeposit: "0xacCFeA7d9e618f60CE1347C52AE206262412AA4a", + UpbitColdWallet: "0x245445940B317E509002eb682E03f4429184059d", + KrakenColdWallet: 
"0xDA9dfA130Df4dE4673b89022EE50ff26f6EA73Cf", + KrakenHotWallet: "0x4C6007e38Ce164Ed80FF8Ff94192225FcdAC68CD", + KrakenHotWallet2: "0x0A332d03367366dd5fD3a554EF8f8B47ED36e591", + Robinhood2: "0x2eFB50e952580f4ff32D8d2122853432bbF2E204", + GeminiColdWallet: "0xAFCD96e580138CFa2332C632E66308eACD45C5dA", + KrakenColdWallet2: "0xC06f25517E906b7F9B4deC3C7889503Bb00b3370", + CoinbaseColdWallet: "0x6cc8FfF60A60AB0373fB3072f0B846450a8FA443", + NobitexIrHotWallet: "0xF639d88a89384A4D97f2bA9159567Ddb3890Ea07", + MEXCHotWallet: "0x4982085C9e2F89F2eCb8131Eca71aFAD896e89CB", + MEXCHotWallet2: "0x2e8F79aD740de90dC5F5A9F0D8D9661a60725e64", + OKXHotWallet: "0x6cC5F688a315f3dC28A7781717a9A798a59fDA7b", + StakeComHotWallet: "0xFa500178de024BF43CFA69B7e636A28AB68F2741", + BinanceWithdrawalHotWallet: "0xe2fc31F816A9b94326492132018C3aEcC4a93aE1", + NobitexIrHotWallet2: "0xd582C78a04E7379DfC9EE991A25f549576962eE1", + }, + [DaoIdEnum.ENS]: { + BinanceHotWallet: "0x5a52E96BAcdaBb82fd05763E25335261B270Efcb", + BinanceHotWallet2: "0x28C6c06298d514Db089934071355E5743bf21d60", + BinanceHotWallet3: "0x8894E0a0c962CB723c1976a4421c95949bE2D4E3", + BinanceHotWallet4: "0x43684D03D81d3a4C70da68feBDd61029d426F042", + BinanceHotWallet5: "0x21a31Ee1afC51d94C2eFcCAa2092aD1028285549", + BinanceHotWallet6: "0xDFd5293D8e347dFe59E90eFd55b2956a1343963d", + BinanceUSHotWallet: "0x21d45650db732cE5dF77685d6021d7D5d1da807f", + BitThumbHotWallet: "0x498697892fd0e5e3a16bd40D7bF2644F33CBbBd4", + BybitColdWallet1: "0x88a1493366D48225fc3cEFbdae9eBb23E323Ade3", + ByBitHotWallet: "0xf89d7b9c864f589bbF53a82105107622B35EaA40", + BtcTurkColdWallet: "0x76eC5A0D3632b2133d9f1980903305B62678Fbd3", + BitGetHotWallet: "0x5bdf85216ec1e38D6458C870992A69e38e03F7Ef", + CryptoComHotWallet: "0xA023f08c70A23aBc7EdFc5B6b5E171d78dFc947e", + CryptoComHotWallet2: "0xCFFAd3200574698b78f32232aa9D63eABD290703", + BitThumbHotWallet2: "0x10522336d85Cb52628C84e06CB05f79011FEf585", + ParibuColdWallet: 
"0xa23cbCdFAfd09De2ce793D0A08f51865885Be3f5", + CoinOneHotWallet: "0x167A9333BF582556f35Bd4d16A7E80E191aa6476", + BitvavoColdWallet: "0xc419733Ba8F13d8605141Cac8f681F5A0aBC0122", + KuCoinHotWallet: "0xD6216fC19DB775Df9774a6E33526131dA7D19a2c", + BitvavoColdWallet2: "0xedC6BacdC1e29D7c5FA6f6ECA6FDD447B9C487c9", + CoinbaseHotWallet: "0xA9D1e08C7793af67e9d92fe308d5697FB81d3E43", + MEXCHotWallet3: "0x3CC936b795A188F0e246cBB2D74C5Bd190aeCF18", + KuCoinColdWallet: "0x2933782B5A8d72f2754103D1489614F29bfA4625", + UpbitColdWallet: "0x245445940B317E509002eb682E03f4429184059d", + }, + [DaoIdEnum.ARB]: {}, + [DaoIdEnum.AAVE]: {}, + [DaoIdEnum.NOUNS]: {}, + [DaoIdEnum.LIL_NOUNS]: {}, + [DaoIdEnum.OP]: { + "Binance 1": "0xF977814e90dA44bFA03b6295A0616a897441aceC", + "Binance 2": "0x5a52E96BAcdaBb82fd05763E25335261B270Efcb", + OKX: "0x611f7bF868a6212f871e89F7e44684045DdFB09d", + Bybit: "0xf89d7b9c864f589bbF53a82105107622B35EaA40", + "Bybit 2": "0x88a1493366D48225fc3cEFbdae9eBb23E323Ade3", + Bithumb: "0xB18fe4B95b7d633c83689B5Ed3ac4ad0a857A2a7", + MEXC: "0xDF90C9B995a3b10A5b8570a47101e6c6a29eb945", + Gate: "0xC882b111A75C0c657fC507C04FbFcD2cC984F071", + "Kraken 1": "0x2a62C4aCcA1A166Ee582877112682cAe8Cc0ffe7", + "Kraken 2": "0xC06f25517E906b7F9B4deC3C7889503Bb00b3370", + "Bitkub 1": "0xda4231EF1768176536EEE3ec187315E60572BBD4", + "Bitkub 2": "0x7A1CF8CE543F4838c964FB14D403Cc6ED0bDbaCC", + Bitget: "0x5bdf85216ec1e38D6458C870992A69e38e03F7Ef", + "Kucoin 1": "0x2933782B5A8d72f2754103D1489614F29bfA4625", + "Kucoin 2": "0xC1274c580C5653cDF8246695c2E0112492a99D6F", + "Kucoin 3": "0xa3f45e619cE3AAe2Fa5f8244439a66B203b78bCc", + "Coinbase 1": "0xC8373EDFaD6d5C5f600b6b2507F78431C5271fF5", + "Coinbase 2": "0xD839C179a4606F46abD7A757f7Bb77D7593aE249", + "Crypto.com 1": "0x8a161a996617f130d0F37478483AfC8c1914DB6d", + "Crypto.com 2": "0x92BD687953Da50855AeE2Df0Cff282cC2d5F226b", + "Btcturk 1": "0xdE2fACa4BBC0aca08fF04D387c39B6f6325bf82A", + "Btcturk 2": 
"0xB5A46bC8b76FD2825AEB43db9C9e89e89158ECdE", + "Bitpanda 1": "0xb1A63489469868dD1d0004922C36D5079d6331c6", + "Bitpanda 2": "0x5E8c4499fDD78A5EFe998b3ABF78658E02BB7702", + "Bitpanda 3": "0x0529ea5885702715e83923c59746ae8734c553B7", + "BingX 1": "0xC3dcd744db3f114f0edF03682b807b78A227Bf74", + "Bingx 2": "0x0b07f64ABc342B68AEc57c0936E4B6fD4452967E", + "HTX 1": "0xe0B7A39Fef902c21bAd124b144c62E7F85f5f5fA", + "HTX 2": "0xd3Cc0C7d40366A061397274Eae7C387D840e6ff8", + Bitbank: "0x3727cfCBD85390Bb11B3fF421878123AdB866be8", + Revolut: "0x9b0c45d46D386cEdD98873168C36efd0DcBa8d46", + "Paribu 1": "0xc80Afd311c9626528De66D86814770361Fe92416", + Coinspot: "0xf35A6bD6E0459A4B53A27862c51A2A7292b383d1", + "Bitvavo 1": "0x48EcA43dB3a3Ca192a5fB9b20F4fc4d96017AF0F", + SwissBorg: "0x28cC933fecf280E720299b1258e8680355D8841F", + "Coinbase Prime": "0xDfD76BbFEB9Eb8322F3696d3567e03f894C40d6c", + "Binance US": "0x43c5b1C2bE8EF194a509cF93Eb1Ab3Dbd07B97eD", + "Bitstamp 1": "0x7C43E0270c868D0341c636a38C07e5Ae93908a04", + "Bitstamp 2": "0x4c2eEb203DDC70291e33796527dE4272Ac9fafc1", + "Coinhako 1": "0xE66BAa0B612003AF308D78f066Bbdb9a5e00fF6c", + "Coinhako 2": "0xE66BAa0B612003AF308D78f066Bbdb9a5e00fF6c", + Bitfinex: "0x77134cbC06cB00b66F4c7e623D5fdBF6777635EC", + "Woo Network": "0x63DFE4e34A3bFC00eB0220786238a7C6cEF8Ffc4", + Koribit: "0xf0bc8FdDB1F358cEf470D63F96aE65B1D7914953", + "Indodax 1": "0x3C02290922a3618A4646E3BbCa65853eA45FE7C6", + "Indodax 2": "0x91Dca37856240E5e1906222ec79278b16420Dc92", + }, + [DaoIdEnum.TEST]: { + // Major centralized exchanges (CEX) - Alice and Bob for comprehensive coverage + Alice_CEX: "0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266", // Alice as CEX + Bob_CEX: "0x70997970C51812dc3A010C7d01b50e0d17dc79C8", // Bob as CEX + // ENS contract addresses for completeness + ENSToken: "0x244dE6b06E7087110b94Cde88A42d9aBA17efa52", + ENSGovernor: "0x7c28FC9709650D49c8d0aED2f6ece6b191F192a9", + ENSTimelock: "0xa7E99C1df635d13d61F7c81eCe571cc952E64526", + }, + [DaoIdEnum.GTC]: { + 
"Binance 1": "0xF977814e90dA44bFA03b6295A0616a897441aceC", + "Binance 2": "0x28C6c06298d514Db089934071355E5743bf21d60", + "Binance 3": "0x5a52E96BAcdaBb82fd05763E25335261B270Efcb", + "Binance 4": "0xDFd5293D8e347dFe59E90eFd55b2956a1343963d", + "Binance 5": "0x21a31Ee1afC51d94C2eFcCAa2092aD1028285549", + Bithumb: "0x74be0CF1c9972C00ed4EF290e0E5BCFd18873f13", + Upbit: "0x74be0CF1c9972C00ed4EF290e0E5BCFd18873f13", + "Upbit 2": "0xeDAe8A6cBA6867a0B7e565C21eaBAEe3D550fd9d", + "Coinbase 1": "0x237eF9564D74A1056c1A276B03C66055Fa61A700", + "Coinbase 2": "0x31Bc777E72A0A7F90cC7b1ec52eACeC806B27563", + "Coinbase 3": "0x11aC4fE470Cf8B5b3de59B31261030BD8514892d", + "Coinbase 4": "0x271Ac4A385F689f00D01716877e827702231447e", + "Coinbase 5": "0x4a630c042B2b07a0641d487b0Ccf5af36800415e", + "Coinbase 6": "0xA9D1e08C7793af67e9d92fe308d5697FB81d3E43", + Kraken: "0x310E035d176ccB589511eD16af7aE7BAc4fc7f83", + "Kraken 2": "0xC06f25517E906b7F9B4deC3C7889503Bb00b3370", + "Kraken 3": "0x22af984f13DFB5C80145E3F9eE1050Ae5a5FB651", + "Crypto.com": "0xCFFAd3200574698b78f32232aa9D63eABD290703", + "Crypto.com 2": "0xA023f08c70A23aBc7EdFc5B6b5E171d78dFc947e", + "Crypto.com 3": "0x46340b20830761efd32832A74d7169B29FEB9758", + Kucoin: "0x58edF78281334335EfFa23101bBe3371b6a36A51", + "Kucoin 2": "0xD6216fC19DB775Df9774a6E33526131dA7D19a2c", + Bittavo: "0xaB782bc7D4a2b306825de5a7730034F8F63ee1bC", + MEXC: "0x9642b23Ed1E01Df1092B92641051881a322F5D4E", + "MEXC 2": "0x75e89d5979E4f6Fba9F97c104c2F0AFB3F1dcB88", + Gate: "0x0D0707963952f2fBA59dD06f2b425ace40b492Fe", + BingX: "0xC3dcd744db3f114f0edF03682b807b78A227Bf74", + Bitget: "0x5bdf85216ec1e38D6458C870992A69e38e03F7Ef", + CoinEx: "0x38f6d5fb32f970Fe60924B282704899411126336", + Bitpanda: "0x0529ea5885702715e83923c59746ae8734c553B7", + }, + [DaoIdEnum.SCR]: { + "Binance 2": "0x98ADeF6F2ac8572ec48965509d69A8Dd5E8BbA9D", + "Binance 3": "0x687B50A70D33D71f9a82dD330b8C091e4D772508", + "Gate 2": "0xC882b111A75C0c657fC507C04FbFcD2cC984F071", + "OKX 2": 
"0xB0A27099582833c0Cb8C7A0565759fF145113d64", + Binance: "0xF977814e90dA44bFA03b6295A0616a897441aceC", + BingX: "0x2b3bf74B29f59fb8dDA41Cf3d6A8DA28CF8e7921", + Bitget: "0x1AB4973a48dc892Cd9971ECE8e01DcC7688f8F23", + Bitpanda: "0x0529ea5885702715e83923c59746ae8734c553B7", + Bybit: "0xf89d7b9c864f589bbF53a82105107622B35EaA40", + Gate: "0x0D0707963952f2fBA59dD06f2b425ace40b492Fe", + Kucoin: "0x2933782B5A8d72f2754103D1489614F29bfA4625", + OKX: "0x611f7bF868a6212f871e89F7e44684045DdFB09d", + }, + [DaoIdEnum.COMP]: { + Robinhood: "0x73AF3bcf944a6559933396c1577B257e2054D935", + "Robinhood 2": "0x841ed663F2636863D40be4EE76243377dff13a34", + "Binance 1": "0xF977814e90dA44bFA03b6295A0616a897441aceC", + "Binance 2": "0x47ac0Fb4F2D84898e4D9E7b4DaB3C24507a6D503", + "Binance 3": "0x28C6c06298d514Db089934071355E5743bf21d60", + "Binance 4": "0x21a31Ee1afC51d94C2eFcCAa2092aD1028285549", + "Binance 5": "0xDFd5293D8e347dFe59E90eFd55b2956a1343963d", + ByBit: "0x6522B7F9d481eCEB96557F44753a4b893F837E90", + OKX: "0x073F564419b625A45D8aEa3bb0dE4d5647113AD7", + Upbit: "0x47ac0Fb4F2D84898e4D9E7b4DaB3C24507a6D503", + BtcTurk: "0x76eC5A0D3632b2133d9f1980903305B62678Fbd3", + Bithumb: "0x75252a69676C2472EdF9974476e9c636ca7a8AF1", + Kraken: "0x7DAFbA1d69F6C01AE7567Ffd7b046Ca03B706f83", + "Kraken 2": "0xd2DD7b597Fd2435b6dB61ddf48544fd931e6869F", + "Kucoin 1": "0x2933782B5A8d72f2754103D1489614F29bfA4625", + "Kucoin 2": "0x58edF78281334335EfFa23101bBe3371b6a36A51", + }, + [DaoIdEnum.OBOL]: { + "Bybit Hot Wallet": "0xA31231E727Ca53Ff95f0D00a06C645110c4aB647", + "Binance Wallet": "0x93dEb693b170d56BdDe1B0a5222B14c0F885d976", + "Gate Cold Wallet": "0xC882b111A75C0c657fC507C04FbFcD2cC984F071", + "Gate Hot Wallet": "0x0D0707963952f2fBA59dD06f2b425ace40b492Fe", + "MEXC Hot Wallet": "0x9642b23Ed1E01Df1092B92641051881a322F5D4E", + "Binance Wallet Proxy": "0x73D8bD54F7Cf5FAb43fE4Ef40A62D390644946Db", + }, + [DaoIdEnum.ZK]: { + "Binance 1": "0xf977814e90da44bfa03b6295a0616a897441acec", + "Binance 2": 
"0x7aed074ca56f5050d5a2e512ecc5bf7103937d76", + "Binance 3": "0xa84fd90d8640fa63d194601e0b2d1c9094297083", + "Binance 4": "0x43684d03d81d3a4c70da68febdd61029d426f042", + "Binance 5": "0x98adef6f2ac8572ec48965509d69a8dd5e8bba9d", + Bybit: "0xacf9a5610cb9e6ec9c84ca7429815e95b6607e9f", + OKX1: "0x611f7bf868a6212f871e89f7e44684045ddfb09d", + BtcTurk: "0x7aed074ca56f5050d5a2e512ecc5bf7103937d76", + MEXC: "0xfe4931fb4deabc515f1a48b94b6b17653eeaa34f", + Bitget: "0x97b9d2102a9a65a26e1ee82d59e42d1b73b68689", + Kraken: "0xd2dd7b597fd2435b6db61ddf48544fd931e6869f", + Kucoin: "0xd6216fc19db775df9774a6e33526131da7d19a2c", + "Kucoin 2": "0x2933782b5a8d72f2754103d1489614f29bfa4625", + Gate: "0x0d0707963952f2fba59dd06f2b425ace40b492fe", + "Gate 2": "0xc882b111a75c0c657fc507c04fbfcd2cc984f071", + "Crypto.com": "0x2a584c02de672425729af2f174fb19fe734dde5d", + OKX2: "0xf9b52be2426f06ab6d560f64a7b15e820f33cbdb", + OKX3: "0xecf17c7f6a6090f1edd21e0beb2268197270fb44", + }, + [DaoIdEnum.SHU]: {}, + [DaoIdEnum.FLUID]: { + MEXC: "0x9642b23Ed1E01Df1092B92641051881a322F5D4E", + Gate: "0x0D0707963952f2fBA59dD06f2b425ace40b492Fe", + Bitvavo: "0xaB782bc7D4a2b306825de5a7730034F8F63ee1bC", + }, +}; + +export const DEXAddresses: Record> = { + [DaoIdEnum.UNI]: { + // ArbitrumL1ERC20Gateway: "0xa3a7b6f88361f48403514059f1f16c8e78d60eec", + Uniswap_UNI_ETH_V3_03: "0x1d42064Fc4Beb5F8aAF85F4617AE8b3b5B8Bd801", + Uniswap_UNI_ETH_V3_1: "0x360b9726186C0F62cc719450685ce70280774Dc8", + Uniswap_UNI_ETH_V2_03: "0xd3d2E2692501A5c9Ca623199D38826e513033a17", + Uniswap_UNI_USDT_V3_03: "0x3470447f3CecfFAc709D3e783A307790b0208d60", + Uniswap_UNI_AAVE_V3_03: "0x59c38b6775Ded821f010DbD30eCabdCF84E04756", + Uniswap_UNI_USDC_V3_03: "0xD0fC8bA7E267f2bc56044A7715A489d851dC6D78", + Uniswap_UNI_WBTC_V3_03: "0x8F0CB37cdFF37E004E0088f563E5fe39E05CCC5B", + Uniswap_UNI_LINK_V3_1: "0xA6B9a13B34db2A00284299c47DACF49FB62C1755", + Uniswap_UNI_1INCH_V3_1: "0x0619062B988576FE2d39b33fF23Fb1a0330c0ac7", + Uniswap_UNI_ETH_V3_005: 
"0xfaA318479b7755b2dBfDD34dC306cb28B420Ad12", + Sushi_UNI_ETH_V2_03: "0xDafd66636E2561b0284EDdE37e42d192F2844D40", + BalancerCow_UNI_ETH: "0xa81b22966f1841e383e69393175e2cc65f0a8854", + }, + [DaoIdEnum.ENS]: { + Uniswap_ENS_5: "0x92560C178cE069CC014138eD3C2F5221Ba71f58a", + SushiSwapEthENSV2: "0xa1181481beb2dc5de0daf2c85392d81c704bf75d", + }, + [DaoIdEnum.ARB]: {}, + [DaoIdEnum.AAVE]: {}, + [DaoIdEnum.NOUNS]: {}, + [DaoIdEnum.LIL_NOUNS]: {}, + [DaoIdEnum.OP]: { + "Velodrome Finance": "0x47029bc8f5CBe3b464004E87eF9c9419a48018cd", + "Uniswap 1": "0x9a13F98Cb987694C9F086b1F5eB990EeA8264Ec3", + "Uniswap 2": "0xFC1f3296458F9b2a27a0B91dd7681C4020E09D05", + "Uniswap 3": "0xA39fe8F7A00CE28B572617d3a0bC1c2B44110e79", + "WooFi 1": "0x5520385bFcf07Ec87C4c53A7d8d65595Dff69FA4", + Curve: "0xd8dD9a8b2AcA88E68c46aF9008259d0EC04b7751", + Balancer: "0xBA12222222228d8Ba445958a75a0704d566BF2C8", + Mux: "0xc6BD76FA1E9e789345e003B361e4A0037DFb7260", + }, + [DaoIdEnum.TEST]: { + // DEX pools - Charlie and David for comprehensive coverage + Charlie_DEX_Pool: "0x3C44CdDdB6a900fa2b585dd299e03d12FA4293BC", // Charlie as DEX + David_DEX_Pool: "0x90F79bf6EB2c4f870365E785982E1f101E93b906", // David as DEX + // ENS contract addresses involved in DEX-like operations + ENSToken: "0x244dE6b06E7087110b94Cde88A42d9aBA17efa52", + ENSTimelock: "0xa7E99C1df635d13d61F7c81eCe571cc952E64526", + }, + [DaoIdEnum.GTC]: { + Uniswap: "0xD017617f6F0fD22796E137a8240cc38F52a147B2", + }, + [DaoIdEnum.SCR]: { + Honeypop: "0x7761786afAB6E496e6Bf3EBe56fc2ea71cd02d7D", + DEX: "0x7761786afAB6E496e6Bf3EBe56fc2ea71cd02d7D", + "Ambient Finance": "0xaaaaAAAACB71BF2C8CaE522EA5fa455571A74106", + SyncSwap: "0x7160570BB153Edd0Ea1775EC2b2Ac9b65F1aB61B", + Nuri: "0x76c662b1e25CB67D7365191B55813D8CD3Fdac02", + }, + [DaoIdEnum.COMP]: { + Uniswap: "0x5598931BfBb43EEC686fa4b5b92B5152ebADC2f6", + "Uniswap 2": "0xea4Ba4CE14fdd287f380b55419B1C5b6c3f22ab6", + "Pancake Swap": "0x0392957571F28037607C14832D16f8B653eDd472", + }, + 
[DaoIdEnum.OBOL]: { + "Uniswap V3 Pool": "0x57F52C9faa6D40c5163D76b8D7dD81ddB7c95434", + "Uniswap PoolManager": "0x000000000004444c5dc75cB358380D2e3dE08A90", + }, + [DaoIdEnum.ZK]: { + "Pancake Swap": "0xf92b0178bc932a59d45c1c4aac81712aac6a5b61", + Uniswap: "0x3d7264539E6e3f596bb485E3091f3Ae02Ad01ef8", + }, + [DaoIdEnum.SHU]: { + "Uniswap V3": "0x7A922aea89288d8c91777BeECc68DF4A17151df1", + }, + [DaoIdEnum.FLUID]: { + "Uniswap V3 INST/WETH": "0xc1cd3D0913f4633b43FcdDBCd7342bC9b71C676f", + }, +}; + +export const LendingAddresses: Record> = { + [DaoIdEnum.UNI]: { + AaveEthUni: "0xF6D2224916DDFbbab6e6bd0D1B7034f4Ae0CaB18", + MorphoBlue: "0xBBBBBbbBBb9cC5e90e3b3Af64bdAF62C37EEFFCb", + CompoundCUNI: "0x35A18000230DA775CAc24873d00Ff85BccdeD550", + }, + [DaoIdEnum.ENS]: { + //After research using intel.arkm and defi llama token-usage page, I only found this lending address so far + AaveEthENS: "0x545bD6c032eFdde65A377A6719DEF2796C8E0f2e", + }, + [DaoIdEnum.ARB]: {}, + [DaoIdEnum.AAVE]: {}, + [DaoIdEnum.NOUNS]: {}, + [DaoIdEnum.LIL_NOUNS]: {}, + [DaoIdEnum.OP]: { + Aave: "0x513c7E3a9c69cA3e22550eF58AC1C0088e918FFf", + Superfluid: "0x1828Bff08BD244F7990edDCd9B19cc654b33cDB4", + Moonwell: "0x9fc345a20541Bf8773988515c5950eD69aF01847", + "Silo Finance": "0x8ED1609D796345661d36291B411992e85DE7B224", + "Compound 1": "0x2e44e174f7D53F0212823acC11C01A11d58c5bCB", + "Compound 2": "0x995E394b8B2437aC8Ce61Ee0bC610D617962B214", + "Exactly Protocol": "0xa430A427bd00210506589906a71B54d6C256CEdb", + Morpho: "0xF057afeEc22E220f47AD4220871364e9E828b2e9", + dForce: "0x7702dC73e8f8D9aE95CF50933aDbEE68e9F1D725", + }, + [DaoIdEnum.TEST]: { + // Lending protocols - different addresses for comprehensive flag coverage + Alice_Lending_Protocol: "0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266", // Alice as lending + Charlie_Lending_Pool: "0x3C44CdDdB6a900fa2b585dd299e03d12FA4293BC", // Charlie as lending + // ENS contract addresses involved in lending-like operations + ENSGovernor: 
"0x7c28FC9709650D49c8d0aED2f6ece6b191F192a9", + ENSTimelock: "0xa7E99C1df635d13d61F7c81eCe571cc952E64526", + }, + [DaoIdEnum.GTC]: {}, + [DaoIdEnum.SCR]: { + Aave: "0x25718130C2a8eb94e2e1FAFB5f1cDd4b459aCf64", + }, + [DaoIdEnum.COMP]: { + Compound: "0xc3d688B66703497DAA19211EEdff47f25384cdc3", + "Compound 2": "0x3Afdc9BCA9213A35503b077a6072F3D0d5AB0840", + }, + [DaoIdEnum.OBOL]: {}, + [DaoIdEnum.ZK]: { + Aave: "0xd6cd2c0fc55936498726cacc497832052a9b2d1b", + Venus: "0x697a70779c1a03ba2bd28b7627a902bff831b616", + }, + [DaoIdEnum.SHU]: {}, + [DaoIdEnum.FLUID]: {}, +}; + +export const BurningAddresses: Record< + DaoIdEnum, + { + ZeroAddress: Address; + Dead: Address; + TokenContract: Address; + Airdrop?: Address; + } +> = { + [DaoIdEnum.UNI]: { + ZeroAddress: zeroAddress, + Dead: "0x000000000000000000000000000000000000dEaD", + TokenContract: "0x1f9840a85d5af5bf1d1762f925bdaddc4201f984", + Airdrop: "0x090D4613473dEE047c3f2706764f49E0821D256e", + }, + [DaoIdEnum.ENS]: { + ZeroAddress: zeroAddress, + Dead: "0x000000000000000000000000000000000000dEaD", + TokenContract: "0xC18360217D8F7Ab5e7c516566761Ea12Ce7F9D72", + }, + [DaoIdEnum.ARB]: { + ZeroAddress: zeroAddress, + Dead: "0x000000000000000000000000000000000000dEaD", + TokenContract: "0xB50721BCf8d664c30412Cfbc6cf7a15145234ad1", + }, + [DaoIdEnum.AAVE]: { + ZeroAddress: zeroAddress, + Dead: "0x000000000000000000000000000000000000dEaD", + TokenContract: "0x000000000000000000000000000000000000dEaD", + }, + [DaoIdEnum.OP]: { + ZeroAddress: zeroAddress, + Dead: "0x000000000000000000000000000000000000dEaD", + TokenContract: "0x4200000000000000000000000000000000000042", + }, + [DaoIdEnum.TEST]: { + ZeroAddress: zeroAddress, + Dead: "0x000000000000000000000000000000000000dEaD", + TokenContract: "0x244dE6b06E7087110b94Cde88A42d9aBA17efa52", + }, + [DaoIdEnum.GTC]: { + ZeroAddress: zeroAddress, + Dead: "0x000000000000000000000000000000000000dEaD", + TokenContract: "0xDe30da39c46104798bB5aA3fe8B9e0e1F348163F", + }, + 
[DaoIdEnum.NOUNS]: { + ZeroAddress: zeroAddress, + Dead: "0x000000000000000000000000000000000000dEaD", + TokenContract: "0x9C8fF314C9Bc7F6e59A9d9225Fb22946427eDC03", + }, + [DaoIdEnum.LIL_NOUNS]: { + ZeroAddress: zeroAddress, + Dead: "0x0000000000000000000000000000000000000000", + TokenContract: "0x4b10701Bfd7BFEdc47d50562b76b436fbB5BdB3B", + }, + [DaoIdEnum.SCR]: { + ZeroAddress: zeroAddress, + Dead: "0x000000000000000000000000000000000000dEaD", + TokenContract: "0xd29687c813D741E2F938F4aC377128810E217b1b", + }, + [DaoIdEnum.COMP]: { + ZeroAddress: zeroAddress, + Dead: "0x000000000000000000000000000000000000dEaD", + TokenContract: "0xc00e94Cb662C3520282E6f5717214004A7f26888", + }, + [DaoIdEnum.OBOL]: { + ZeroAddress: zeroAddress, + Dead: "0x000000000000000000000000000000000000dEaD", + TokenContract: "0x0B010000b7624eb9B3DfBC279673C76E9D29D5F7", + }, + [DaoIdEnum.ZK]: { + ZeroAddress: zeroAddress, + Dead: "0x000000000000000000000000000000000000dEaD", + TokenContract: "0x5A7d6b2F92C77FAD6CCaBd7EE0624E64907Eaf3E", + }, + [DaoIdEnum.SHU]: { + ZeroAddress: zeroAddress, + Dead: "0x000000000000000000000000000000000000dEaD", + TokenContract: "0xe485E2f1bab389C08721B291f6b59780feC83Fd7", + }, + [DaoIdEnum.FLUID]: { + ZeroAddress: zeroAddress, + Dead: "0x000000000000000000000000000000000000dEaD", + TokenContract: "0x6f40d4A6237C257fff2dB00FA0510DeEECd303eb", + }, +}; + +export const NonCirculatingAddresses: Record< + DaoIdEnum, + Record +> = { + [DaoIdEnum.UNI]: {}, + [DaoIdEnum.ENS]: { + // https://etherscan.io/address/0xd7a029db2585553978190db5e85ec724aa4df23f + // Linear vesting for contributors, unlock end Dec 2025 + "Token Timelock": "0xd7a029db2585553978190db5e85ec724aa4df23f", + }, + [DaoIdEnum.ARB]: {}, + [DaoIdEnum.AAVE]: { + // https://etherscan.io/address/0x317625234562B1526Ea2FaC4030Ea499C5291de4 + // Permanently locked - LEND migration discontinued + "LEND to AAVE Migrator": "0x317625234562B1526Ea2FaC4030Ea499C5291de4", + }, + [DaoIdEnum.OP]: {}, + 
[DaoIdEnum.NOUNS]: {}, + [DaoIdEnum.TEST]: {}, + [DaoIdEnum.GTC]: {}, + [DaoIdEnum.SCR]: {}, + [DaoIdEnum.FLUID]: {}, + [DaoIdEnum.COMP]: {}, + [DaoIdEnum.OBOL]: {}, + [DaoIdEnum.ZK]: { + // https://docs.zknation.io/zk-nation/zksync-governance-contract-addresses + "Initial Merkle Distributor": "0x66fd4fc8fa52c9bec2aba368047a0b27e24ecfe4", + "Second ZK Distributor": "0xb294F411cB52c7C6B6c0B0b61DBDf398a8b0725d", + "Third ZK Distributor": "0xf29d698e74ef1904bcfdb20ed38f9f3ef0a89e5b", + "Matter Labs Allocation": "0xa97fbc75ccbc7d4353c4d2676ed18cd0c5aaf7e6", + "Foundation Allocation": "0xd78dc27d4db8f428c67f542216a2b23663838405", + "Guardians Allocation": "0x21b27952f8621f54f3cb652630e122ec81dd2dc1", + "Security Council Allocation": "0x0ad50686c159040e57ddce137db0b63c67473450", + "ZKsync Association Allocation": + "0x0681e3808a0aa12004fb815ebb4515dc823cfbb4", + }, + [DaoIdEnum.LIL_NOUNS]: {}, + [DaoIdEnum.SHU]: {}, +}; + +export const ProposalStatus = { + PENDING: "PENDING", + ACTIVE: "ACTIVE", + CANCELED: "CANCELED", + DEFEATED: "DEFEATED", + SUCCEEDED: "SUCCEEDED", + QUEUED: "QUEUED", + EXPIRED: "EXPIRED", + EXECUTED: "EXECUTED", + VETOED: "VETOED", + NO_QUORUM: "NO_QUORUM", +} as const; + +export type ProposalStatus = + (typeof ProposalStatus)[keyof typeof ProposalStatus]; + +export const MetricTypesEnum = { + TOTAL_SUPPLY: "TOTAL_SUPPLY", + DELEGATED_SUPPLY: "DELEGATED_SUPPLY", + CEX_SUPPLY: "CEX_SUPPLY", + DEX_SUPPLY: "DEX_SUPPLY", + LENDING_SUPPLY: "LENDING_SUPPLY", + CIRCULATING_SUPPLY: "CIRCULATING_SUPPLY", + TREASURY: "TREASURY", + NON_CIRCULATING_SUPPLY: "NON_CIRCULATING_SUPPLY", +} as const; + +export type MetricTypesEnum = + (typeof MetricTypesEnum)[keyof typeof MetricTypesEnum]; + +export const metricTypeArray = Object.values(MetricTypesEnum); diff --git a/apps/hypersync-indexer/src/lib/date-helpers.ts b/apps/hypersync-indexer/src/lib/date-helpers.ts new file mode 100644 index 000000000..4ef8104d5 --- /dev/null +++ 
b/apps/hypersync-indexer/src/lib/date-helpers.ts @@ -0,0 +1,62 @@ +/** + * Date and timestamp utilities for time-series data processing. + */ + +import { SECONDS_IN_DAY } from "./enums.ts"; + +/** + * Truncate timestamp (seconds) to midnight UTC + */ +export const truncateTimestampToMidnight = (timestampSec: number): number => { + return Math.floor(timestampSec / SECONDS_IN_DAY) * SECONDS_IN_DAY; +}; + +/** + * Calculate cutoff timestamp for filtering data by days + */ +export const calculateCutoffTimestamp = (days: number): number => { + return Math.floor(Date.now() / 1000) - days * SECONDS_IN_DAY; +}; + +/** + * Normalize all timestamps in a Map to midnight UTC (seconds) + */ +export const normalizeMapTimestamps = ( + map: Map, +): Map => { + const normalized = new Map(); + map.forEach((value, ts) => { + normalized.set(truncateTimestampToMidnight(ts), value); + }); + return normalized; +}; + +/** + * Get effective start date, adjusting if no initial value exists. + * + * When querying time-series data with forward-fill, if there's no initial value + * before the requested start date, we should start from the first real data point + * to avoid returning zeros/nulls. + * + * @param params.referenceDate - Requested start date (after ?? startDate) + * @param params.datesFromDb - Array of timestamps from database + * @param params.hasInitialValue - Whether an initial value exists before referenceDate + * @returns Effective start date to use + */ +export function getEffectiveStartDate(params: { + referenceDate?: number; + datesFromDb: number[]; + hasInitialValue: boolean; +}): number | undefined { + const { referenceDate, datesFromDb, hasInitialValue } = params; + + if (!referenceDate) return undefined; + if (hasInitialValue || datesFromDb.length === 0) return referenceDate; + + const sortedDates = [...datesFromDb].sort((a, b) => a - b); + const firstRealDate = sortedDates[0]; + + return firstRealDate && referenceDate < firstRealDate + ? 
firstRealDate + : referenceDate; +} diff --git a/apps/hypersync-indexer/src/lib/enums.ts b/apps/hypersync-indexer/src/lib/enums.ts new file mode 100644 index 000000000..84f040ae6 --- /dev/null +++ b/apps/hypersync-indexer/src/lib/enums.ts @@ -0,0 +1,21 @@ +export const DaoIdEnum = { + AAVE: "AAVE", + UNI: "UNI", + ENS: "ENS", + ARB: "ARB", + OP: "OP", + GTC: "GTC", + NOUNS: "NOUNS", + TEST: "TEST", + SCR: "SCR", + COMP: "COMP", + OBOL: "OBOL", + ZK: "ZK", + SHU: "SHU", + FLUID: "FLUID", + LIL_NOUNS: "LIL_NOUNS", +} as const; + +export type DaoIdEnum = (typeof DaoIdEnum)[keyof typeof DaoIdEnum]; + +export const SECONDS_IN_DAY = 24 * 60 * 60; diff --git a/apps/hypersync-indexer/src/lib/utils.ts b/apps/hypersync-indexer/src/lib/utils.ts new file mode 100644 index 000000000..907649e51 --- /dev/null +++ b/apps/hypersync-indexer/src/lib/utils.ts @@ -0,0 +1,32 @@ +import * as chains from "viem/chains"; + +/** + * Calculates the absolute difference between two numbers + */ +export function delta(a: bigint, b: bigint): bigint { + return a > b ? a - b : b - a; +} + +/** + * Returns the minimum of two or more numbers + */ +export function min(...values: bigint[]): bigint { + if (values.length === 0) { + throw new Error("At least one value must be provided"); + } + return values.reduce((min, value) => (value < min ? value : min)); +} + +/** + * Returns the maximum of two or more numbers + */ +export function max(...values: bigint[]): bigint { + if (values.length === 0) { + throw new Error("At least one value must be provided"); + } + return values.reduce((max, value) => (value > max ? 
value : max)); +} + +export function getChain(chainId: number): chains.Chain | undefined { + return Object.values(chains).find((chain) => chain.id === chainId); +} diff --git a/apps/hypersync-indexer/tsconfig.json b/apps/hypersync-indexer/tsconfig.json new file mode 100644 index 000000000..d17640ae8 --- /dev/null +++ b/apps/hypersync-indexer/tsconfig.json @@ -0,0 +1,34 @@ +{ + "compilerOptions": { + // Type checking + "strict": true, + "noUncheckedIndexedAccess": true, + // Interop constraints + "verbatimModuleSyntax": false, + "esModuleInterop": true, + "isolatedModules": true, + "allowSyntheticDefaultImports": true, + "allowImportingTsExtensions": true, + "resolveJsonModule": true, + // Language and environment + "moduleResolution": "bundler", + "module": "ESNext", + "noEmit": true, + "lib": ["ES2022"], + "target": "ES2022", + "skipLibCheck": true, + "baseUrl": "." + }, + "ts-node": { + "compilerOptions": { + "module": "CommonJS", + "moduleResolution": "node" + } + }, + "include": [ + "src/eventHandlers/**/*.ts", + "src/lib/**/*.ts", + "generated/**/*.ts" + ], + "exclude": ["node_modules", "test"] +} diff --git a/apps/indexer/package.json b/apps/indexer/package.json index bf3dd7892..b8542865e 100644 --- a/apps/indexer/package.json +++ b/apps/indexer/package.json @@ -4,7 +4,6 @@ "private": true, "scripts": { "dev": "ponder dev", - "envio": "envio dev", "start": "ponder start --views-schema=anticapture --schema=$RAILWAY_DEPLOYMENT_ID", "db:list": "ponder db list", "db:prune": "ponder db prune", @@ -25,7 +24,6 @@ "@types/node": "^20.16.5", "@types/pg": "^8.15.6", "dotenv": "^16.5.0", - "envio": "^2.32.12", "eslint": "^9", "eslint-config-prettier": "^9.1.0", "eslint-plugin-prettier": "^5.2.1", diff --git a/apps/indexer/src/eventHandlers/delegation.ts b/apps/indexer/src/eventHandlers/delegation.ts index 142c1b215..586d7d963 100644 --- a/apps/indexer/src/eventHandlers/delegation.ts +++ b/apps/indexer/src/eventHandlers/delegation.ts @@ -1,21 +1,26 @@ -import type { 
handlerContext } from "../../generated/index.js"; -import type { EventType_t } from "../../generated/src/db/Enums.gen.ts"; -import type { Address, Hex } from "viem"; -import { getAddress, zeroAddress } from "viem"; +import { Context } from "ponder:registry"; +import { + accountBalance, + accountPower, + delegation, + feedEvent, + votingPowerHistory, +} from "ponder:schema"; +import { Address, getAddress, Hex, zeroAddress } from "viem"; import { BurningAddresses, CEXAddresses, DEXAddresses, LendingAddresses, -} from "../lib/constants.ts"; -import { DaoIdEnum } from "../lib/enums.ts"; +} from "@/lib/constants"; +import { DaoIdEnum } from "@/lib/enums"; import { createAddressSet, ensureAccountExists, ensureAccountsExist, -} from "./shared.ts"; +} from "./shared"; type DelegationAddressSets = { cex: ReadonlySet
; @@ -24,8 +29,22 @@ type DelegationAddressSets = { burning: ReadonlySet
; }; +/** + * ### Creates: + * - New `Account` records (for delegator and delegate if they don't exist) + * - New `Delegation` record with calculated delegated value and flags + * - New `AccountBalance` record (if delegator doesn't have one for this token) + * - New `AccountPower` record (if delegate doesn't have one for this DAO) + * - New `Transaction` record (if this transaction hasn't been processed) + * + * ### Updates: + * - `Delegation`: Adds to existing delegated value if record already exists + * - `AccountBalance`: Changes the delegate assignment for the delegator + * - `AccountPower`: Increments the delegate's delegation count + * - `Transaction`: Updates transaction flags if record already exists + */ export const delegateChanged = async ( - context: handlerContext, + context: Context, daoId: DaoIdEnum, args: { delegator: Address; @@ -53,14 +72,17 @@ export const delegateChanged = async ( const normalizedDelegator = getAddress(delegator); const normalizedDelegate = getAddress(delegate); + // Ensure all required accounts exist in parallel await ensureAccountsExist(context, [delegator, delegate]); - const delegatorBalanceId = `${normalizedDelegator}-${getAddress(tokenId)}`; - const storedBalance = _delegatorBalance + const delegatorBalance = _delegatorBalance ? { balance: _delegatorBalance } - : await context.AccountBalance.get(delegatorBalanceId); - const delegatedValue = storedBalance?.balance ?? 0n; + : await context.db.find(accountBalance, { + accountId: normalizedDelegator, + tokenId: getAddress(tokenId), + }); + // Pre-compute address lists for flag determination (normalized to checksum) const { cex, dex, lending, burning } = addressSets ?? 
{ cex: createAddressSet(Object.values(CEXAddresses[daoId] || {})), dex: createAddressSet(Object.values(DEXAddresses[daoId] || {})), @@ -68,90 +90,100 @@ export const delegateChanged = async ( burning: createAddressSet(Object.values(BurningAddresses[daoId] || {})), }; + // Determine flags for the delegation const isCex = cex.has(normalizedDelegator) || cex.has(normalizedDelegate); const isDex = dex.has(normalizedDelegator) || dex.has(normalizedDelegate); const isLending = lending.has(normalizedDelegator) || lending.has(normalizedDelegate); - const isTotal = + const isBurning = burning.has(normalizedDelegator) || burning.has(normalizedDelegate); + const isTotal = isBurning; - const delegationId = `${txHash}-${normalizedDelegator}-${normalizedDelegate}`; - const existingDelegation = await context.Delegation.get(delegationId); - context.Delegation.set({ - id: delegationId, - transactionHash: txHash, - daoId, - delegateAccountId: normalizedDelegate, - delegatorAccountId: normalizedDelegator, - delegatedValue: (existingDelegation?.delegatedValue ?? 0n) + delegatedValue, - previousDelegate: getAddress(previousDelegate), - timestamp, - logIndex, - isCex, - isDex, - isLending, - isTotal, - delegationType: undefined, - }); - - // Update delegator's balance record to point to new delegate - const existingBalance = await context.AccountBalance.get(delegatorBalanceId); - context.AccountBalance.set({ - id: delegatorBalanceId, - accountId: normalizedDelegator, - tokenId: getAddress(tokenId), - balance: existingBalance?.balance ?? 0n, - delegate: normalizedDelegate, - }); - - // Decrement previous delegate's count - if (previousDelegate !== zeroAddress) { - const prevPowerId = getAddress(previousDelegate); - const prevPower = await context.AccountPower.get(prevPowerId); - context.AccountPower.set({ - id: prevPowerId, - accountId: prevPowerId, + await context.db + .insert(delegation) + .values({ + transactionHash: txHash, daoId, - votingPower: prevPower?.votingPower ?? 
0n, - votesCount: prevPower?.votesCount ?? 0, - proposalsCount: prevPower?.proposalsCount ?? 0, - delegationsCount: Math.max(0, (prevPower?.delegationsCount ?? 0) - 1), - lastVoteTimestamp: prevPower?.lastVoteTimestamp ?? 0n, + delegateAccountId: normalizedDelegate, + delegatorAccountId: normalizedDelegator, + delegatedValue: delegatorBalance?.balance ?? 0n, + previousDelegate: getAddress(previousDelegate), + timestamp, + logIndex, + isCex, + isDex, + isLending, + isTotal, + }) + .onConflictDoUpdate((current) => ({ + delegatedValue: + current.delegatedValue + (delegatorBalance?.balance ?? 0n), + })); + + await context.db + .insert(accountBalance) + .values({ + accountId: normalizedDelegator, + tokenId: getAddress(tokenId), + delegate: normalizedDelegate, + balance: BigInt(0), + }) + .onConflictDoUpdate({ + delegate: normalizedDelegate, }); + + if (previousDelegate !== zeroAddress) { + await context.db + .insert(accountPower) + .values({ + accountId: getAddress(previousDelegate), + daoId, + }) + .onConflictDoUpdate((current) => ({ + delegationsCount: Math.max(0, current.delegationsCount - 1), + })); } - // Increment new delegate's count - const delegatePowerId = normalizedDelegate; - const delegatePower = await context.AccountPower.get(delegatePowerId); - context.AccountPower.set({ - id: delegatePowerId, - accountId: normalizedDelegate, - daoId, - votingPower: delegatePower?.votingPower ?? 0n, - votesCount: delegatePower?.votesCount ?? 0, - proposalsCount: delegatePower?.proposalsCount ?? 0, - delegationsCount: (delegatePower?.delegationsCount ?? 0) + 1, - lastVoteTimestamp: delegatePower?.lastVoteTimestamp ?? 
0n, - }); + await context.db + .insert(accountPower) + .values({ + accountId: normalizedDelegate, + daoId, + delegationsCount: 1, + }) + .onConflictDoUpdate((current) => ({ + delegationsCount: current.delegationsCount + 1, + })); - context.FeedEvent.set({ - id: `${txHash}-${logIndex}`, + await context.db.insert(feedEvent).values({ txHash, logIndex, - eventType: "DELEGATION" as EventType_t, - value: delegatedValue, + type: "DELEGATION", + value: delegatorBalance?.balance ?? 0n, timestamp, metadata: { delegator: normalizedDelegator, delegate: normalizedDelegate, previousDelegate: getAddress(previousDelegate), - amount: delegatedValue.toString(), + amount: delegatorBalance?.balance ?? 0n, }, }); }; +/** + * ### Creates: + * - New `Account` record (for delegate if it doesn't exist) + * - New `VotingPowerHistory` record with voting power change details + * - New `AccountPower` record (if delegate doesn't have one for this DAO) + * - New daily metric records (via `storeDailyBucket`) + * + * ### Updates: + * - `AccountPower`: Sets the delegate's current voting power to new balance + * - `Token`: Adjusts delegated supply by the balance delta + * - Daily bucket metrics for delegated supply tracking + */ export const delegatedVotesChanged = async ( - context: handlerContext, + context: Context, daoId: DaoIdEnum, args: { delegate: Address; @@ -169,43 +201,43 @@ export const delegatedVotesChanged = async ( await ensureAccountExists(context, delegate); - const diff = newBalance - oldBalance; - const deltaMod = diff > 0n ? diff : -diff; - - context.VotingPowerHistory.set({ - id: `${txHash}-${normalizedDelegate}-${logIndex}`, - daoId, - transactionHash: txHash, - accountId: normalizedDelegate, - votingPower: newBalance, - delta: diff, - deltaMod, - timestamp, - logIndex, - }); + const delta = newBalance - oldBalance; + const deltaMod = delta > 0n ? 
delta : -delta; - const existingPower = await context.AccountPower.get(normalizedDelegate); - context.AccountPower.set({ - id: normalizedDelegate, - accountId: normalizedDelegate, - daoId, - votingPower: newBalance, - votesCount: existingPower?.votesCount ?? 0, - proposalsCount: existingPower?.proposalsCount ?? 0, - delegationsCount: existingPower?.delegationsCount ?? 0, - lastVoteTimestamp: existingPower?.lastVoteTimestamp ?? 0n, - }); + await context.db + .insert(votingPowerHistory) + .values({ + daoId, + transactionHash: txHash, + accountId: normalizedDelegate, + votingPower: newBalance, + delta, + deltaMod, + timestamp, + logIndex, + }) + .onConflictDoNothing(); + + await context.db + .insert(accountPower) + .values({ + accountId: normalizedDelegate, + daoId, + votingPower: newBalance, + }) + .onConflictDoUpdate(() => ({ + votingPower: newBalance, + })); - context.FeedEvent.set({ - id: `${txHash}-${logIndex}`, + await context.db.insert(feedEvent).values({ txHash, logIndex, - eventType: "DELEGATION_VOTES_CHANGED" as EventType_t, + type: "DELEGATION_VOTES_CHANGED", value: deltaMod, timestamp, metadata: { - delta: diff.toString(), - deltaMod: deltaMod.toString(), + delta, + deltaMod, delegate: normalizedDelegate, }, }); diff --git a/apps/indexer/src/eventHandlers/index.ts b/apps/indexer/src/eventHandlers/index.ts index a5e8d7c14..93374519b 100644 --- a/apps/indexer/src/eventHandlers/index.ts +++ b/apps/indexer/src/eventHandlers/index.ts @@ -1,3 +1,3 @@ -export * from "./transfer.ts"; -export * from "./delegation.ts"; -export * from "./voting.ts"; +export * from "./transfer"; +export * from "./delegation"; +export * from "./voting"; diff --git a/apps/indexer/src/eventHandlers/metrics/circulating.ts b/apps/indexer/src/eventHandlers/metrics/circulating.ts index 24c7b2479..cce3b94d8 100644 --- a/apps/indexer/src/eventHandlers/metrics/circulating.ts +++ b/apps/indexer/src/eventHandlers/metrics/circulating.ts @@ -1,27 +1,32 @@ -import type { Address } from "viem"; 
-import { getAddress } from "viem"; -import type { handlerContext } from "../../../generated/index.js"; +import { Address, getAddress } from "viem"; +import { token } from "ponder:schema"; +import { Context } from "ponder:registry"; -import { storeDailyBucket } from "../shared.ts"; -import { MetricTypesEnum } from "../../lib/constants.ts"; +import { storeDailyBucket } from "../shared"; +import { MetricTypesEnum } from "@/lib/constants"; export const updateCirculatingSupply = async ( - context: handlerContext, + context: Context, daoId: string, tokenAddress: Address, timestamp: bigint, ) => { - const tokenId = getAddress(tokenAddress); - const token = await context.Token.get(tokenId); - if (!token) return false; + let currentCirculatingSupply = 0n; + let newCirculatingSupply = 0n; + await context.db + .update(token, { id: getAddress(tokenAddress) }) + .set((current) => { + currentCirculatingSupply = current.circulatingSupply; + newCirculatingSupply = + current.totalSupply - current.treasury - current.nonCirculatingSupply; + return { + circulatingSupply: newCirculatingSupply, + }; + }); - const currentCirculatingSupply = token.circulatingSupply; - const newCirculatingSupply = - token.totalSupply - token.treasury - token.nonCirculatingSupply; - - if (currentCirculatingSupply === newCirculatingSupply) return false; - - context.Token.set({ ...token, circulatingSupply: newCirculatingSupply }); + if (currentCirculatingSupply === newCirculatingSupply) { + return false; + } await storeDailyBucket( context, diff --git a/apps/indexer/src/eventHandlers/metrics/delegated.ts b/apps/indexer/src/eventHandlers/metrics/delegated.ts index 675d39025..d41c64f67 100644 --- a/apps/indexer/src/eventHandlers/metrics/delegated.ts +++ b/apps/indexer/src/eventHandlers/metrics/delegated.ts @@ -1,26 +1,28 @@ -import type { Address } from "viem"; -import { getAddress } from "viem"; -import type { handlerContext } from "../../../generated/index.js"; +import { Address, getAddress } from "viem"; 
+import { token } from "ponder:schema"; +import { Context } from "ponder:registry"; -import { DaoIdEnum } from "../../lib/enums.ts"; -import { MetricTypesEnum } from "../../lib/constants.ts"; -import { storeDailyBucket } from "../shared.ts"; +import { DaoIdEnum } from "@/lib/enums"; +import { MetricTypesEnum } from "@/lib/constants"; +import { storeDailyBucket } from "@/eventHandlers/shared"; export const updateDelegatedSupply = async ( - context: handlerContext, + context: Context, daoId: DaoIdEnum, tokenId: Address, amount: bigint, timestamp: bigint, ) => { - const normalizedId = getAddress(tokenId); - const token = await context.Token.get(normalizedId); - if (!token) return; + let currentDelegatedSupply = 0n; - const currentDelegatedSupply = token.delegatedSupply; - const newDelegatedSupply = currentDelegatedSupply + amount; - - context.Token.set({ ...token, delegatedSupply: newDelegatedSupply }); + const { delegatedSupply: newDelegatedSupply } = await context.db + .update(token, { id: getAddress(tokenId) }) + .set((current) => { + currentDelegatedSupply = current.delegatedSupply; + return { + delegatedSupply: current.delegatedSupply + amount, + }; + }); await storeDailyBucket( context, diff --git a/apps/indexer/src/eventHandlers/metrics/index.ts b/apps/indexer/src/eventHandlers/metrics/index.ts index a4126ee00..b6dcd69be 100644 --- a/apps/indexer/src/eventHandlers/metrics/index.ts +++ b/apps/indexer/src/eventHandlers/metrics/index.ts @@ -1,4 +1,4 @@ -export * from "./delegated.ts"; -export * from "./total.ts"; -export * from "./supply.ts"; -export * from "./circulating.ts"; +export * from "./delegated"; +export * from "./total"; +export * from "./supply"; +export * from "./circulating"; diff --git a/apps/indexer/src/eventHandlers/metrics/supply.ts b/apps/indexer/src/eventHandlers/metrics/supply.ts index ecac65839..2067b7058 100644 --- a/apps/indexer/src/eventHandlers/metrics/supply.ts +++ b/apps/indexer/src/eventHandlers/metrics/supply.ts @@ -1,16 +1,12 @@ 
-import type { Address } from "viem"; -import { getAddress } from "viem"; -import type { handlerContext } from "../../../generated/index.js"; +import { Address, getAddress } from "viem"; +import { token } from "ponder:schema"; +import { Context } from "ponder:registry"; -import { - AddressCollection, - storeDailyBucket, - toAddressSet, -} from "../shared.ts"; -import { MetricTypesEnum } from "../../lib/constants.ts"; +import { AddressCollection, storeDailyBucket, toAddressSet } from "../shared"; +import { MetricTypesEnum } from "@/lib/constants"; export const updateSupplyMetric = async ( - context: handlerContext, + context: Context, supplyField: | "lendingSupply" | "cexSupply" @@ -31,16 +27,18 @@ export const updateSupplyMetric = async ( const isFromRelevant = normalizedAddressList.has(getAddress(from)); if ((isToRelevant || isFromRelevant) && !(isToRelevant && isFromRelevant)) { - const tokenId = getAddress(tokenAddress); - const token = await context.Token.get(tokenId); - if (!token) return false; - - const currentSupply = token[supplyField]; - const newSupply = isToRelevant - ? currentSupply + value - : currentSupply - value; - - context.Token.set({ ...token, [supplyField]: newSupply }); + let currentSupply: bigint = 0n; + + const { [supplyField]: newSupply } = await context.db + .update(token, { id: getAddress(tokenAddress) }) + .set((current) => { + currentSupply = current[supplyField]; + return { + [supplyField]: isToRelevant + ? 
current[supplyField] + value + : current[supplyField] - value, + }; + }); await storeDailyBucket( context, diff --git a/apps/indexer/src/eventHandlers/metrics/total.ts b/apps/indexer/src/eventHandlers/metrics/total.ts index d80f01a61..7019c9e0e 100644 --- a/apps/indexer/src/eventHandlers/metrics/total.ts +++ b/apps/indexer/src/eventHandlers/metrics/total.ts @@ -1,17 +1,17 @@ -import type { Address } from "viem"; -import { getAddress } from "viem"; -import type { handlerContext } from "../../../generated/index.js"; +import { Address, getAddress } from "viem"; +import { token } from "ponder:schema"; +import { Context } from "ponder:registry"; -import { DaoIdEnum } from "../../lib/enums.ts"; -import { MetricTypesEnum } from "../../lib/constants.ts"; +import { DaoIdEnum } from "@/lib/enums"; +import { MetricTypesEnum } from "@/lib/constants"; import { AddressCollection, storeDailyBucket, toAddressSet, -} from "../shared.ts"; +} from "@/eventHandlers/shared"; export const updateTotalSupply = async ( - context: handlerContext, + context: Context, addressList: AddressCollection, metricType: MetricTypesEnum, from: Address, @@ -30,16 +30,19 @@ export const updateTotalSupply = async ( if (isTotalSupplyTransaction) { const isBurningTokens = normalizedAddressList.has(getAddress(to)); - const tokenId = getAddress(tokenAddress); - const token = await context.Token.get(tokenId); - if (!token) return false; - - const currentTotalSupply = token.totalSupply; - const newTotalSupply = isBurningTokens - ? currentTotalSupply - value - : currentTotalSupply + value; - - context.Token.set({ ...token, totalSupply: newTotalSupply }); + let currentTotalSupply = 0n; + const newTotalSupply = ( + await context.db + .update(token, { id: getAddress(tokenAddress) }) + .set((row) => { + currentTotalSupply = row.totalSupply; + return { + totalSupply: isBurningTokens + ? 
row.totalSupply - value + : row.totalSupply + value, + }; + }) + ).totalSupply; await storeDailyBucket( context, diff --git a/apps/indexer/src/eventHandlers/shared.ts b/apps/indexer/src/eventHandlers/shared.ts index 5771e329b..8efc79295 100644 --- a/apps/indexer/src/eventHandlers/shared.ts +++ b/apps/indexer/src/eventHandlers/shared.ts @@ -1,22 +1,10 @@ -import type { Address } from "viem"; -import { getAddress } from "viem"; -import type { handlerContext } from "../../generated/index.js"; -import type { MetricType_t } from "../../generated/src/db/Enums.gen.ts"; +import { Address, getAddress } from "viem"; +import { Context } from "ponder:registry"; +import { account, daoMetricsDayBucket, transaction } from "ponder:schema"; -import { MetricTypesEnum } from "../lib/constants.ts"; -import { delta, max, min } from "../lib/utils.ts"; -import { truncateTimestampToMidnight } from "../lib/date-helpers.ts"; - -const METRIC_TYPE_MAP: Record = { - [MetricTypesEnum.TOTAL_SUPPLY]: "total", - [MetricTypesEnum.DELEGATED_SUPPLY]: "delegated", - [MetricTypesEnum.CEX_SUPPLY]: "cex", - [MetricTypesEnum.DEX_SUPPLY]: "dex", - [MetricTypesEnum.LENDING_SUPPLY]: "lending", - [MetricTypesEnum.CIRCULATING_SUPPLY]: "circulating", - [MetricTypesEnum.TREASURY]: "treasury", - [MetricTypesEnum.NON_CIRCULATING_SUPPLY]: "non_circulating", -}; +import { MetricTypesEnum } from "@/lib/constants"; +import { delta, max, min } from "@/lib/utils"; +import { truncateTimestampToMidnight } from "@/lib/date-helpers"; export type AddressCollection = readonly Address[] | ReadonlySet
; @@ -27,7 +15,7 @@ const normalizeAddressCollection = ( return [...new Set(addresses.map((address) => getAddress(address)))]; } - return [...(addresses as ReadonlySet
)]; + return [...addresses]; }; export const createAddressSet = ( @@ -46,28 +34,37 @@ export const toAddressSet = ( }; export const ensureAccountExists = async ( - context: handlerContext, + context: Context, address: Address, ): Promise => { - await context.Account.getOrCreate({ id: getAddress(address) }); + await context.db + .insert(account) + .values({ + id: getAddress(address), + }) + .onConflictDoNothing(); }; /** - * Helper function to ensure multiple accounts exist + * Helper function to ensure multiple accounts exist in parallel */ export const ensureAccountsExist = async ( - context: handlerContext, + context: Context, addresses: Address[], ): Promise => { - const normalized = normalizeAddressCollection(addresses); - if (normalized.length === 0) return; - await Promise.all( - normalized.map((id) => context.Account.getOrCreate({ id })), - ); + const normalizedAddresses = normalizeAddressCollection(addresses); + if (normalizedAddresses.length === 0) { + return; + } + + await context.db + .insert(account) + .values(normalizedAddresses.map((id) => ({ id }))) + .onConflictDoNothing(); }; export const storeDailyBucket = async ( - context: handlerContext, + context: Context, metricType: MetricTypesEnum, currentValue: bigint, newValue: bigint, @@ -75,51 +72,42 @@ export const storeDailyBucket = async ( timestamp: bigint, tokenAddress: Address, ) => { - const vol = delta(newValue, currentValue); - const date = BigInt(truncateTimestampToMidnight(Number(timestamp))); - const tokenId = getAddress(tokenAddress); - const id = `${date}-${tokenId}-${metricType}`; - - const existing = await context.DaoMetricsDayBucket.get(id); - if (existing) { - context.DaoMetricsDayBucket.set({ - ...existing, - average: - (existing.average * BigInt(existing.count) + newValue) / - BigInt(existing.count + 1), - high: max(newValue, existing.high), - low: min(newValue, existing.low), - closeValue: newValue, - volume: existing.volume + vol, - count: existing.count + 1, - lastUpdate: 
timestamp, - }); - } else { - context.DaoMetricsDayBucket.set({ - id, - date, - tokenId, - metricType: METRIC_TYPE_MAP[metricType], + const volume = delta(newValue, currentValue); + await context.db + .insert(daoMetricsDayBucket) + .values({ + date: BigInt(truncateTimestampToMidnight(Number(timestamp))), + tokenId: getAddress(tokenAddress), + metricType, daoId, average: newValue, - openValue: newValue, + open: newValue, high: newValue, low: newValue, - closeValue: newValue, - volume: vol, + close: newValue, + volume, count: 1, lastUpdate: timestamp, - }); - } + }) + .onConflictDoUpdate((row) => ({ + average: + (row.average * BigInt(row.count) + newValue) / BigInt(row.count + 1), + high: max(newValue, row.high), + low: min(newValue, row.low), + close: newValue, + volume: row.volume + volume, + count: row.count + 1, + lastUpdate: timestamp, + })); }; export const handleTransaction = async ( - context: handlerContext, + context: Context, transactionHash: string, from: Address, to: Address, timestamp: bigint, - addresses: AddressCollection, + addresses: AddressCollection, // The addresses involved in this event { cex = [], dex = [], @@ -130,7 +118,12 @@ export const handleTransaction = async ( dex?: AddressCollection; lending?: AddressCollection; burning?: AddressCollection; - } = {}, + } = { + cex: [], + dex: [], + lending: [], + burning: [], + }, ) => { const normalizedAddresses = normalizeAddressCollection(addresses); const normalizedCex = toAddressSet(cex); @@ -151,16 +144,22 @@ export const handleTransaction = async ( return; } - const existing = await context.Transaction.get(transactionHash); - context.Transaction.set({ - id: transactionHash, - transactionHash, - fromAddress: getAddress(from), - toAddress: getAddress(to), - timestamp, - isCex: (existing?.isCex ?? false) || isCex, - isDex: (existing?.isDex ?? false) || isDex, - isLending: (existing?.isLending ?? false) || isLending, - isTotal: (existing?.isTotal ?? 
false) || isTotal, - }); + await context.db + .insert(transaction) + .values({ + transactionHash, + fromAddress: getAddress(from), + toAddress: getAddress(to), + timestamp, + isCex, + isDex, + isLending, + isTotal, + }) + .onConflictDoUpdate((existing) => ({ + isCex: existing.isCex || isCex, + isDex: existing.isDex || isDex, + isLending: existing.isLending || isLending, + isTotal: existing.isTotal || isTotal, + })); }; diff --git a/apps/indexer/src/eventHandlers/transfer.ts b/apps/indexer/src/eventHandlers/transfer.ts index f25566823..d72ed1664 100644 --- a/apps/indexer/src/eventHandlers/transfer.ts +++ b/apps/indexer/src/eventHandlers/transfer.ts @@ -1,18 +1,37 @@ -import type { handlerContext } from "../../generated/index.js"; -import type { EventType_t } from "../../generated/src/db/Enums.gen.ts"; -import type { Address, Hex } from "viem"; -import { getAddress, zeroAddress } from "viem"; +import { Context } from "ponder:registry"; +import { + accountBalance, + balanceHistory, + feedEvent, + transfer, +} from "ponder:schema"; +import { Address, getAddress, Hex, zeroAddress } from "viem"; -import { DaoIdEnum } from "../lib/enums.ts"; +import { DaoIdEnum } from "@/lib/enums"; -import { - AddressCollection, - ensureAccountsExist, - toAddressSet, -} from "./shared.ts"; +import { AddressCollection, ensureAccountsExist, toAddressSet } from "./shared"; +/** + * ### Creates: + * - New `Account` records (for sender and receiver if they don't exist) + * - New `accountBalance` record (for receiver if it doesn't exist) + * - New `accountBalance` record (for sender if it doesn't exist and not minting) + * - New `transfer` record with transaction details and classification flags + * - New daily metric records for supply tracking (via `updateSupplyMetric` calls) + * + * ### Updates: + * - `accountBalance`: Increments receiver's token balance by transfer value + * - `accountBalance`: Decrements sender's token balance by transfer value (if not minting from zero address) + * - 
`Token`: Adjusts lending supply based on transfers involving lending addresses + * - `Token`: Adjusts CEX supply based on transfers involving centralized exchange addresses + * - `Token`: Adjusts DEX supply based on transfers involving decentralized exchange addresses + * - `Token`: Adjusts treasury balance based on transfers involving treasury addresses + * - `Token`: Adjusts total supply based on transfers involving burning addresses + * - `Token`: Recalculates circulating supply after all supply changes + * - Daily bucket metrics for all supply types (lending, CEX, DEX, treasury, total, circulating) + */ export const tokenTransfer = async ( - context: handlerContext, + context: Context, daoId: DaoIdEnum, args: { from: Address; @@ -51,59 +70,58 @@ export const tokenTransfer = async ( await ensureAccountsExist(context, [from, to]); - // Upsert receiver balance and track current balance for history - const receiverBalanceId = `${normalizedTo}-${normalizedTokenId}`; - const existingReceiverBalance = - await context.AccountBalance.get(receiverBalanceId); - const currentReceiverBalance = existingReceiverBalance - ? existingReceiverBalance.balance + value - : value; - context.AccountBalance.set({ - id: receiverBalanceId, - accountId: normalizedTo, - tokenId: normalizedTokenId, - balance: currentReceiverBalance, - delegate: existingReceiverBalance?.delegate ?? zeroAddress, - }); - - context.BalanceHistory.set({ - id: `${transactionHash}-${normalizedTo}-${logIndex}`, - daoId, - transactionHash, - accountId: normalizedTo, - balance: currentReceiverBalance, - delta: value, - deltaMod: value > 0n ? value : -value, - timestamp, - logIndex, - }); - - if (from !== zeroAddress) { - const senderBalanceId = `${normalizedFrom}-${normalizedTokenId}`; - const existingSenderBalance = - await context.AccountBalance.get(senderBalanceId); - const currentSenderBalance = existingSenderBalance - ? 
existingSenderBalance.balance - value - : -value; - context.AccountBalance.set({ - id: senderBalanceId, - accountId: normalizedFrom, + const { balance: currentReceiverBalance } = await context.db + .insert(accountBalance) + .values({ + accountId: normalizedTo, tokenId: normalizedTokenId, - balance: currentSenderBalance, - delegate: existingSenderBalance?.delegate ?? zeroAddress, - }); + balance: value, + delegate: zeroAddress, + }) + .onConflictDoUpdate((current) => ({ + balance: current.balance + value, + })); - context.BalanceHistory.set({ - id: `${transactionHash}-${normalizedFrom}-${logIndex}`, + await context.db + .insert(balanceHistory) + .values({ daoId, - transactionHash, - accountId: normalizedFrom, - balance: currentSenderBalance, - delta: -value, + transactionHash: transactionHash, + accountId: normalizedTo, + balance: currentReceiverBalance, + delta: value, deltaMod: value > 0n ? value : -value, timestamp, logIndex, - }); + }) + .onConflictDoNothing(); + + if (from !== zeroAddress) { + const { balance: currentSenderBalance } = await context.db + .insert(accountBalance) + .values({ + accountId: normalizedFrom, + tokenId: normalizedTokenId, + balance: -value, + delegate: zeroAddress, + }) + .onConflictDoUpdate((current) => ({ + balance: current.balance - value, + })); + + await context.db + .insert(balanceHistory) + .values({ + daoId, + transactionHash: transactionHash, + accountId: normalizedFrom, + balance: currentSenderBalance, + delta: -value, + deltaMod: value > 0n ? 
value : -value, + timestamp, + logIndex, + }) + .onConflictDoNothing(); } const normalizedCex = toAddressSet(cex); @@ -111,39 +129,43 @@ export const tokenTransfer = async ( const normalizedLending = toAddressSet(lending); const normalizedBurning = toAddressSet(burning); - const transferId = `${transactionHash}-${normalizedFrom}-${normalizedTo}`; - const existingTransfer = await context.Transfer.get(transferId); - context.Transfer.set({ - id: transferId, - transactionHash, - daoId, - tokenId: normalizedTokenId, - amount: (existingTransfer?.amount ?? 0n) + value, - fromAccountId: normalizedFrom, - toAccountId: normalizedTo, - timestamp, - logIndex, - isCex: normalizedCex.has(normalizedFrom) || normalizedCex.has(normalizedTo), - isDex: normalizedDex.has(normalizedFrom) || normalizedDex.has(normalizedTo), - isLending: - normalizedLending.has(normalizedFrom) || - normalizedLending.has(normalizedTo), - isTotal: - normalizedBurning.has(normalizedFrom) || - normalizedBurning.has(normalizedTo), - }); + await context.db + .insert(transfer) + .values({ + transactionHash, + daoId, + tokenId: normalizedTokenId, + amount: value, + fromAccountId: normalizedFrom, + toAccountId: normalizedTo, + timestamp, + logIndex, + isCex: + normalizedCex.has(normalizedFrom) || normalizedCex.has(normalizedTo), + isDex: + normalizedDex.has(normalizedFrom) || normalizedDex.has(normalizedTo), + isLending: + normalizedLending.has(normalizedFrom) || + normalizedLending.has(normalizedTo), + isTotal: + normalizedBurning.has(normalizedFrom) || + normalizedBurning.has(normalizedTo), + }) + .onConflictDoUpdate((current) => ({ + amount: current.amount + value, + })); - context.FeedEvent.set({ - id: `${transactionHash}-${logIndex}`, + // Insert feed event for activity feed + await context.db.insert(feedEvent).values({ txHash: transactionHash, logIndex, - eventType: "TRANSFER" as EventType_t, + type: "TRANSFER", value, timestamp, metadata: { from: normalizedFrom, to: normalizedTo, - amount: 
value.toString(), + amount: value, }, }); }; diff --git a/apps/indexer/src/eventHandlers/voting.ts b/apps/indexer/src/eventHandlers/voting.ts index d3b82ad26..c2ff06422 100644 --- a/apps/indexer/src/eventHandlers/voting.ts +++ b/apps/indexer/src/eventHandlers/voting.ts @@ -1,14 +1,32 @@ -import type { handlerContext } from "../../generated/index.js"; -import type { EventType_t } from "../../generated/src/db/Enums.gen.ts"; -import type { Address, Hex } from "viem"; -import { getAddress } from "viem"; +import { Context } from "ponder:registry"; +import { + accountPower, + feedEvent, + proposalsOnchain, + votesOnchain, +} from "ponder:schema"; +import { Address, getAddress, Hex } from "viem"; -import { ProposalStatus } from "../lib/constants.ts"; +import { ProposalStatus } from "@/lib/constants"; -import { ensureAccountExists } from "./shared.ts"; +import { ensureAccountExists } from "./shared"; +/** + * ### Creates: + * - New `Account` record (for voter if it doesn't exist) + * - New `AccountPower` record (if voter doesn't have one for this DAO) + * - New `votesOnchain` record with vote details (transaction hash, support, voting power, reason) + * + * ### Updates: + * - `AccountPower`: Increments voter's total vote count by 1 + * - `AccountPower`: Sets last vote timestamp to current vote timestamp + * - `AccountPower`: Sets first vote timestamp (only if voter has never voted before) + * - `proposalsOnchain`: Increments `againstVotes` if support is 0 (against) + * - `proposalsOnchain`: Increments `forVotes` if support is 1 (for) + * - `proposalsOnchain`: Increments `abstainVotes` if support is 2 (abstain) + */ export const voteCast = async ( - context: handlerContext, + context: Context, daoId: string, args: { proposalId: string; @@ -34,26 +52,26 @@ export const voteCast = async ( await ensureAccountExists(context, voter); - const normalizedVoter = getAddress(voter); - const powerId = normalizedVoter; - const existingPower = await context.AccountPower.get(powerId); - 
context.AccountPower.set({ - id: powerId, - accountId: normalizedVoter, - daoId, - votingPower: existingPower?.votingPower ?? 0n, - votesCount: (existingPower?.votesCount ?? 0) + 1, - proposalsCount: existingPower?.proposalsCount ?? 0, - delegationsCount: existingPower?.delegationsCount ?? 0, - lastVoteTimestamp: timestamp, - }); + // Update account power with vote statistics + await context.db + .insert(accountPower) + .values({ + accountId: getAddress(voter), + daoId, + votesCount: 1, + lastVoteTimestamp: timestamp, + }) + .onConflictDoUpdate((current) => ({ + votesCount: current.votesCount + 1, + lastVoteTimestamp: timestamp, + })); - context.VoteOnchain.set({ - id: `${normalizedVoter}-${proposalId}`, - txHash, + // Create vote record + await context.db.insert(votesOnchain).values({ + txHash: txHash, daoId, proposalId, - voterAccountId: normalizedVoter, + voterAccountId: getAddress(voter), support: support.toString(), votingPower, reason, @@ -61,49 +79,61 @@ export const voteCast = async ( }); // Update proposal vote totals - const proposal = await context.ProposalOnchain.get(proposalId); - if (proposal) { - context.ProposalOnchain.set({ - ...proposal, - againstVotes: proposal.againstVotes + (support === 0 ? votingPower : 0n), - forVotes: proposal.forVotes + (support === 1 ? votingPower : 0n), - abstainVotes: proposal.abstainVotes + (support === 2 ? votingPower : 0n), - }); - } + await context.db + .update(proposalsOnchain, { id: proposalId }) + .set((current) => ({ + againstVotes: current.againstVotes + (support === 0 ? votingPower : 0n), + forVotes: current.forVotes + (support === 1 ? votingPower : 0n), + abstainVotes: current.abstainVotes + (support === 2 ? 
votingPower : 0n), + })); - context.FeedEvent.set({ - id: `${txHash}-${logIndex}`, + const proposal = await context.db.find(proposalsOnchain, { id: proposalId }); + + await context.db.insert(feedEvent).values({ txHash, logIndex, - eventType: "VOTE" as EventType_t, + type: "VOTE", value: votingPower, timestamp, metadata: { - voter: normalizedVoter, + voter: getAddress(voter), reason, support, - votingPower: votingPower.toString(), + votingPower, proposalId, - title: proposal?.title ?? null, + title: proposal?.title ?? undefined, }, }); }; const MAX_TITLE_LENGTH = 200; +/** + * Extracts a proposal title from a markdown description. + * + * Strategy: + * 1. Normalize literal `\n` sequences to real newlines (some proposers + * submit descriptions with escaped newlines). + * 2. If the first non-empty line is an H1 (`# Title`), use it. + * 3. Otherwise, use the first non-empty line that is not a section header + * (H2+), truncated to MAX_TITLE_LENGTH characters. + */ function parseProposalTitle(description: string): string { + // Normalize literal "\n" (two chars) into real newlines const normalized = description.replace(/\\n/g, "\n"); const lines = normalized.split("\n"); + // Pass 1: look for an H1 among leading lines (before any content) for (const line of lines) { const trimmed = line.trim(); if (!trimmed) continue; if (/^# /.test(trimmed)) { return trimmed.replace(/^# +/, ""); } - break; + break; // stop at first non-empty, non-H1 line } + // Pass 2: no H1 found — use first non-empty, non-header line for (const line of lines) { const trimmed = line.trim(); if (!trimmed || /^#{1,6}\s/.test(trimmed)) continue; @@ -115,8 +145,21 @@ function parseProposalTitle(description: string): string { return ""; } +/** + * ### Creates: + * - New `Account` record (for proposer if it doesn't exist) + * - New `proposalsOnchain` record with proposal details (targets, values, signatures, calldatas, blocks, description, status) + * - New `AccountPower` record (if proposer doesn't have 
one for this DAO) + * + * ### Updates: + * - `AccountPower`: Increments proposer's total proposals count by 1 + * + * ### Calculates: + * - Proposal end timestamp based on block delta and average block time + * - Sets initial proposal status to PENDING + */ export const proposalCreated = async ( - context: handlerContext, + context: Context, daoId: string, blockTime: number, args: { @@ -156,14 +199,13 @@ export const proposalCreated = async ( const title = parseProposalTitle(description); const blockDelta = parseInt(endBlock) - Number(blockNumber); - - context.ProposalOnchain.set({ + await context.db.insert(proposalsOnchain).values({ id: proposalId, txHash, daoId, proposerAccountId: getAddress(proposer), targets: targets.map((a) => getAddress(a)), - values: values.map((v) => v.toString()), + values, signatures, calldatas, startBlock: parseInt(startBlock), @@ -175,54 +217,56 @@ export const proposalCreated = async ( status: ProposalStatus.PENDING, endTimestamp: timestamp + BigInt(blockDelta * blockTime), proposalType: args.proposalType, - forVotes: 0n, - againstVotes: 0n, - abstainVotes: 0n, }); - const powerId = getAddress(proposer); - const existingPower = await context.AccountPower.get(powerId); - const proposerVotingPower = existingPower?.votingPower ?? 0n; - context.AccountPower.set({ - id: powerId, - accountId: powerId, - daoId, - votingPower: proposerVotingPower, - votesCount: existingPower?.votesCount ?? 0, - proposalsCount: (existingPower?.proposalsCount ?? 0) + 1, - delegationsCount: existingPower?.delegationsCount ?? 0, - lastVoteTimestamp: existingPower?.lastVoteTimestamp ?? 
0n, - }); + // Update proposer's proposal count + const { votingPower: proposerVotingPower } = await context.db + .insert(accountPower) + .values({ + accountId: getAddress(proposer), + daoId, + proposalsCount: 1, + }) + .onConflictDoUpdate((current) => ({ + proposalsCount: current.proposalsCount + 1, + })); - context.FeedEvent.set({ - id: `${txHash}-${logIndex}`, + // Insert feed event for activity feed + // Proposals are always high relevance as they are significant governance actions + await context.db.insert(feedEvent).values({ txHash, logIndex, - eventType: "PROPOSAL" as EventType_t, - value: 0n, + type: "PROPOSAL", timestamp, metadata: { id: proposalId, proposer: getAddress(proposer), - votingPower: proposerVotingPower.toString(), + votingPower: proposerVotingPower, title, }, }); }; +/** + * ### Updates: + * - `proposalsOnchain`: Sets the proposal status to the provided status value + */ export const updateProposalStatus = async ( - context: handlerContext, + context: Context, proposalId: string, status: string, ) => { - const proposal = await context.ProposalOnchain.get(proposalId); - if (proposal) { - context.ProposalOnchain.set({ ...proposal, status }); - } + await context.db.update(proposalsOnchain, { id: proposalId }).set({ + status, + }); }; +/** + * ### Updates: + * - `proposalsOnchain`: Sets the new deadline (endBlock) and endTimestamp + */ export const proposalExtended = async ( - context: handlerContext, + context: Context, proposalId: string, blockTime: number, extendedDeadline: bigint, @@ -230,32 +274,32 @@ export const proposalExtended = async ( logIndex: number, timestamp: bigint, ) => { - const proposal = await context.ProposalOnchain.get(proposalId); - if (!proposal) return; + let endTimestamp: bigint | undefined; - const endTimestamp = - proposal.endTimestamp + - BigInt((Number(extendedDeadline) - proposal.endBlock) * blockTime); - - context.ProposalOnchain.set({ - ...proposal, - endBlock: Number(extendedDeadline), - endTimestamp, + await 
context.db.update(proposalsOnchain, { id: proposalId }).set((row) => { + endTimestamp = + row.endTimestamp + + BigInt((Number(extendedDeadline) - row.endBlock) * blockTime); + return { + row, + endBlock: Number(extendedDeadline), + endTimestamp, + }; }); - context.FeedEvent.set({ - id: `${txHash}-${logIndex}`, + const proposal = await context.db.find(proposalsOnchain, { id: proposalId }); + + await context.db.insert(feedEvent).values({ txHash, logIndex, - eventType: "PROPOSAL_EXTENDED" as EventType_t, - value: 0n, + type: "PROPOSAL_EXTENDED", timestamp, metadata: { id: proposalId, - title: proposal.title, + title: proposal?.title ?? undefined, endBlock: Number(extendedDeadline), - endTimestamp: endTimestamp.toString(), - proposer: getAddress(proposal.proposerAccountId), + endTimestamp, + proposer: getAddress(proposal!.proposerAccountId), }, }); }; diff --git a/apps/indexer/src/lib/blockTime.ts b/apps/indexer/src/lib/blockTime.ts new file mode 100644 index 000000000..e765535e8 --- /dev/null +++ b/apps/indexer/src/lib/blockTime.ts @@ -0,0 +1,15 @@ +import { DaysEnum } from "./enums"; + +export function calculateHistoricalBlockNumber( + days: DaysEnum, + currentBlockNumber: number, + blockTime: number, +): number { + const blocksToGoBack = Math.floor(days / blockTime); + const historicalBlockNumber = Math.max( + 0, + currentBlockNumber - blocksToGoBack, + ); + + return historicalBlockNumber; +} diff --git a/apps/indexer/src/lib/constants.ts b/apps/indexer/src/lib/constants.ts index a01a4773a..0562ff8fa 100644 --- a/apps/indexer/src/lib/constants.ts +++ b/apps/indexer/src/lib/constants.ts @@ -1,7 +1,6 @@ -import type { Address } from "viem"; -import { zeroAddress } from "viem"; +import { Address, zeroAddress } from "viem"; -import { DaoIdEnum } from "./enums.ts"; +import { DaoIdEnum } from "./enums"; export const CONTRACT_ADDRESSES = { [DaoIdEnum.UNI]: { @@ -875,34 +874,28 @@ export const NonCirculatingAddresses: Record< [DaoIdEnum.SHU]: {}, }; -export const 
ProposalStatus = { - PENDING: "PENDING", - ACTIVE: "ACTIVE", - CANCELED: "CANCELED", - DEFEATED: "DEFEATED", - SUCCEEDED: "SUCCEEDED", - QUEUED: "QUEUED", - EXPIRED: "EXPIRED", - EXECUTED: "EXECUTED", - VETOED: "VETOED", - NO_QUORUM: "NO_QUORUM", -} as const; - -export type ProposalStatus = - (typeof ProposalStatus)[keyof typeof ProposalStatus]; - -export const MetricTypesEnum = { - TOTAL_SUPPLY: "TOTAL_SUPPLY", - DELEGATED_SUPPLY: "DELEGATED_SUPPLY", - CEX_SUPPLY: "CEX_SUPPLY", - DEX_SUPPLY: "DEX_SUPPLY", - LENDING_SUPPLY: "LENDING_SUPPLY", - CIRCULATING_SUPPLY: "CIRCULATING_SUPPLY", - TREASURY: "TREASURY", - NON_CIRCULATING_SUPPLY: "NON_CIRCULATING_SUPPLY", -} as const; +export enum ProposalStatus { + PENDING = "PENDING", + ACTIVE = "ACTIVE", + CANCELED = "CANCELED", + DEFEATED = "DEFEATED", + SUCCEEDED = "SUCCEEDED", + QUEUED = "QUEUED", + EXPIRED = "EXPIRED", + EXECUTED = "EXECUTED", + VETOED = "VETOED", + NO_QUORUM = "NO_QUORUM", +} -export type MetricTypesEnum = - (typeof MetricTypesEnum)[keyof typeof MetricTypesEnum]; +export enum MetricTypesEnum { + TOTAL_SUPPLY = "TOTAL_SUPPLY", + DELEGATED_SUPPLY = "DELEGATED_SUPPLY", + CEX_SUPPLY = "CEX_SUPPLY", + DEX_SUPPLY = "DEX_SUPPLY", + LENDING_SUPPLY = "LENDING_SUPPLY", + CIRCULATING_SUPPLY = "CIRCULATING_SUPPLY", + TREASURY = "TREASURY", + NON_CIRCULATING_SUPPLY = "NON_CIRCULATING_SUPPLY", +} export const metricTypeArray = Object.values(MetricTypesEnum); diff --git a/apps/indexer/src/lib/date-helpers.ts b/apps/indexer/src/lib/date-helpers.ts index 4ef8104d5..f884e70a6 100644 --- a/apps/indexer/src/lib/date-helpers.ts +++ b/apps/indexer/src/lib/date-helpers.ts @@ -2,7 +2,7 @@ * Date and timestamp utilities for time-series data processing. 
*/ -import { SECONDS_IN_DAY } from "./enums.ts"; +import { SECONDS_IN_DAY } from "./enums"; /** * Truncate timestamp (seconds) to midnight UTC diff --git a/apps/indexer/src/lib/enums.ts b/apps/indexer/src/lib/enums.ts index 84f040ae6..431d750d0 100644 --- a/apps/indexer/src/lib/enums.ts +++ b/apps/indexer/src/lib/enums.ts @@ -1,21 +1,27 @@ -export const DaoIdEnum = { - AAVE: "AAVE", - UNI: "UNI", - ENS: "ENS", - ARB: "ARB", - OP: "OP", - GTC: "GTC", - NOUNS: "NOUNS", - TEST: "TEST", - SCR: "SCR", - COMP: "COMP", - OBOL: "OBOL", - ZK: "ZK", - SHU: "SHU", - FLUID: "FLUID", - LIL_NOUNS: "LIL_NOUNS", -} as const; - -export type DaoIdEnum = (typeof DaoIdEnum)[keyof typeof DaoIdEnum]; +export enum DaoIdEnum { + AAVE = "AAVE", + UNI = "UNI", + ENS = "ENS", + ARB = "ARB", + OP = "OP", + GTC = "GTC", + NOUNS = "NOUNS", + TEST = "TEST", + SCR = "SCR", + COMP = "COMP", + OBOL = "OBOL", + ZK = "ZK", + SHU = "SHU", + FLUID = "FLUID", + LIL_NOUNS = "LIL_NOUNS", +} export const SECONDS_IN_DAY = 24 * 60 * 60; + +export enum DaysEnum { + "7d" = 7 * 24 * 60 * 60, + "30d" = 30 * 24 * 60 * 60, + "90d" = 90 * 24 * 60 * 60, + "180d" = 180 * 24 * 60 * 60, + "365d" = 365 * 24 * 60 * 60, +} diff --git a/apps/indexer/src/lib/query-helpers.ts b/apps/indexer/src/lib/query-helpers.ts index 0d17b1c81..987cbbfa5 100644 --- a/apps/indexer/src/lib/query-helpers.ts +++ b/apps/indexer/src/lib/query-helpers.ts @@ -2,7 +2,7 @@ * Query helpers for pagination and data filtering in time-series APIs. */ -import { truncateTimestampToMidnight } from "./date-helpers.ts"; +import { truncateTimestampToMidnight } from "./date-helpers"; /** * Filter data by cutoff date with fallback to last value before cutoff. diff --git a/apps/indexer/src/lib/time-series.ts b/apps/indexer/src/lib/time-series.ts index 0cb1554de..5776ca935 100644 --- a/apps/indexer/src/lib/time-series.ts +++ b/apps/indexer/src/lib/time-series.ts @@ -4,8 +4,8 @@ * Forward-fill: Use the last known value for any missing data points. 
*/ -import { SECONDS_IN_DAY } from "./enums.ts"; -import { truncateTimestampToMidnight } from "./date-helpers.ts"; +import { SECONDS_IN_DAY } from "./enums"; +import { truncateTimestampToMidnight } from "./date-helpers"; /** * Forward-fill sparse data across a master timeline. diff --git a/apps/indexer/tsconfig.json b/apps/indexer/tsconfig.json index a7a5c1851..c5588ac63 100644 --- a/apps/indexer/tsconfig.json +++ b/apps/indexer/tsconfig.json @@ -23,17 +23,6 @@ "@/*": ["./src/*"] } }, - "ts-node": { - "compilerOptions": { - "module": "CommonJS", - "moduleResolution": "node" - } - }, - "include": [ - "src/eventHandlers/**/*.ts", - "src/lib/**/*.ts", - "src/env.ts", - "generated/**/*.ts" - ], + "include": ["./**/*.ts"], "exclude": ["node_modules", "test"] } diff --git a/eslint.config.mjs b/eslint.config.mjs index e303951e0..8a78324c7 100644 --- a/eslint.config.mjs +++ b/eslint.config.mjs @@ -31,16 +31,7 @@ export default [ "apps/api-gateway/schema.graphql", "**/storybook-static/**", "**/.storybook/**", - // HyperIndex: ignore generated files and old Ponder files during migration - "apps/indexer/generated/**", - "apps/indexer/src/indexer/**", - "apps/indexer/src/index.ts", - "apps/indexer/src/metrics.ts", - "apps/indexer/src/api/**", - "apps/indexer/ponder.config.ts", - "apps/indexer/ponder.schema.ts", - "apps/indexer/ponder-env.d.ts", - "apps/indexer/config/**", + "apps/hypersync-indexer/generated/**", ], }, @@ -136,17 +127,17 @@ export default [ }, }, - // Indexer lib — allow const + type with same name (enum-as-const pattern) + // API mappers - allow Zod schema + type with same name pattern { - files: ["apps/indexer/src/lib/**/*.{js,ts}"], + files: ["apps/api/src/mappers/**/*.{js,ts}"], rules: { "@typescript-eslint/no-redeclare": "off", }, }, - // API mappers - allow Zod schema + type with same name pattern + // HyperIndex indexer lib - allow const + type with same name pattern { - files: ["apps/api/src/mappers/**/*.{js,ts}"], + files: 
["apps/hypersync-indexer/src/lib/**/*.{js,ts}"], rules: { "@typescript-eslint/no-redeclare": "off", }, diff --git a/package.json b/package.json index 26bf3f80e..d3d094781 100644 --- a/package.json +++ b/package.json @@ -5,6 +5,7 @@ "scripts": { "dashboard": "dotenv -- turbo run --filter=@anticapture/dashboard", "indexer": "dotenv -- turbo run --filter=@anticapture/indexer", + "hypersync-indexer": "dotenv -- turbo run --filter=@anticapture/hypersync-indexer", "gateway": "dotenv -- turbo run --filter=@anticapture/api-gateway", "gateful": "dotenv -- turbo run --filter=@anticapture/gateful", "api": "dotenv -- turbo run --filter=@anticapture/api", diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index f6ff734ef..d6557ccd8 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -547,6 +547,37 @@ importers: specifier: ^4.0.18 version: 4.1.0(@opentelemetry/api@1.9.0)(@types/node@25.4.0)(jsdom@24.1.3(bufferutil@4.0.9)(utf-8-validate@5.0.10))(msw@2.12.10(@types/node@25.4.0)(typescript@5.9.3))(vite@7.0.5(@types/node@25.4.0)(jiti@2.6.1)(lightningcss@1.31.1)(terser@5.46.0)(tsx@4.21.0)(yaml@2.8.2)) + apps/hypersync-indexer: + dependencies: + viem: + specifier: ^2.37.11 + version: 2.47.2(bufferutil@4.0.9)(typescript@5.9.3)(utf-8-validate@5.0.10)(zod@3.25.76) + devDependencies: + "@types/node": + specifier: ^20.16.5 + version: 20.19.37 + dotenv: + specifier: ^16.5.0 + version: 16.6.1 + envio: + specifier: ^2.32.12 + version: 2.32.12(bufferutil@4.0.9)(typescript@5.9.3)(utf-8-validate@5.0.10)(zod@3.25.76) + eslint: + specifier: ^9 + version: 9.39.4(jiti@2.6.1) + eslint-config-prettier: + specifier: ^9.1.0 + version: 9.1.2(eslint@9.39.4(jiti@2.6.1)) + eslint-plugin-prettier: + specifier: ^5.2.1 + version: 5.5.5(@types/eslint@9.6.1)(eslint-config-prettier@9.1.2(eslint@9.39.4(jiti@2.6.1)))(eslint@9.39.4(jiti@2.6.1))(prettier@3.8.1) + prettier: + specifier: ^3.5.3 + version: 3.8.1 + typescript: + specifier: ^5.8.3 + version: 5.9.3 + apps/indexer: dependencies: "@anticapture/observability": @@ 
-577,9 +608,6 @@ importers: dotenv: specifier: ^16.5.0 version: 16.6.1 - envio: - specifier: ^2.32.12 - version: 2.32.12(bufferutil@4.0.9)(typescript@5.9.3)(utf-8-validate@5.0.10)(zod@3.25.76) eslint: specifier: ^9 version: 9.39.4(jiti@2.6.1) @@ -9419,14 +9447,6 @@ packages: } engines: { node: ">=0.4.0" } - acorn@8.15.0: - resolution: - { - integrity: sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==, - } - engines: { node: ">=0.4.0" } - hasBin: true - acorn@8.16.0: resolution: { @@ -12844,12 +12864,6 @@ packages: } engines: { node: ">=16" } - flatted@3.3.3: - resolution: - { - integrity: sha512-GX+ysw4PBCz0PzosHDepZGANEuFCMLrnRTiEy9McGjmkCQYwRq4A/X786G/fjM/+OjsWSU1ZrY5qyARZmO/uwg==, - } - flatted@3.4.1: resolution: { @@ -21914,7 +21928,7 @@ snapshots: "@commitlint/config-validator@19.8.1": dependencies: "@commitlint/types": 19.8.1 - ajv: 8.17.1 + ajv: 8.18.0 "@commitlint/ensure@19.8.1": dependencies: @@ -28476,16 +28490,14 @@ snapshots: dependencies: acorn: 8.16.0 - acorn-jsx@5.3.2(acorn@8.15.0): + acorn-jsx@5.3.2(acorn@8.16.0): dependencies: - acorn: 8.15.0 + acorn: 8.16.0 acorn-walk@8.3.4: dependencies: acorn: 8.16.0 - acorn@8.15.0: {} - acorn@8.16.0: {} adjust-sourcemap-loader@4.0.0: @@ -28516,9 +28528,9 @@ snapshots: optionalDependencies: ajv: 8.17.1 - ajv-formats@3.0.1(ajv@8.17.1): + ajv-formats@2.1.1(ajv@8.18.0): optionalDependencies: - ajv: 8.17.1 + ajv: 8.18.0 ajv-formats@3.0.1(ajv@8.18.0): optionalDependencies: @@ -28528,9 +28540,9 @@ snapshots: dependencies: ajv: 6.14.0 - ajv-keywords@5.1.0(ajv@8.17.1): + ajv-keywords@5.1.0(ajv@8.18.0): dependencies: - ajv: 8.17.1 + ajv: 8.18.0 fast-deep-equal: 3.1.3 ajv@6.14.0: @@ -29284,8 +29296,8 @@ snapshots: conf@12.0.0: dependencies: - ajv: 8.17.1 - ajv-formats: 2.1.1(ajv@8.17.1) + ajv: 8.18.0 + ajv-formats: 2.1.1(ajv@8.18.0) atomically: 2.0.3 debounce-fn: 5.1.2 dot-prop: 8.0.2 @@ -30479,8 +30491,8 @@ snapshots: espree@10.4.0: dependencies: - acorn: 8.15.0 - acorn-jsx: 
5.3.2(acorn@8.15.0) + acorn: 8.16.0 + acorn-jsx: 5.3.2(acorn@8.16.0) eslint-visitor-keys: 4.2.1 esprima@4.0.1: {} @@ -30656,8 +30668,8 @@ snapshots: fast-json-stringify@5.16.1: dependencies: "@fastify/merge-json-schemas": 0.1.1 - ajv: 8.17.1 - ajv-formats: 3.0.1(ajv@8.17.1) + ajv: 8.18.0 + ajv-formats: 3.0.1(ajv@8.18.0) fast-deep-equal: 3.1.3 fast-uri: 2.4.0 json-schema-ref-resolver: 1.0.1 @@ -30781,11 +30793,9 @@ snapshots: flat-cache@4.0.1: dependencies: - flatted: 3.3.3 + flatted: 3.4.1 keyv: 4.5.4 - flatted@3.3.3: {} - flatted@3.4.1: {} follow-redirects@1.15.11: {} @@ -34189,9 +34199,9 @@ snapshots: schema-utils@4.3.3: dependencies: "@types/json-schema": 7.0.15 - ajv: 8.17.1 - ajv-formats: 2.1.1(ajv@8.17.1) - ajv-keywords: 5.1.0(ajv@8.17.1) + ajv: 8.18.0 + ajv-formats: 2.1.1(ajv@8.18.0) + ajv-keywords: 5.1.0(ajv@8.18.0) scrypt-js@3.0.1: {} From 1a1c72b5a88da10959e44cfffb5a27de2de23d81 Mon Sep 17 00:00:00 2001 From: Pedro Binotto Date: Wed, 8 Apr 2026 13:47:03 -0300 Subject: [PATCH 03/17] chore: use Dockerfile and remove tracked generated files --- Dockerfile.hypersync-indexer | 27 + apps/hypersync-indexer/.gitignore | 21 + apps/hypersync-indexer/generated/.gitignore | 32 - apps/hypersync-indexer/generated/.npmrc | 1 - apps/hypersync-indexer/generated/LICENSE.md | 67 - .../generated/docker-compose.yaml | 52 - apps/hypersync-indexer/generated/index.d.ts | 26 - apps/hypersync-indexer/generated/index.js | 13 - apps/hypersync-indexer/generated/package.json | 49 - .../generated/persisted_state.envio.json | 7 - .../generated/pnpm-lock.yaml | 3012 ----------------- .../hypersync-indexer/generated/rescript.json | 35 - .../generated/schema.graphql | 1258 ------- .../generated/src/Benchmark.res | 394 --- .../generated/src/ConfigYAML.gen.ts | 38 - .../generated/src/ConfigYAML.res | 92 - apps/hypersync-indexer/generated/src/Env.res | 247 -- .../generated/src/EventProcessing.res | 478 --- .../generated/src/Generated.res | 206 -- .../generated/src/GqlDbCustomTypes.gen.ts | 8 
- .../generated/src/GqlDbCustomTypes.res | 11 - .../generated/src/Handlers.gen.ts | 165 - .../generated/src/Handlers.res | 23 - apps/hypersync-indexer/generated/src/IO.res | 396 --- .../generated/src/Index.bs.js | 209 -- .../hypersync-indexer/generated/src/Index.res | 332 -- .../generated/src/Js.shim.ts | 11 - .../generated/src/LoadLayer.res | 444 --- .../generated/src/LoadLayer.resi | 32 - apps/hypersync-indexer/generated/src/Path.res | 1 - .../generated/src/PersistedState.res | 25 - .../generated/src/TestHelpers.gen.ts | 188 - .../generated/src/TestHelpers.res | 504 --- .../src/TestHelpers_MockAddresses.gen.ts | 12 - .../src/TestHelpers_MockAddresses.res | 30 - .../generated/src/TestHelpers_MockDb.gen.ts | 87 - .../generated/src/TestHelpers_MockDb.res | 820 ----- .../generated/src/Types.gen.ts | 557 --- .../hypersync-indexer/generated/src/Types.res | 957 ------ apps/hypersync-indexer/generated/src/Types.ts | 832 ----- .../generated/src/UserContext.res | 360 -- .../generated/src/bindings/Dotenv.res | 17 - .../generated/src/bindings/Ethers.gen.ts | 15 - .../generated/src/bindings/OpaqueTypes.ts | 5 - .../generated/src/bindings/RescriptMocha.res | 123 - .../generated/src/bindings/Yargs.res | 8 - .../hypersync-indexer/generated/src/db/Db.res | 30 - .../generated/src/db/DbFunctions.res | 33 - .../generated/src/db/DbFunctionsEntities.res | 22 - .../src/db/DbFunctionsImplementation.js | 17 - .../generated/src/db/Entities.gen.ts | 233 -- .../generated/src/db/Entities.res | 2077 ------------ .../generated/src/db/Enums.gen.ts | 41 - .../generated/src/db/Enums.res | 108 - .../generated/src/db/Migrations.res | 57 - .../src/eventFetching/ChainFetcher.res | 464 --- .../src/eventFetching/ChainManager.res | 174 - .../src/eventFetching/NetworkSources.res | 95 - .../generated/src/globalState/GlobalState.res | 1188 ------- .../src/globalState/GlobalStateManager.res | 76 - .../src/globalState/GlobalStateManager.resi | 7 - .../generated/src/ink/EnvioInkApp.res | 67 - 
.../generated/src/ink/bindings/DateFns.res | 71 - .../generated/src/ink/bindings/Ink.res | 355 -- .../generated/src/ink/bindings/Style.res | 123 - .../ink/components/BufferedProgressBar.res | 40 - .../src/ink/components/ChainData.res | 161 - .../src/ink/components/CustomHooks.res | 114 - .../generated/src/ink/components/Messages.res | 41 - .../generated/src/ink/components/SyncETA.res | 198 -- generated@0.1.0 | 0 71 files changed, 48 insertions(+), 17971 deletions(-) create mode 100644 Dockerfile.hypersync-indexer create mode 100644 apps/hypersync-indexer/.gitignore delete mode 100644 apps/hypersync-indexer/generated/.gitignore delete mode 100644 apps/hypersync-indexer/generated/.npmrc delete mode 100644 apps/hypersync-indexer/generated/LICENSE.md delete mode 100644 apps/hypersync-indexer/generated/docker-compose.yaml delete mode 100644 apps/hypersync-indexer/generated/index.d.ts delete mode 100644 apps/hypersync-indexer/generated/index.js delete mode 100644 apps/hypersync-indexer/generated/package.json delete mode 100644 apps/hypersync-indexer/generated/persisted_state.envio.json delete mode 100644 apps/hypersync-indexer/generated/pnpm-lock.yaml delete mode 100644 apps/hypersync-indexer/generated/rescript.json delete mode 100644 apps/hypersync-indexer/generated/schema.graphql delete mode 100644 apps/hypersync-indexer/generated/src/Benchmark.res delete mode 100644 apps/hypersync-indexer/generated/src/ConfigYAML.gen.ts delete mode 100644 apps/hypersync-indexer/generated/src/ConfigYAML.res delete mode 100644 apps/hypersync-indexer/generated/src/Env.res delete mode 100644 apps/hypersync-indexer/generated/src/EventProcessing.res delete mode 100644 apps/hypersync-indexer/generated/src/Generated.res delete mode 100644 apps/hypersync-indexer/generated/src/GqlDbCustomTypes.gen.ts delete mode 100644 apps/hypersync-indexer/generated/src/GqlDbCustomTypes.res delete mode 100644 apps/hypersync-indexer/generated/src/Handlers.gen.ts delete mode 100644 
apps/hypersync-indexer/generated/src/Handlers.res delete mode 100644 apps/hypersync-indexer/generated/src/IO.res delete mode 100644 apps/hypersync-indexer/generated/src/Index.bs.js delete mode 100644 apps/hypersync-indexer/generated/src/Index.res delete mode 100644 apps/hypersync-indexer/generated/src/Js.shim.ts delete mode 100644 apps/hypersync-indexer/generated/src/LoadLayer.res delete mode 100644 apps/hypersync-indexer/generated/src/LoadLayer.resi delete mode 100644 apps/hypersync-indexer/generated/src/Path.res delete mode 100644 apps/hypersync-indexer/generated/src/PersistedState.res delete mode 100644 apps/hypersync-indexer/generated/src/TestHelpers.gen.ts delete mode 100644 apps/hypersync-indexer/generated/src/TestHelpers.res delete mode 100644 apps/hypersync-indexer/generated/src/TestHelpers_MockAddresses.gen.ts delete mode 100644 apps/hypersync-indexer/generated/src/TestHelpers_MockAddresses.res delete mode 100644 apps/hypersync-indexer/generated/src/TestHelpers_MockDb.gen.ts delete mode 100644 apps/hypersync-indexer/generated/src/TestHelpers_MockDb.res delete mode 100644 apps/hypersync-indexer/generated/src/Types.gen.ts delete mode 100644 apps/hypersync-indexer/generated/src/Types.res delete mode 100644 apps/hypersync-indexer/generated/src/Types.ts delete mode 100644 apps/hypersync-indexer/generated/src/UserContext.res delete mode 100644 apps/hypersync-indexer/generated/src/bindings/Dotenv.res delete mode 100644 apps/hypersync-indexer/generated/src/bindings/Ethers.gen.ts delete mode 100644 apps/hypersync-indexer/generated/src/bindings/OpaqueTypes.ts delete mode 100644 apps/hypersync-indexer/generated/src/bindings/RescriptMocha.res delete mode 100644 apps/hypersync-indexer/generated/src/bindings/Yargs.res delete mode 100644 apps/hypersync-indexer/generated/src/db/Db.res delete mode 100644 apps/hypersync-indexer/generated/src/db/DbFunctions.res delete mode 100644 apps/hypersync-indexer/generated/src/db/DbFunctionsEntities.res delete mode 100644 
apps/hypersync-indexer/generated/src/db/DbFunctionsImplementation.js delete mode 100644 apps/hypersync-indexer/generated/src/db/Entities.gen.ts delete mode 100644 apps/hypersync-indexer/generated/src/db/Entities.res delete mode 100644 apps/hypersync-indexer/generated/src/db/Enums.gen.ts delete mode 100644 apps/hypersync-indexer/generated/src/db/Enums.res delete mode 100644 apps/hypersync-indexer/generated/src/db/Migrations.res delete mode 100644 apps/hypersync-indexer/generated/src/eventFetching/ChainFetcher.res delete mode 100644 apps/hypersync-indexer/generated/src/eventFetching/ChainManager.res delete mode 100644 apps/hypersync-indexer/generated/src/eventFetching/NetworkSources.res delete mode 100644 apps/hypersync-indexer/generated/src/globalState/GlobalState.res delete mode 100644 apps/hypersync-indexer/generated/src/globalState/GlobalStateManager.res delete mode 100644 apps/hypersync-indexer/generated/src/globalState/GlobalStateManager.resi delete mode 100644 apps/hypersync-indexer/generated/src/ink/EnvioInkApp.res delete mode 100644 apps/hypersync-indexer/generated/src/ink/bindings/DateFns.res delete mode 100644 apps/hypersync-indexer/generated/src/ink/bindings/Ink.res delete mode 100644 apps/hypersync-indexer/generated/src/ink/bindings/Style.res delete mode 100644 apps/hypersync-indexer/generated/src/ink/components/BufferedProgressBar.res delete mode 100644 apps/hypersync-indexer/generated/src/ink/components/ChainData.res delete mode 100644 apps/hypersync-indexer/generated/src/ink/components/CustomHooks.res delete mode 100644 apps/hypersync-indexer/generated/src/ink/components/Messages.res delete mode 100644 apps/hypersync-indexer/generated/src/ink/components/SyncETA.res delete mode 100644 generated@0.1.0 diff --git a/Dockerfile.hypersync-indexer b/Dockerfile.hypersync-indexer new file mode 100644 index 000000000..73fe67fd8 --- /dev/null +++ b/Dockerfile.hypersync-indexer @@ -0,0 +1,27 @@ +FROM node:20-slim AS base + +FROM base AS builder +WORKDIR /app 
+COPY . . +RUN npx turbo@2.3.1 prune @anticapture/hypersync-indexer --docker + +FROM base AS installer +RUN apt-get update && apt-get install -y curl && npm install -g pnpm ts-node +WORKDIR /app +COPY package.json package.json +COPY pnpm-lock.yaml pnpm-lock.yaml +COPY --from=builder /app/out/json/ . +RUN pnpm install +COPY --from=builder /app/out/full/ . +COPY --from=builder /app/turbo.json turbo.json +WORKDIR /app/apps/hypersync-indexer +RUN pnpm envio codegen + +FROM base AS runner +ENV NODE_ENV=production +RUN apt-get update && apt-get install -y curl && npm install -g ts-node +WORKDIR /app +COPY --from=installer /app . +WORKDIR /app/apps/hypersync-indexer + +ENTRYPOINT ["npm", "run", "start"] diff --git a/apps/hypersync-indexer/.gitignore b/apps/hypersync-indexer/.gitignore new file mode 100644 index 000000000..22fc4b26d --- /dev/null +++ b/apps/hypersync-indexer/.gitignore @@ -0,0 +1,21 @@ +# Dependencies +/node_modules + +# Debug +npm-debug.log* +yarn-debug.log* +yarn-error.log* +.pnpm-debug.log* + +# Misc +.DS_Store + +# Env files +.env + +# HyperIndex codegen output (regenerated by `envio codegen`) +/generated/ + +# HyperIndex runtime state +persisted_state.envio.json +logs/ diff --git a/apps/hypersync-indexer/generated/.gitignore b/apps/hypersync-indexer/generated/.gitignore deleted file mode 100644 index 69f7d54c4..000000000 --- a/apps/hypersync-indexer/generated/.gitignore +++ /dev/null @@ -1,32 +0,0 @@ -*.exe -*.obj -*.out -*.compile -*.native -*.byte -*.cmo -*.annot -*.cmi -*.cmx -*.cmt -*.cmti -*.cma -*.a -*.cmxa -*.obj -*~ -*.annot -*.cmj -*.bak -/lib/ -*.mlast -*.mliast -.vscode -.merlin -.bsb.lock -/node_modules/ -/benchmarks/ -*.res.js -*.res.mjs -logs/* -*BenchmarkCache.json diff --git a/apps/hypersync-indexer/generated/.npmrc b/apps/hypersync-indexer/generated/.npmrc deleted file mode 100644 index d3a714f25..000000000 --- a/apps/hypersync-indexer/generated/.npmrc +++ /dev/null @@ -1 +0,0 @@ -save-exact=true # Since we want specific versions of 
envio to be completely stable. diff --git a/apps/hypersync-indexer/generated/LICENSE.md b/apps/hypersync-indexer/generated/LICENSE.md deleted file mode 100644 index 418103b21..000000000 --- a/apps/hypersync-indexer/generated/LICENSE.md +++ /dev/null @@ -1,67 +0,0 @@ -## HyperIndex End-User License Agreement (EULA) - -This agreement describes the users' rights and the conditions upon which the Software and Generated Code may be used. The user should review the entire agreement, including any supplemental license terms that accompany the Software, since all of the terms are important and together create this agreement that applies to them. - -### 1. Definitions - -**Software:** HyperIndex, a copyrightable work created by Envio and licensed under this End User License Agreement (“EULA”). - -**Generated Code:** In the context of this license agreement, the term "generated code" refers to computer programming code that is produced automatically by the Software based on input provided by the user. - -**Licensed Material:** The Software and Generated Code defined here will be collectively referred to as “Licensed Material”. - -### 2. Installation and User Rights - -**License:** The Software is provided under this EULA. By agreeing to the EULA terms, you are granted the right to install and operate one instance of the Software on your device (referred to as the licensed device), for the use of one individual at a time, on the condition that you adhere to all terms outlined in this agreement. -The licensor provides you with a non-exclusive, royalty-free, worldwide license that is non-sublicensable and non-transferable. This license allows you to use the Software subject to the limitations and conditions outlined in this EULA. -With one license, the user can only use the Software on a single device. - -**Device:** In this agreement, "device" refers to a hardware system, whether physical or virtual, equipped with an internal storage device capable of executing the Software. 
This includes hardware partitions, which are considered as individual devices for the purposes of this agreement. Updates may be provided to the Software, and these updates may alter the minimum hardware requirements necessary for the Software. It is the responsibility of users to comply with any changing hardware requirements. - -**Updates:** The Software may be updated automatically. With each update, the EULA may be amended, and it is the users' responsibility to comply with the amendments. - -**Limitations:** Envio reserves all rights, including those under intellectual property laws, not expressly granted in this agreement. For instance, this license does not confer upon you the right to, and you are prohibited from: - -(i) Publishing, copying (other than the permitted backup copy), renting, leasing, or lending the Software; - -(ii) Transferring the Software (except as permitted by this agreement); - -(iii) Circumventing any technical restrictions or limitations in the Software; - -(iv) Using the Software as server Software, for commercial hosting, making the Software available for simultaneous use by multiple users over a network, installing the Software on a server and allowing users to access it remotely, or installing the Software on a device solely for remote user use; - -(v) Reverse engineering, decompiling, or disassembling the Software, or attempting to do so, except and only to the extent that the foregoing restriction is (a) permitted by applicable law; (b) permitted by licensing terms governing the use of open-source components that may be included with the Software and - -(vi) When using the Software, you may not use any features in any manner that could interfere with anyone else's use of them, or attempt to gain unauthorized access to or use of any service, data, account, or network. - -These limitations apply specifically to the Software and do not extend to the Generated Code. 
Details regarding the use of the Generated Code, including associated limitations, are provided below. - -### 3. Use of the Generated Code - -**Limitations:** Users can use, copy, distribute, make available, and create derivative works of the Generated Code freely, subject to the limitations and conditions specified below. - -(i) The user is prohibited from offering the Generated Code or any software that includes the Generated Code to third parties as a hosted or managed service, where the service grants users access to a significant portion of the Software's features or functionality. - -(ii) The user is not permitted to tamper with, alter, disable, or bypass the functionality of the license key in the Software. Additionally, the user may not eliminate or conceal any functionality within the Software that is safeguarded by the license key. - -(iii) Any modification, removal, or concealment of licensing, copyright, or other notices belonging to the licensor in the Software is strictly forbidden. The use of the licensor's trademarks is subject to relevant laws. - -**Credit:** If the user utilizes the Generated Code to develop and release a new software, product or service, the license agreement for said software, product or service must include proper credit to HyperIndex. - -**Liability:** Envio does not provide any assurance that the Generated Code functions correctly, nor does it assume any responsibility in this regard. - -Additionally, it will be the responsibility of the user to assess whether the Generated Code is suitable for the products and services provided by the user. Envio will not bear any responsibility if the Generated Code is found unsuitable for the products and services provided by the user. - -### 4. 
Additional Terms - -**Disclaimer of Warranties and Limitation of Liability:** - -(i) Unless expressly undertaken by the Licensor separately, the Licensed Material is provided on an as-is, as-available basis, and the Licensor makes no representations or warranties of any kind regarding the Licensed Material, whether express, implied, statutory, or otherwise. This encompasses, without limitation, warranties of title, merchantability, fitness for a particular purpose, non-infringement, absence of latent or other defects, accuracy, or the presence or absence of errors, whether known or discoverable. If disclaimers of warranties are not permitted in whole or in part, this disclaimer may not apply to You. - -(ii) To the fullest extent permitted by law, under no circumstances shall the Licensor be liable to You under any legal theory (including, but not limited to, negligence) for any direct, special, indirect, incidental, consequential, punitive, exemplary, or other losses, costs, expenses, or damages arising from the use of the Licensed Material, even if the Licensor has been advised of the possibility of such losses, costs, expenses, or damages. If limitations of liability are not permitted in whole or in part, this limitation may not apply to You. - -(iii) The disclaimers of warranties and limitations of liability outlined above shall be construed in a manner that most closely approximates an absolute disclaimer and waiver of all liability, to the fullest extent permitted by law. - -**Applicable Law and Competent Courts:** This EULA and shall be governed by and construed in accordance with the laws of England. The courts of England shall have exclusive jurisdiction to settle any dispute arising out of or in connection with this EULA. - -**Additional Agreements:** If the user chooses to use the Software, it may be required to agree to additional terms or agreements outside of this EULA. 
diff --git a/apps/hypersync-indexer/generated/docker-compose.yaml b/apps/hypersync-indexer/generated/docker-compose.yaml deleted file mode 100644 index 9ecc5fe16..000000000 --- a/apps/hypersync-indexer/generated/docker-compose.yaml +++ /dev/null @@ -1,52 +0,0 @@ -services: - envio-postgres: - image: postgres:17.5 - restart: always - ports: - - "${ENVIO_PG_PORT:-5433}:5432" - volumes: - - db_data:/var/lib/postgresql/data - environment: - POSTGRES_PASSWORD: ${ENVIO_PG_PASSWORD:-testing} - POSTGRES_USER: ${ENVIO_PG_USER:-postgres} - POSTGRES_DB: ${ENVIO_PG_DATABASE:-envio-dev} - networks: - - my-proxy-net - graphql-engine: - image: hasura/graphql-engine:v2.43.0 - ports: - - "${HASURA_EXTERNAL_PORT:-8080}:8080" - user: 1001:1001 - depends_on: - - "envio-postgres" - restart: always - environment: - # TODO: refine migrations. For now we will run hasura setup via custom scripts, rather than standard migrations. - # See details of this image here: https://hasura.io/docs/latest/graphql/core/migrations/advanced/auto-apply-migrations.html - # HASURA_GRAPHQL_MIGRATIONS_DIR: /hasura-migrations - # HASURA_GRAPHQL_METADATA_DIR: /hasura-metadata - HASURA_GRAPHQL_DATABASE_URL: postgres://${ENVIO_PG_USER:-postgres}:${ENVIO_PG_PASSWORD:-testing}@envio-postgres:5432/${ENVIO_PG_DATABASE:-envio-dev} - HASURA_GRAPHQL_ENABLE_CONSOLE: ${HASURA_GRAPHQL_ENABLE_CONSOLE:-true} # can make this default to false once we are further in our development. 
- HASURA_GRAPHQL_ENABLED_LOG_TYPES: - startup, http-log, webhook-log, websocket-log, - query-log - HASURA_GRAPHQL_NO_OF_RETRIES: 10 - HASURA_GRAPHQL_ADMIN_SECRET: ${HASURA_GRAPHQL_ADMIN_SECRET:-testing} - HASURA_GRAPHQL_STRINGIFY_NUMERIC_TYPES: "true" - PORT: 8080 - HASURA_GRAPHQL_UNAUTHORIZED_ROLE: public - healthcheck: - # CMD from a GH issue thread, no curl or wget installed with image - # but hasura comes with a /healthz public endpoint for checking server health - test: timeout 1s bash -c ':> /dev/tcp/127.0.0.1/8080' || exit 1 - interval: 5s - timeout: 2s - retries: 50 - start_period: 5s - networks: - - my-proxy-net -volumes: - db_data: -networks: - my-proxy-net: - name: local_test_network diff --git a/apps/hypersync-indexer/generated/index.d.ts b/apps/hypersync-indexer/generated/index.d.ts deleted file mode 100644 index 66185c668..000000000 --- a/apps/hypersync-indexer/generated/index.d.ts +++ /dev/null @@ -1,26 +0,0 @@ -export { - ENSGovernor, - ENSToken, - onBlock -} from "./src/Handlers.gen"; -export type * from "./src/Types.gen"; -import { - ENSGovernor, - ENSToken, - MockDb, - Addresses -} from "./src/TestHelpers.gen"; - -export const TestHelpers = { - ENSGovernor, - ENSToken, - MockDb, - Addresses -}; - -export { - EventType, - MetricType, -} from "./src/Enum.gen"; - -export {default as BigDecimal} from 'bignumber.js'; diff --git a/apps/hypersync-indexer/generated/index.js b/apps/hypersync-indexer/generated/index.js deleted file mode 100644 index 075fc5440..000000000 --- a/apps/hypersync-indexer/generated/index.js +++ /dev/null @@ -1,13 +0,0 @@ -/** - This file serves as an entry point when referencing generated as a node module - */ - -const handlers = require("./src/Handlers.res.js"); -const TestHelpers = require("./src/TestHelpers.res.js"); -const BigDecimal = require("bignumber.js"); - -module.exports = { - ...handlers, - BigDecimal, - TestHelpers, -}; diff --git a/apps/hypersync-indexer/generated/package.json 
b/apps/hypersync-indexer/generated/package.json deleted file mode 100644 index 00e12e16f..000000000 --- a/apps/hypersync-indexer/generated/package.json +++ /dev/null @@ -1,49 +0,0 @@ -{ - "name": "generated", - "version": "0.1.0", - "main": "index.js", - "types": "index.d.ts", - "private": true, - "scripts": { - "clean": "rescript clean", - "build": "rescript", - "watch": "rescript -w", - "format": "rescript format -all", - "db-up": "node -e 'require(`./src/db/Migrations.res.js`).runUpMigrations(true)'", - "db-down": "node -e 'require(`./src/db/Migrations.res.js`).runDownMigrations(true)'", - "db-setup": "node -e 'require(`./src/db/Migrations.res.js`).runUpMigrations(true, true)'", - "print-benchmark-summary": "node -e 'require(`./src/Benchmark.res.js`).Summary.printSummary()'", - "start": "ts-node src/Index.res.js" - }, - "keywords": [ - "ReScript" - ], - "engines": { - "node": ">=18.0.0" - }, - "author": "", - "license": "MIT", - "dependencies": { - "ts-node": "10.9.1", - "@rescript/react": "0.12.1", - "bignumber.js": "9.1.2", - "date-fns": "3.3.1", - "dotenv": "16.4.5", - "ethers": "6.8.0", - "express": "4.19.2", - "ink": "3.2.0", - "ink-big-text": "1.2.0", - "ink-spinner": "4.0.3", - "js-sdsl": "4.4.2", - "pino": "8.16.1", - "postgres": "3.4.1", - "react": "18.2.0", - "rescript": "11.1.3", - "rescript-envsafe": "5.0.0", - "rescript-schema": "9.3.0", - "envio": "2.32.12", - "viem": "2.21.0", - "yargs": "17.7.2", - "prom-client": "15.0.0" - } -} diff --git a/apps/hypersync-indexer/generated/persisted_state.envio.json b/apps/hypersync-indexer/generated/persisted_state.envio.json deleted file mode 100644 index 05e2dd24b..000000000 --- a/apps/hypersync-indexer/generated/persisted_state.envio.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "envio_version": "2.32.12", - "config_hash": "[236, 193, 166, 118, 4, 248, 28, 111, 95, 5, 106, 36, 46, 36, 209, 74, 248, 27, 4, 69, 60, 66, 65, 99, 48, 89, 175, 255, 204, 241, 113, 249]", - "schema_hash": 
"0713695db6e57c692ec36029c80572226826c973473597b63e3b085219363624", - "handler_files_hash": "a3b06148f0aa5235020dcf46a3d27d54c5cd69afcae844b9c31a8fd32d4bb275", - "abi_files_hash": "17400171c10de0ef7089db6132608fce07e5447f9329200c91096baffed85f32" -} diff --git a/apps/hypersync-indexer/generated/pnpm-lock.yaml b/apps/hypersync-indexer/generated/pnpm-lock.yaml deleted file mode 100644 index 9c65e6b8d..000000000 --- a/apps/hypersync-indexer/generated/pnpm-lock.yaml +++ /dev/null @@ -1,3012 +0,0 @@ -lockfileVersion: "9.0" - -settings: - autoInstallPeers: true - excludeLinksFromLockfile: false - -importers: - .: - dependencies: - "@rescript/react": - specifier: 0.12.1 - version: 0.12.1(react-dom@19.2.4(react@18.2.0))(react@18.2.0) - bignumber.js: - specifier: 9.1.2 - version: 9.1.2 - date-fns: - specifier: 3.3.1 - version: 3.3.1 - dotenv: - specifier: 16.4.5 - version: 16.4.5 - envio: - specifier: 2.32.12 - version: 2.32.12(typescript@6.0.2) - ethers: - specifier: 6.8.0 - version: 6.8.0 - express: - specifier: 4.19.2 - version: 4.19.2 - ink: - specifier: 3.2.0 - version: 3.2.0(react@18.2.0) - ink-big-text: - specifier: 1.2.0 - version: 1.2.0(ink@3.2.0(react@18.2.0))(react@18.2.0) - ink-spinner: - specifier: 4.0.3 - version: 4.0.3(ink@3.2.0(react@18.2.0))(react@18.2.0) - js-sdsl: - specifier: 4.4.2 - version: 4.4.2 - pino: - specifier: 8.16.1 - version: 8.16.1 - postgres: - specifier: 3.4.1 - version: 3.4.1 - prom-client: - specifier: 15.0.0 - version: 15.0.0 - react: - specifier: 18.2.0 - version: 18.2.0 - rescript: - specifier: 11.1.3 - version: 11.1.3 - rescript-envsafe: - specifier: 5.0.0 - version: 5.0.0(rescript-schema@9.3.0(rescript@11.1.3))(rescript@11.1.3) - rescript-schema: - specifier: 9.3.0 - version: 9.3.0(rescript@11.1.3) - ts-node: - specifier: 10.9.1 - version: 10.9.1(@types/node@18.15.13)(typescript@6.0.2) - viem: - specifier: 2.21.0 - version: 2.21.0(typescript@6.0.2) - yargs: - specifier: 17.7.2 - version: 17.7.2 - -packages: - 
"@adraffy/ens-normalize@1.10.0": - resolution: - { - integrity: sha512-nA9XHtlAkYfJxY7bce8DcN7eKxWWCWkU+1GR9d+U6MbNpfwQp8TI7vqOsBsMcHoT4mBu2kypKoSKnghEzOOq5Q==, - } - - "@cspotcode/source-map-support@0.8.1": - resolution: - { - integrity: sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==, - } - engines: { node: ">=12" } - - "@elastic/ecs-helpers@1.1.0": - resolution: - { - integrity: sha512-MDLb2aFeGjg46O5mLpdCzT5yOUDnXToJSrco2ShqGIXxNJaM8uJjX+4nd+hRYV4Vex8YJyDtOFEVBldQct6ndg==, - } - engines: { node: ">=10" } - - "@elastic/ecs-pino-format@1.4.0": - resolution: - { - integrity: sha512-eCSBUTgl8KbPyxky8cecDRLCYu2C1oFV4AZ72bEsI+TxXEvaljaL2kgttfzfu7gW+M89eCz55s49uF2t+YMTWA==, - } - engines: { node: ">=10" } - - "@envio-dev/hyperfuel-client-darwin-arm64@1.2.2": - resolution: - { - integrity: sha512-eQyd9kJCIz/4WCTjkjpQg80DA3pdneHP7qhJIVQ2ZG+Jew9o5XDG+uI0Y16AgGzZ6KGmJSJF6wyUaaAjJfbO1Q==, - } - engines: { node: ">= 10" } - cpu: [arm64] - os: [darwin] - - "@envio-dev/hyperfuel-client-darwin-x64@1.2.2": - resolution: - { - integrity: sha512-l7lRMSoyIiIvKZgQPfgqg7H1xnrQ37A8yUp4S2ys47R8f/wSCSrmMaY1u7n6CxVYCpR9fajwy0/356UgwwhVKw==, - } - engines: { node: ">= 10" } - cpu: [x64] - os: [darwin] - - "@envio-dev/hyperfuel-client-linux-arm64-gnu@1.2.2": - resolution: - { - integrity: sha512-kNiC/1fKuXnoSxp8yEsloDw4Ot/mIcNoYYGLl2CipSIpBtSuiBH5nb6eBcxnRZdKOwf5dKZtZ7MVPL9qJocNJw==, - } - engines: { node: ">= 10" } - cpu: [arm64] - os: [linux] - - "@envio-dev/hyperfuel-client-linux-x64-gnu@1.2.2": - resolution: - { - integrity: sha512-XDkvkBG/frS+xiZkJdY4KqOaoAwyxPdi2MysDQgF8NmZdssi32SWch0r4LTqKWLLlCBg9/R55POeXL5UAjg2wQ==, - } - engines: { node: ">= 10" } - cpu: [x64] - os: [linux] - - "@envio-dev/hyperfuel-client-linux-x64-musl@1.2.2": - resolution: - { - integrity: sha512-DKnKJJSwsYtA7YT0EFGhFB5Eqoo42X0l0vZBv4lDuxngEXiiNjeLemXoKQVDzhcbILD7eyXNa5jWUc+2hpmkEg==, - } - engines: { node: ">= 10" } - cpu: [x64] - os: [linux] - - 
"@envio-dev/hyperfuel-client-win32-x64-msvc@1.2.2": - resolution: - { - integrity: sha512-SwIgTAVM9QhCFPyHwL+e1yQ6o3paV6q25klESkXw+r/KW9QPhOOyA6Yr8nfnur3uqMTLJHAKHTLUnkyi/Nh7Aw==, - } - engines: { node: ">= 10" } - cpu: [x64] - os: [win32] - - "@envio-dev/hyperfuel-client@1.2.2": - resolution: - { - integrity: sha512-raKA6DshYSle0sAOHBV1OkSRFMN+Mkz8sFiMmS3k+m5nP6pP56E17CRRePBL5qmR6ZgSEvGOz/44QUiKNkK9Pg==, - } - engines: { node: ">= 10" } - - "@envio-dev/hypersync-client-darwin-arm64@1.3.0": - resolution: - { - integrity: sha512-JZwiVRbMSuJnKsVUpfjTHc3YgAMvGlyuqWQxVc7Eok4Xp/sZLUCXRQUykbCh6fOUWRmoa2JG/ykP/NotoTRCBg==, - } - engines: { node: ">= 10" } - cpu: [arm64] - os: [darwin] - - "@envio-dev/hypersync-client-darwin-x64@1.3.0": - resolution: - { - integrity: sha512-2eSzQqqqFBMK2enVucYGcny5Ep4DEKYxf3Xme7z9qp2d3c6fMcbVvM4Gt8KOzb7ySjwJ2gU+qY2h545T2NiJXQ==, - } - engines: { node: ">= 10" } - cpu: [x64] - os: [darwin] - - "@envio-dev/hypersync-client-linux-arm64-gnu@1.3.0": - resolution: - { - integrity: sha512-gsjMp3WKekwnA89HvJXvcTM3BE5wVFG/qTF4rmk3rGiXhZ+MGaZQKrYRAhnzQZblueFtF/xnnBYpO35Z3ZFThg==, - } - engines: { node: ">= 10" } - cpu: [arm64] - os: [linux] - - "@envio-dev/hypersync-client-linux-x64-gnu@1.3.0": - resolution: - { - integrity: sha512-Lkvi4lRVwCyFOXf9LYH2X91zmW2l1vbfojKhTwKgqFWv6PMN5atlYjt+/NcUCAAhk5EUavWGjoikwnvLp870cg==, - } - engines: { node: ">= 10" } - cpu: [x64] - os: [linux] - - "@envio-dev/hypersync-client-linux-x64-musl@1.3.0": - resolution: - { - integrity: sha512-UIjB/gUX2sl23EMXLBxqtkgMnOjNSiaHK+CSU5vXMXkzL3fOGbz24bvyaPsSv82cxCFEE0yTwlSKkCX6/L8o6Q==, - } - engines: { node: ">= 10" } - cpu: [x64] - os: [linux] - - "@envio-dev/hypersync-client@1.3.0": - resolution: - { - integrity: sha512-wUdfZzbsFPbGq6n/1mmUMsWuiAil+m+fL/GBX5LGUyMJV86TXy2SBtAqYYNyDxWLO6gvGr6PYKrP8pLVAUZDZg==, - } - engines: { node: ">= 10" } - - "@jridgewell/resolve-uri@3.1.2": - resolution: - { - integrity: 
sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==, - } - engines: { node: ">=6.0.0" } - - "@jridgewell/sourcemap-codec@1.5.5": - resolution: - { - integrity: sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==, - } - - "@jridgewell/trace-mapping@0.3.9": - resolution: - { - integrity: sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==, - } - - "@noble/curves@1.2.0": - resolution: - { - integrity: sha512-oYclrNgRaM9SsBUBVbb8M6DTV7ZHRTKugureoYEncY5c65HOmRzvSiTE3y5CYaPYJA/GVkrhXEoF0M3Ya9PMnw==, - } - - "@noble/curves@1.4.0": - resolution: - { - integrity: sha512-p+4cb332SFCrReJkCYe8Xzm0OWi4Jji5jVdIZRL/PmacmDkFNw6MrrV+gGpiPxLHbV+zKFRywUWbaseT+tZRXg==, - } - - "@noble/hashes@1.3.2": - resolution: - { - integrity: sha512-MVC8EAQp7MvEcm30KWENFjgR+Mkmf+D189XJTkFIlwohU5hcBbn1ZkKq7KVTi2Hme3PMGF390DaL52beVrIihQ==, - } - engines: { node: ">= 16" } - - "@noble/hashes@1.4.0": - resolution: - { - integrity: sha512-V1JJ1WTRUqHHrOSh597hURcMqVKVGL/ea3kv0gSnEdsEZ0/+VyPghM1lMNGc00z7CIQorSvbKpuJkxvuHbvdbg==, - } - engines: { node: ">= 16" } - - "@opentelemetry/api@1.9.1": - resolution: - { - integrity: sha512-gLyJlPHPZYdAk1JENA9LeHejZe1Ti77/pTeFm/nMXmQH/HFZlcS/O2XJB+L8fkbrNSqhdtlvjBVjxwUYanNH5Q==, - } - engines: { node: ">=8.0.0" } - - "@rescript/react@0.12.1": - resolution: - { - integrity: sha512-ZD7nhDr5FZgLYqRH9s4CNM+LRz/3IMuTb+LH12fd2Akk0xYkYUP+DZveB2VQUC2UohJnTf/c8yPSNsiFihVCCg==, - } - peerDependencies: - react: ">=18.0.0" - react-dom: ">=18.0.0" - - "@scure/base@1.1.9": - resolution: - { - integrity: sha512-8YKhl8GHiNI/pU2VMaofa2Tor7PJRAjwQLBBuilkJ9L5+13yVbC7JO/wS7piioAvPSwR3JKM1IJ/u4xQzbcXKg==, - } - - "@scure/bip32@1.4.0": - resolution: - { - integrity: sha512-sVUpc0Vq3tXCkDGYVWGIZTRfnvu8LoTDaev7vbwh0omSvVORONr960MQWdKqJDCReIEmTj3PAr73O3aoxz7OPg==, - } - - "@scure/bip39@1.3.0": - resolution: - { - integrity: 
sha512-disdg7gHuTDZtY+ZdkmLpPCk7fxZSu3gBiEGuoC1XYxv9cGx3Z6cpTggCgW6odSOOIXCiDjuGejW+aJKCY/pIQ==, - } - - "@tsconfig/node10@1.0.12": - resolution: - { - integrity: sha512-UCYBaeFvM11aU2y3YPZ//O5Rhj+xKyzy7mvcIoAjASbigy8mHMryP5cK7dgjlz2hWxh1g5pLw084E0a/wlUSFQ==, - } - - "@tsconfig/node12@1.0.11": - resolution: - { - integrity: sha512-cqefuRsh12pWyGsIoBKJA9luFu3mRxCA+ORZvA4ktLSzIuCUtWVxGIuXigEwO5/ywWFMZ2QEGKWvkZG1zDMTag==, - } - - "@tsconfig/node14@1.0.3": - resolution: - { - integrity: sha512-ysT8mhdixWK6Hw3i1V2AeRqZ5WfXg1G43mqoYlM2nc6388Fq5jcXyr5mRsqViLx/GJYdoL0bfXD8nmF+Zn/Iow==, - } - - "@tsconfig/node16@1.0.4": - resolution: - { - integrity: sha512-vxhUy4J8lyeyinH7Azl1pdd43GJhZH/tP2weN8TntQblOY+A0XbT8DJk1/oCPuOOyg/Ja757rG0CgHcWC8OfMA==, - } - - "@types/node@18.15.13": - resolution: - { - integrity: sha512-N+0kuo9KgrUQ1Sn/ifDXsvg0TTleP7rIy4zOBGECxAljqvqfqpTfzx0Q1NUedOixRMBfe2Whhb056a42cWs26Q==, - } - - "@types/yoga-layout@1.9.2": - resolution: - { - integrity: sha512-S9q47ByT2pPvD65IvrWp7qppVMpk9WGMbVq9wbWZOHg6tnXSD4vyhao6nOSBwwfDdV2p3Kx9evA9vI+XWTfDvw==, - } - - abitype@1.0.5: - resolution: - { - integrity: sha512-YzDhti7cjlfaBhHutMaboYB21Ha3rXR9QTkNJFzYC4kC8YclaiwPBBBJY8ejFdu2wnJeZCVZSMlQJ7fi8S6hsw==, - } - peerDependencies: - typescript: ">=5.0.4" - zod: ^3 >=3.22.0 - peerDependenciesMeta: - typescript: - optional: true - zod: - optional: true - - abort-controller@3.0.0: - resolution: - { - integrity: sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==, - } - engines: { node: ">=6.5" } - - accepts@1.3.8: - resolution: - { - integrity: sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw==, - } - engines: { node: ">= 0.6" } - - acorn-walk@8.3.5: - resolution: - { - integrity: sha512-HEHNfbars9v4pgpW6SO1KSPkfoS0xVOM/9UzkJltjlsHZmJasxg8aXkuZa7SMf8vKGIBhpUsPluQSqhJFCqebw==, - } - engines: { node: ">=0.4.0" } - - acorn@8.16.0: - resolution: - { - integrity: 
sha512-UVJyE9MttOsBQIDKw1skb9nAwQuR5wuGD3+82K6JgJlm/Y+KI92oNsMNGZCYdDsVtRHSak0pcV5Dno5+4jh9sw==, - } - engines: { node: ">=0.4.0" } - hasBin: true - - aes-js@4.0.0-beta.5: - resolution: - { - integrity: sha512-G965FqalsNyrPqgEGON7nIx1e/OVENSgiEIzyC63haUMuvNnwIgIjMs52hlTCKhkBny7A2ORNlfY9Zu+jmGk1Q==, - } - - ajv@6.14.0: - resolution: - { - integrity: sha512-IWrosm/yrn43eiKqkfkHis7QioDleaXQHdDVPKg0FSwwd/DuvyX79TZnFOnYpB7dcsFAMmtFztZuXPDvSePkFw==, - } - - ansi-escapes@4.3.2: - resolution: - { - integrity: sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ==, - } - engines: { node: ">=8" } - - ansi-regex@5.0.1: - resolution: - { - integrity: sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==, - } - engines: { node: ">=8" } - - ansi-styles@4.3.0: - resolution: - { - integrity: sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==, - } - engines: { node: ">=8" } - - arg@4.1.3: - resolution: - { - integrity: sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==, - } - - array-flatten@1.1.1: - resolution: - { - integrity: sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg==, - } - - astral-regex@2.0.0: - resolution: - { - integrity: sha512-Z7tMw1ytTXt5jqMcOP+OQteU1VuNK9Y02uuJtKQ1Sv69jXQKKg5cibLwGJow8yzZP+eAc18EmLGPal0bp36rvQ==, - } - engines: { node: ">=8" } - - atomic-sleep@1.0.0: - resolution: - { - integrity: sha512-kNOjDqAh7px0XWNI+4QbzoiR/nTkHAWNud2uvnJquD1/x5a7EQZMJT0AczqK0Qn67oY/TTQ1LbUKajZpp3I9tQ==, - } - engines: { node: ">=8.0.0" } - - auto-bind@4.0.0: - resolution: - { - integrity: sha512-Hdw8qdNiqdJ8LqT0iK0sVzkFbzg6fhnQqqfWhBDxcHZvU75+B+ayzTy8x+k5Ix0Y92XOhOUlx74ps+bA6BeYMQ==, - } - engines: { node: ">=8" } - - balanced-match@1.0.2: - resolution: - { - integrity: 
sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==, - } - - base64-js@1.5.1: - resolution: - { - integrity: sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==, - } - - bignumber.js@9.1.2: - resolution: - { - integrity: sha512-2/mKyZH9K85bzOEfhXDBFZTGd1CTs+5IHpeFQo9luiBG7hghdC851Pj2WAhb6E3R6b9tZj/XKhbg4fum+Kepug==, - } - - bintrees@1.0.2: - resolution: - { - integrity: sha512-VOMgTMwjAaUG580SXn3LacVgjurrbMme7ZZNYGSSV7mmtY6QQRh0Eg3pwIcntQ77DErK1L0NxkbetjcoXzVwKw==, - } - - body-parser@1.20.2: - resolution: - { - integrity: sha512-ml9pReCu3M61kGlqoTm2umSXTlRTuGTx0bfYj+uIUKKYycG5NtSbeetV3faSU6R7ajOPw0g/J1PvK4qNy7s5bA==, - } - engines: { node: ">= 0.8", npm: 1.2.8000 || >= 1.4.16 } - - brace-expansion@2.0.3: - resolution: - { - integrity: sha512-MCV/fYJEbqx68aE58kv2cA/kiky1G8vux3OR6/jbS+jIMe/6fJWa0DTzJU7dqijOWYwHi1t29FlfYI9uytqlpA==, - } - - buffer@6.0.3: - resolution: - { - integrity: sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==, - } - - bytes@3.1.2: - resolution: - { - integrity: sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==, - } - engines: { node: ">= 0.8" } - - call-bind-apply-helpers@1.0.2: - resolution: - { - integrity: sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==, - } - engines: { node: ">= 0.4" } - - call-bound@1.0.4: - resolution: - { - integrity: sha512-+ys997U96po4Kx/ABpBCqhA9EuxJaQWDQg7295H4hBphv3IZg0boBKuwYpt4YXp6MZ5AmZQnU/tyMTlRpaSejg==, - } - engines: { node: ">= 0.4" } - - cfonts@2.10.1: - resolution: - { - integrity: sha512-l5IcLv4SaOdL/EGR6BpOF5SEro88VcGJJ6+xbvJb+wXi19YC6UeHE/brv7a4vIcLZopnt3Ys3zWeNnyfB04UPg==, - } - engines: { node: ">=10" } - hasBin: true - - chalk@4.1.2: - resolution: - { - integrity: sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==, - } - engines: 
{ node: ">=10" } - - ci-info@2.0.0: - resolution: - { - integrity: sha512-5tK7EtrZ0N+OLFMthtqOj4fI2Jeb88C4CAZPu25LDVUgXJ0A3Js4PMGqrn0JU1W0Mh1/Z8wZzYPxqUrXeBboCQ==, - } - - cli-boxes@2.2.1: - resolution: - { - integrity: sha512-y4coMcylgSCdVinjiDBuR8PCC2bLjyGTwEmPb9NHR/QaNU6EUOXcTY/s6VjGMD6ENSEaeQYHCY0GNGS5jfMwPw==, - } - engines: { node: ">=6" } - - cli-cursor@3.1.0: - resolution: - { - integrity: sha512-I/zHAwsKf9FqGoXM4WWRACob9+SNukZTd94DWF57E4toouRulbCxcUh6RKUEOQlYTHJnzkPMySvPNaaSLNfLZw==, - } - engines: { node: ">=8" } - - cli-spinners@2.9.2: - resolution: - { - integrity: sha512-ywqV+5MmyL4E7ybXgKys4DugZbX0FC6LnwrhjuykIjnK9k8OQacQ7axGKnjDXWNhns0xot3bZI5h55H8yo9cJg==, - } - engines: { node: ">=6" } - - cli-truncate@2.1.0: - resolution: - { - integrity: sha512-n8fOixwDD6b/ObinzTrp1ZKFzbgvKZvuz/TvejnLn1aQfC6r52XEx85FmuC+3HI+JM7coBRXUvNqEU2PHVrHpg==, - } - engines: { node: ">=8" } - - cliui@8.0.1: - resolution: - { - integrity: sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==, - } - engines: { node: ">=12" } - - code-excerpt@3.0.0: - resolution: - { - integrity: sha512-VHNTVhd7KsLGOqfX3SyeO8RyYPMp1GJOg194VITk04WMYCv4plV68YWe6TJZxd9MhobjtpMRnVky01gqZsalaw==, - } - engines: { node: ">=10" } - - color-convert@2.0.1: - resolution: - { - integrity: sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==, - } - engines: { node: ">=7.0.0" } - - color-name@1.1.4: - resolution: - { - integrity: sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==, - } - - colorette@2.0.20: - resolution: - { - integrity: sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w==, - } - - content-disposition@0.5.4: - resolution: - { - integrity: sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ==, - } - engines: { node: ">= 0.6" } - - content-type@1.0.5: - resolution: - { - integrity: 
sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==, - } - engines: { node: ">= 0.6" } - - convert-to-spaces@1.0.2: - resolution: - { - integrity: sha512-cj09EBuObp9gZNQCzc7hByQyrs6jVGE+o9kSJmeUoj+GiPiJvi5LYqEH/Hmme4+MTLHM+Ejtq+FChpjjEnsPdQ==, - } - engines: { node: ">= 4" } - - cookie-signature@1.0.6: - resolution: - { - integrity: sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ==, - } - - cookie@0.6.0: - resolution: - { - integrity: sha512-U71cyTamuh1CRNCfpGY6to28lxvNwPG4Guz/EVjgf3Jmzv0vlDp1atT9eS5dDjMYHucpHbWns6Lwf3BKz6svdw==, - } - engines: { node: ">= 0.6" } - - create-require@1.1.1: - resolution: - { - integrity: sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==, - } - - date-fns@3.3.1: - resolution: - { - integrity: sha512-y8e109LYGgoQDveiEBD3DYXKba1jWf5BA8YU1FL5Tvm0BTdEfy54WLCwnuYWZNnzzvALy/QQ4Hov+Q9RVRv+Zw==, - } - - dateformat@4.6.3: - resolution: - { - integrity: sha512-2P0p0pFGzHS5EMnhdxQi7aJN+iMheud0UhG4dlE1DLAlvL8JHjJJTX/CSm4JXwV0Ka5nGk3zC5mcb5bUQUxxMA==, - } - - debug@2.6.9: - resolution: - { - integrity: sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==, - } - peerDependencies: - supports-color: "*" - peerDependenciesMeta: - supports-color: - optional: true - - deepmerge@4.3.1: - resolution: - { - integrity: sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A==, - } - engines: { node: ">=0.10.0" } - - define-property@1.0.0: - resolution: - { - integrity: sha512-cZTYKFWspt9jZsMscWo8sc/5lbPC9Q0N5nBLgb+Yd915iL3udB1uFgS3B8YCx66UVHq018DAVFoee7x+gxggeA==, - } - engines: { node: ">=0.10.0" } - - depd@2.0.0: - resolution: - { - integrity: sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==, - } - engines: { node: ">= 0.8" } - - destroy@1.2.0: - resolution: - { - integrity: 
sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg==, - } - engines: { node: ">= 0.8", npm: 1.2.8000 || >= 1.4.16 } - - diff@4.0.4: - resolution: - { - integrity: sha512-X07nttJQkwkfKfvTPG/KSnE2OMdcUCao6+eXF3wmnIQRn2aPAHH3VxDbDOdegkd6JbPsXqShpvEOHfAT+nCNwQ==, - } - engines: { node: ">=0.3.1" } - - dotenv@16.4.5: - resolution: - { - integrity: sha512-ZmdL2rui+eB2YwhsWzjInR8LldtZHGDoQ1ugH85ppHKwpUHL7j7rN0Ti9NCnGiQbhaZ11FpR+7ao1dNsmduNUg==, - } - engines: { node: ">=12" } - - dunder-proto@1.0.1: - resolution: - { - integrity: sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==, - } - engines: { node: ">= 0.4" } - - ee-first@1.1.1: - resolution: - { - integrity: sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==, - } - - emoji-regex@8.0.0: - resolution: - { - integrity: sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==, - } - - encodeurl@1.0.2: - resolution: - { - integrity: sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w==, - } - engines: { node: ">= 0.8" } - - end-of-stream@1.4.5: - resolution: - { - integrity: sha512-ooEGc6HP26xXq/N+GCGOT0JKCLDGrq2bQUZrQ7gyrJiZANJ/8YDTxTpQBXGMn+WbIQXNVpyWymm7KYVICQnyOg==, - } - - envio-darwin-arm64@2.32.12: - resolution: - { - integrity: sha512-TLs9jjXUHVqKcBReMHgD7C06lbfWfnMkit3uT55XmgiJYc8zS85T0XmDCnCX4BRbZN7uzMNORqnUc2J3/LR9sQ==, - } - cpu: [arm64] - os: [darwin] - - envio-darwin-x64@2.32.12: - resolution: - { - integrity: sha512-JfKU3LaqxO/aabEAIvpHGKhDGNEiVGvcmmi98cZfG1/vP4S5lO+8KDEp563CaB986N6KtGJRKnDWivvCsseZMw==, - } - cpu: [x64] - os: [darwin] - - envio-linux-arm64@2.32.12: - resolution: - { - integrity: sha512-3sBfuR6JLcAkrFcoEfw2WiaPU3VyXGy4kf26HB5BJE/iJUqha+wHoDbv46MfFGuaC0QyM34QvlG0yGRES0ohPw==, - } - cpu: [arm64] - os: [linux] - - envio-linux-x64@2.32.12: - resolution: - { - integrity: 
sha512-886q+yztKVrhgkwOfoFKARDStbjk1032YBtA6tqrCN8uWjqgzAf30ZDPurJGlq26hQqYNKRp2LhgxChpivsvFw==, - } - cpu: [x64] - os: [linux] - - envio@2.32.12: - resolution: - { - integrity: sha512-bk9y/AjU+kYxO1a9c/jg8RFDrKKKWU0wCffnwtoXo7KGKmPDKq1WyNzVw6sTeboSfGB0i82hJ97WgSAwRAnR1Q==, - } - hasBin: true - - es-define-property@1.0.1: - resolution: - { - integrity: sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==, - } - engines: { node: ">= 0.4" } - - es-errors@1.3.0: - resolution: - { - integrity: sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==, - } - engines: { node: ">= 0.4" } - - es-object-atoms@1.1.1: - resolution: - { - integrity: sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==, - } - engines: { node: ">= 0.4" } - - escalade@3.2.0: - resolution: - { - integrity: sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==, - } - engines: { node: ">=6" } - - escape-html@1.0.3: - resolution: - { - integrity: sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==, - } - - escape-string-regexp@2.0.0: - resolution: - { - integrity: sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w==, - } - engines: { node: ">=8" } - - etag@1.8.1: - resolution: - { - integrity: sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==, - } - engines: { node: ">= 0.6" } - - ethers@6.8.0: - resolution: - { - integrity: sha512-zrFbmQRlraM+cU5mE4CZTLBurZTs2gdp2ld0nG/f3ecBK+x6lZ69KSxBqZ4NjclxwfTxl5LeNufcBbMsTdY53Q==, - } - engines: { node: ">=14.0.0" } - - event-target-shim@5.0.1: - resolution: - { - integrity: sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==, - } - engines: { node: ">=6" } - - events@3.3.0: - resolution: - { - integrity: 
sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==, - } - engines: { node: ">=0.8.x" } - - eventsource-parser@3.0.6: - resolution: - { - integrity: sha512-Vo1ab+QXPzZ4tCa8SwIHJFaSzy4R6SHf7BY79rFBDf0idraZWAkYrDjDj8uWaSm3S2TK+hJ7/t1CEmZ7jXw+pg==, - } - engines: { node: ">=18.0.0" } - - eventsource@4.1.0: - resolution: - { - integrity: sha512-2GuF51iuHX6A9xdTccMTsNb7VO0lHZihApxhvQzJB5A03DvHDd2FQepodbMaztPBmBcE/ox7o2gqaxGhYB9LhQ==, - } - engines: { node: ">=20.0.0" } - - express@4.19.2: - resolution: - { - integrity: sha512-5T6nhjsT+EOMzuck8JjBHARTHfMht0POzlA60WV2pMD3gyXw2LZnZ+ueGdNxG+0calOJcWKbpFcuzLZ91YWq9Q==, - } - engines: { node: ">= 0.10.0" } - - fast-copy@3.0.2: - resolution: - { - integrity: sha512-dl0O9Vhju8IrcLndv2eU4ldt1ftXMqqfgN4H1cpmGV7P6jeB9FwpN9a2c8DPGE1Ys88rNUJVYDHq73CGAGOPfQ==, - } - - fast-deep-equal@3.1.3: - resolution: - { - integrity: sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==, - } - - fast-json-stable-stringify@2.1.0: - resolution: - { - integrity: sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==, - } - - fast-json-stringify@2.7.13: - resolution: - { - integrity: sha512-ar+hQ4+OIurUGjSJD1anvYSDcUflywhKjfxnsW4TBTD7+u0tJufv6DKRWoQk3vI6YBOWMoz0TQtfbe7dxbQmvA==, - } - engines: { node: ">= 10.0.0" } - - fast-redact@3.5.0: - resolution: - { - integrity: sha512-dwsoQlS7h9hMeYUq1W++23NDcBLV4KqONnITDV9DjfS3q1SgDGVrBdvvTLUotWtPSD7asWDV9/CmsZPy8Hf70A==, - } - engines: { node: ">=6" } - - fast-safe-stringify@2.1.1: - resolution: - { - integrity: sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA==, - } - - finalhandler@1.2.0: - resolution: - { - integrity: sha512-5uXcUVftlQMFnWC9qu/svkWv3GTd2PfUhK/3PLkYNAe7FbqJMt3515HaxE6eRL74GdsriiwujiawdaB1BpEISg==, - } - engines: { node: ">= 0.8" } - - forwarded@0.2.0: - resolution: - { - integrity: 
sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==, - } - engines: { node: ">= 0.6" } - - fresh@0.5.2: - resolution: - { - integrity: sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q==, - } - engines: { node: ">= 0.6" } - - fs.realpath@1.0.0: - resolution: - { - integrity: sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==, - } - - function-bind@1.1.2: - resolution: - { - integrity: sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==, - } - - get-caller-file@2.0.5: - resolution: - { - integrity: sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==, - } - engines: { node: 6.* || 8.* || >= 10.* } - - get-intrinsic@1.3.0: - resolution: - { - integrity: sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==, - } - engines: { node: ">= 0.4" } - - get-proto@1.0.1: - resolution: - { - integrity: sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==, - } - engines: { node: ">= 0.4" } - - glob@8.1.0: - resolution: - { - integrity: sha512-r8hpEjiQEYlF2QU0df3dS+nxxSIreXQS1qRhMJM0Q5NDdR386C7jb7Hwwod8Fgiuex+k0GFjgft18yvxm5XoCQ==, - } - engines: { node: ">=12" } - deprecated: Old versions of glob are not supported, and contain widely publicized security vulnerabilities, which have been fixed in the current version. Please update. 
Support for old versions may be purchased (at exorbitant rates) by contacting i@izs.me - - gopd@1.2.0: - resolution: - { - integrity: sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==, - } - engines: { node: ">= 0.4" } - - has-flag@4.0.0: - resolution: - { - integrity: sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==, - } - engines: { node: ">=8" } - - has-symbols@1.1.0: - resolution: - { - integrity: sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==, - } - engines: { node: ">= 0.4" } - - hasown@2.0.2: - resolution: - { - integrity: sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==, - } - engines: { node: ">= 0.4" } - - help-me@4.2.0: - resolution: - { - integrity: sha512-TAOnTB8Tz5Dw8penUuzHVrKNKlCIbwwbHnXraNJxPwf8LRtE2HlM84RYuezMFcwOJmoYOCWVDyJ8TQGxn9PgxA==, - } - - http-errors@2.0.0: - resolution: - { - integrity: sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ==, - } - engines: { node: ">= 0.8" } - - iconv-lite@0.4.24: - resolution: - { - integrity: sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==, - } - engines: { node: ">=0.10.0" } - - ieee754@1.2.1: - resolution: - { - integrity: sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==, - } - - indent-string@4.0.0: - resolution: - { - integrity: sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==, - } - engines: { node: ">=8" } - - inflight@1.0.6: - resolution: - { - integrity: sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==, - } - deprecated: This module is not supported, and leaks memory. Do not use it. 
Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful. - - inherits@2.0.4: - resolution: - { - integrity: sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==, - } - - ink-big-text@1.2.0: - resolution: - { - integrity: sha512-xDfn8oOhiji9c4wojTKSaBnEfgpTTd3KL7jsMYVht4SbpfLdSKvVZiMi3U5v45eSjLm1ycMmeMWAP1G99lWL5Q==, - } - engines: { node: ">=8" } - peerDependencies: - ink: ">=2.0.0" - react: ">=16.8.0" - - ink-spinner@4.0.3: - resolution: - { - integrity: sha512-uJ4nbH00MM9fjTJ5xdw0zzvtXMkeGb0WV6dzSWvFv2/+ks6FIhpkt+Ge/eLdh0Ah6Vjw5pLMyNfoHQpRDRVFbQ==, - } - engines: { node: ">=10" } - peerDependencies: - ink: ">=3.0.5" - react: ">=16.8.2" - - ink@3.2.0: - resolution: - { - integrity: sha512-firNp1q3xxTzoItj/eOOSZQnYSlyrWks5llCTVX37nJ59K3eXbQ8PtzCguqo8YI19EELo5QxaKnJd4VxzhU8tg==, - } - engines: { node: ">=10" } - peerDependencies: - "@types/react": ">=16.8.0" - react: ">=16.8.0" - peerDependenciesMeta: - "@types/react": - optional: true - - ipaddr.js@1.9.1: - resolution: - { - integrity: sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==, - } - engines: { node: ">= 0.10" } - - is-accessor-descriptor@1.0.1: - resolution: - { - integrity: sha512-YBUanLI8Yoihw923YeFUS5fs0fF2f5TSFTNiYAAzhhDscDa3lEqYuz1pDOEP5KvX94I9ey3vsqjJcLVFVU+3QA==, - } - engines: { node: ">= 0.10" } - - is-buffer@1.1.6: - resolution: - { - integrity: sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w==, - } - - is-ci@2.0.0: - resolution: - { - integrity: sha512-YfJT7rkpQB0updsdHLGWrvhBJfcfzNNawYDNIyQXJz0IViGf75O8EBPKSdvw2rF+LGCsX4FZ8tcr3b19LcZq4w==, - } - hasBin: true - - is-data-descriptor@1.0.1: - resolution: - { - integrity: sha512-bc4NlCDiCr28U4aEsQ3Qs2491gVq4V8G7MQyws968ImqjKuYtTJXrl7Vq7jsN7Ly/C3xj5KWFrY7sHNeDkAzXw==, - } - engines: { node: ">= 0.4" } - - is-descriptor@1.0.3: - 
resolution: - { - integrity: sha512-JCNNGbwWZEVaSPtS45mdtrneRWJFp07LLmykxeFV5F6oBvNF8vHSfJuJgoT472pSfk+Mf8VnlrspaFBHWM8JAw==, - } - engines: { node: ">= 0.4" } - - is-fullwidth-code-point@3.0.0: - resolution: - { - integrity: sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==, - } - engines: { node: ">=8" } - - is-number@3.0.0: - resolution: - { - integrity: sha512-4cboCqIpliH+mAvFNegjZQ4kgKc3ZUhQVr3HvWbSh5q3WH2v82ct+T2Y1hdU5Gdtorx/cLifQjqCbL7bpznLTg==, - } - engines: { node: ">=0.10.0" } - - isows@1.0.4: - resolution: - { - integrity: sha512-hEzjY+x9u9hPmBom9IIAqdJCwNLax+xrPb51vEPpERoFlIxgmZcHzsT5jKG06nvInKOBGvReAVz80Umed5CczQ==, - } - peerDependencies: - ws: "*" - - joycon@3.1.1: - resolution: - { - integrity: sha512-34wB/Y7MW7bzjKRjUKTa46I2Z7eV62Rkhva+KkopW7Qvv/OSWBqvkSY7vusOPrNuZcUG3tApvdVgNB8POj3SPw==, - } - engines: { node: ">=10" } - - js-sdsl@4.4.2: - resolution: - { - integrity: sha512-dwXFwByc/ajSV6m5bcKAPwe4yDDF6D614pxmIi5odytzxRlwqF6nwoiCek80Ixc7Cvma5awClxrzFtxCQvcM8w==, - } - - js-tokens@4.0.0: - resolution: - { - integrity: sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==, - } - - json-schema-traverse@0.4.1: - resolution: - { - integrity: sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==, - } - - kind-of@3.2.2: - resolution: - { - integrity: sha512-NOW9QQXMoZGg/oqnVNoNTTIFEIid1627WCffUBJEdMxYApq7mNE7CpzucIPc+ZQg25Phej7IJSmX3hO+oblOtQ==, - } - engines: { node: ">=0.10.0" } - - lodash@4.18.1: - resolution: - { - integrity: sha512-dMInicTPVE8d1e5otfwmmjlxkZoUpiVLwyeTdUsi/Caj/gfzzblBcCE5sRHV/AsjuCmxWrte2TNGSYuCeCq+0Q==, - } - - loose-envify@1.4.0: - resolution: - { - integrity: sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==, - } - hasBin: true - - make-error@1.3.6: - resolution: - { - integrity: 
sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==, - } - - math-intrinsics@1.1.0: - resolution: - { - integrity: sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==, - } - engines: { node: ">= 0.4" } - - media-typer@0.3.0: - resolution: - { - integrity: sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ==, - } - engines: { node: ">= 0.6" } - - merge-descriptors@1.0.1: - resolution: - { - integrity: sha512-cCi6g3/Zr1iqQi6ySbseM1Xvooa98N0w31jzUYrXPX2xqObmFGHJ0tQ5u74H3mVh7wLouTseZyYIq39g8cNp1w==, - } - - methods@1.1.2: - resolution: - { - integrity: sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w==, - } - engines: { node: ">= 0.6" } - - mime-db@1.52.0: - resolution: - { - integrity: sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==, - } - engines: { node: ">= 0.6" } - - mime-types@2.1.35: - resolution: - { - integrity: sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==, - } - engines: { node: ">= 0.6" } - - mime@1.6.0: - resolution: - { - integrity: sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==, - } - engines: { node: ">=4" } - hasBin: true - - mimic-fn@2.1.0: - resolution: - { - integrity: sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==, - } - engines: { node: ">=6" } - - minimatch@5.1.9: - resolution: - { - integrity: sha512-7o1wEA2RyMP7Iu7GNba9vc0RWWGACJOCZBJX2GJWip0ikV+wcOsgVuY9uE8CPiyQhkGFSlhuSkZPavN7u1c2Fw==, - } - engines: { node: ">=10" } - - minimist@1.2.8: - resolution: - { - integrity: sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==, - } - - ms@2.0.0: - resolution: - { - integrity: 
sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==, - } - - ms@2.1.3: - resolution: - { - integrity: sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==, - } - - negotiator@0.6.3: - resolution: - { - integrity: sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==, - } - engines: { node: ">= 0.6" } - - object-assign@4.1.1: - resolution: - { - integrity: sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==, - } - engines: { node: ">=0.10.0" } - - object-inspect@1.13.4: - resolution: - { - integrity: sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew==, - } - engines: { node: ">= 0.4" } - - on-exit-leak-free@2.1.2: - resolution: - { - integrity: sha512-0eJJY6hXLGf1udHwfNftBqH+g73EU4B504nZeKpz1sYRKafAghwxEJunB2O7rDZkL4PGfsMVnTXZ2EjibbqcsA==, - } - engines: { node: ">=14.0.0" } - - on-finished@2.4.1: - resolution: - { - integrity: sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==, - } - engines: { node: ">= 0.8" } - - once@1.4.0: - resolution: - { - integrity: sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==, - } - - onetime@5.1.2: - resolution: - { - integrity: sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==, - } - engines: { node: ">=6" } - - parseurl@1.3.3: - resolution: - { - integrity: sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==, - } - engines: { node: ">= 0.8" } - - patch-console@1.0.0: - resolution: - { - integrity: sha512-nxl9nrnLQmh64iTzMfyylSlRozL7kAXIaxw1fVcLYdyhNkJCRUzirRZTikXGJsg+hc4fqpneTK6iU2H1Q8THSA==, - } - engines: { node: ">=10" } - - path-to-regexp@0.1.7: - resolution: - { - integrity: 
sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ==, - } - - pino-abstract-transport@1.1.0: - resolution: - { - integrity: sha512-lsleG3/2a/JIWUtf9Q5gUNErBqwIu1tUKTT3dUzaf5DySw9ra1wcqKjJjLX1VTY64Wk1eEOYsVGSaGfCK85ekA==, - } - - pino-abstract-transport@1.2.0: - resolution: - { - integrity: sha512-Guhh8EZfPCfH+PMXAb6rKOjGQEoy0xlAIn+irODG5kgfYV+BQ0rGYYWTIel3P5mmyXqkYkPmdIkywsn6QKUR1Q==, - } - - pino-pretty@10.2.3: - resolution: - { - integrity: sha512-4jfIUc8TC1GPUfDyMSlW1STeORqkoxec71yhxIpLDQapUu8WOuoz2TTCoidrIssyz78LZC69whBMPIKCMbi3cw==, - } - hasBin: true - - pino-std-serializers@6.2.2: - resolution: - { - integrity: sha512-cHjPPsE+vhj/tnhCy/wiMh3M3z3h/j15zHQX+S9GkTBgqJuTuJzYJ4gUyACLhDaJ7kk9ba9iRDmbH2tJU03OiA==, - } - - pino@8.16.1: - resolution: - { - integrity: sha512-3bKsVhBmgPjGV9pyn4fO/8RtoVDR8ssW1ev819FsRXlRNgW8gR/9Kx+gCK4UPWd4JjrRDLWpzd/pb1AyWm3MGA==, - } - hasBin: true - - postgres@3.4.1: - resolution: - { - integrity: sha512-Wasjv6WEzrZXbwKByR2RGD7MBfj7VBqco3hYWz8ifzSAp6tb2L6MlmcKFzkmgV1jT7/vKlcSa+lxXZeTdeVMzQ==, - } - engines: { node: ">=12" } - - process-warning@2.3.2: - resolution: - { - integrity: sha512-n9wh8tvBe5sFmsqlg+XQhaQLumwpqoAUruLwjCopgTmUBjJ/fjtBsJzKleCaIGBOMXYEhp1YfKl4d7rJ5ZKJGA==, - } - - process@0.11.10: - resolution: - { - integrity: sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A==, - } - engines: { node: ">= 0.6.0" } - - prom-client@15.0.0: - resolution: - { - integrity: sha512-UocpgIrKyA2TKLVZDSfm8rGkL13C19YrQBAiG3xo3aDFWcHedxRxI3z+cIcucoxpSO0h5lff5iv/SXoxyeopeA==, - } - engines: { node: ^16 || ^18 || >=20 } - - prop-types@15.8.1: - resolution: - { - integrity: sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg==, - } - - proxy-addr@2.0.7: - resolution: - { - integrity: sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==, - } - engines: { node: ">= 0.10" } - - 
pump@3.0.4: - resolution: - { - integrity: sha512-VS7sjc6KR7e1ukRFhQSY5LM2uBWAUPiOPa/A3mkKmiMwSmRFUITt0xuj+/lesgnCv+dPIEYlkzrcyXgquIHMcA==, - } - - punycode@2.3.1: - resolution: - { - integrity: sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==, - } - engines: { node: ">=6" } - - qs@6.11.0: - resolution: - { - integrity: sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q==, - } - engines: { node: ">=0.6" } - - quick-format-unescaped@4.0.4: - resolution: - { - integrity: sha512-tYC1Q1hgyRuHgloV/YXs2w15unPVh8qfu/qCTfhTYamaw7fyhumKa2yGpdSo87vY32rIclj+4fWYQXUMs9EHvg==, - } - - range-parser@1.2.1: - resolution: - { - integrity: sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==, - } - engines: { node: ">= 0.6" } - - raw-body@2.5.2: - resolution: - { - integrity: sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA==, - } - engines: { node: ">= 0.8" } - - react-devtools-core@4.28.5: - resolution: - { - integrity: sha512-cq/o30z9W2Wb4rzBefjv5fBalHU0rJGZCHAkf/RHSBWSSYwh8PlQTqqOJmgIIbBtpj27T6FIPXeomIjZtCNVqA==, - } - - react-dom@19.2.4: - resolution: - { - integrity: sha512-AXJdLo8kgMbimY95O2aKQqsz2iWi9jMgKJhRBAxECE4IFxfcazB2LmzloIoibJI3C12IlY20+KFaLv+71bUJeQ==, - } - peerDependencies: - react: ^19.2.4 - - react-is@16.13.1: - resolution: - { - integrity: sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==, - } - - react-reconciler@0.26.2: - resolution: - { - integrity: sha512-nK6kgY28HwrMNwDnMui3dvm3rCFjZrcGiuwLc5COUipBK5hWHLOxMJhSnSomirqWwjPBJKV1QcbkI0VJr7Gl1Q==, - } - engines: { node: ">=0.10.0" } - peerDependencies: - react: ^17.0.2 - - react@18.2.0: - resolution: - { - integrity: sha512-/3IjMdb2L9QbBdWiW5e3P2/npwMBaU9mHCSCUzNln0ZCYbcfTsGbTJrU/kGemdH2IWmB2ioZ+zkxtmq6g09fGQ==, - } - engines: { node: ">=0.10.0" } - - readable-stream@3.6.2: - resolution: - { 
- integrity: sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==, - } - engines: { node: ">= 6" } - - readable-stream@4.7.0: - resolution: - { - integrity: sha512-oIGGmcpTLwPga8Bn6/Z75SVaH1z5dUut2ibSyAMVhmUggWpmDn2dapB0n7f8nwaSiRtepAsfJyfXIO5DCVAODg==, - } - engines: { node: ^12.22.0 || ^14.17.0 || >=16.0.0 } - - real-require@0.2.0: - resolution: - { - integrity: sha512-57frrGM/OCTLqLOAh0mhVA9VBMHd+9U7Zb2THMGdBUoZVOtGbJzjxsYGDJ3A9AYYCP4hn6y1TVbaOfzWtm5GFg==, - } - engines: { node: ">= 12.13.0" } - - require-directory@2.1.1: - resolution: - { - integrity: sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==, - } - engines: { node: ">=0.10.0" } - - rescript-envsafe@5.0.0: - resolution: - { - integrity: sha512-xSQbNsFSSQEynvLWUYtI7GJJhzicACLTq5aO1tjgK0N2Vcm9qlrkcLSmnU8tTohebEu9zgm1V/xYY+oGeQgLvA==, - } - peerDependencies: - rescript: 11.x - rescript-schema: 9.x - - rescript-schema@9.3.0: - resolution: - { - integrity: sha512-NiHAjlhFKZCmNhx/Ij40YltCEJJgVNhBWTN/ZfagTg5hdWWuvCiUacxZv+Q/QQolrAhTnHnCrL7RDvZBogHl5A==, - } - peerDependencies: - rescript: 11.x - peerDependenciesMeta: - rescript: - optional: true - - rescript@11.1.3: - resolution: - { - integrity: sha512-bI+yxDcwsv7qE34zLuXeO8Qkc2+1ng5ErlSjnUIZdrAWKoGzHXpJ6ZxiiRBUoYnoMsgRwhqvrugIFyNgWasmsw==, - } - engines: { node: ">=10" } - hasBin: true - - restore-cursor@3.1.0: - resolution: - { - integrity: sha512-l+sSefzHpj5qimhFSE5a8nufZYAM3sBSVMAPtYkmC+4EH2anSGaEMXSD0izRQbu9nfyQ9y5JrVmp7E8oZrUjvA==, - } - engines: { node: ">=8" } - - rfdc@1.4.1: - resolution: - { - integrity: sha512-q1b3N5QkRUWUl7iyylaaj3kOpIT0N2i9MqIEQXP73GVsN9cw3fdx8X63cEmWhJGi2PPCF23Ijp7ktmd39rawIA==, - } - - safe-buffer@5.2.1: - resolution: - { - integrity: sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==, - } - - safe-stable-stringify@2.5.0: - resolution: - { - integrity: 
sha512-b3rppTKm9T+PsVCBEOUR46GWI7fdOs00VKZ1+9c1EWDaDMvjQc6tUwuFyIprgGgTcWoVHSKrU8H31ZHA2e0RHA==, - } - engines: { node: ">=10" } - - safer-buffer@2.1.2: - resolution: - { - integrity: sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==, - } - - scheduler@0.20.2: - resolution: - { - integrity: sha512-2eWfGgAqqWFGqtdMmcL5zCMK1U8KlXv8SQFGglL3CEtd0aDVDWgeF/YoCmvln55m5zSk3J/20hTaSBeSObsQDQ==, - } - - scheduler@0.27.0: - resolution: - { - integrity: sha512-eNv+WrVbKu1f3vbYJT/xtiF5syA5HPIMtf9IgY/nKg0sWqzAUEvqY/xm7OcZc/qafLx/iO9FgOmeSAp4v5ti/Q==, - } - - secure-json-parse@2.7.0: - resolution: - { - integrity: sha512-6aU+Rwsezw7VR8/nyvKTx8QpWH9FrcYiXXlqC4z5d5XQBDRqtbfsRjnwGyqbi3gddNtWHuEk9OANUotL26qKUw==, - } - - send@0.18.0: - resolution: - { - integrity: sha512-qqWzuOjSFOuqPjFe4NOsMLafToQQwBSOEpS+FwEt3A2V3vKubTquT3vmLTQpFgMXp8AlFWFuP1qKaJZOtPpVXg==, - } - engines: { node: ">= 0.8.0" } - - serve-static@1.15.0: - resolution: - { - integrity: sha512-XGuRDNjXUijsUL0vl6nSD7cwURuzEgglbOaFuZM9g3kwDXOWVTck0jLzjPzGD+TazWbboZYu52/9/XPdUgne9g==, - } - engines: { node: ">= 0.8.0" } - - setprototypeof@1.2.0: - resolution: - { - integrity: sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==, - } - - shell-quote@1.8.3: - resolution: - { - integrity: sha512-ObmnIF4hXNg1BqhnHmgbDETF8dLPCggZWBjkQfhZpbszZnYur5DUljTcCHii5LC3J5E0yeO/1LIMyH+UvHQgyw==, - } - engines: { node: ">= 0.4" } - - side-channel-list@1.0.0: - resolution: - { - integrity: sha512-FCLHtRD/gnpCiCHEiJLOwdmFP+wzCmDEkc9y7NsYxeF4u7Btsn1ZuwgwJGxImImHicJArLP4R0yX4c2KCrMrTA==, - } - engines: { node: ">= 0.4" } - - side-channel-map@1.0.1: - resolution: - { - integrity: sha512-VCjCNfgMsby3tTdo02nbjtM/ewra6jPHmpThenkTYh8pG9ucZ/1P8So4u4FGBek/BjpOVsDCMoLA/iuBKIFXRA==, - } - engines: { node: ">= 0.4" } - - side-channel-weakmap@1.0.2: - resolution: - { - integrity: 
sha512-WPS/HvHQTYnHisLo9McqBHOJk2FkHO/tlpvldyrnem4aeQp4hai3gythswg6p01oSoTl58rcpiFAjF2br2Ak2A==, - } - engines: { node: ">= 0.4" } - - side-channel@1.1.0: - resolution: - { - integrity: sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw==, - } - engines: { node: ">= 0.4" } - - signal-exit@3.0.7: - resolution: - { - integrity: sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==, - } - - slice-ansi@3.0.0: - resolution: - { - integrity: sha512-pSyv7bSTC7ig9Dcgbw9AuRNUb5k5V6oDudjZoMBSr13qpLBG7tB+zgCkARjq7xIUgdz5P1Qe8u+rSGdouOOIyQ==, - } - engines: { node: ">=8" } - - sonic-boom@3.8.1: - resolution: - { - integrity: sha512-y4Z8LCDBuum+PBP3lSV7RHrXscqksve/bi0as7mhwVnBW+/wUqKT/2Kb7um8yqcFy0duYbbPxzt89Zy2nOCaxg==, - } - - split2@4.2.0: - resolution: - { - integrity: sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg==, - } - engines: { node: ">= 10.x" } - - stack-utils@2.0.6: - resolution: - { - integrity: sha512-XlkWvfIm6RmsWtNJx+uqtKLS8eqFbxUg0ZzLXqY0caEy9l7hruX8IpiDnjsLavoBgqCCR71TqWO8MaXYheJ3RQ==, - } - engines: { node: ">=10" } - - statuses@2.0.1: - resolution: - { - integrity: sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==, - } - engines: { node: ">= 0.8" } - - string-similarity@4.0.4: - resolution: - { - integrity: sha512-/q/8Q4Bl4ZKAPjj8WerIBJWALKkaPRfrvhfF8k/B23i4nzrlRj2/go1m90In7nG/3XDSbOo0+pu6RvCTM9RGMQ==, - } - deprecated: Package no longer supported. Contact Support at https://www.npmjs.com/support for more info. 
- - string-width@4.2.3: - resolution: - { - integrity: sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==, - } - engines: { node: ">=8" } - - string_decoder@1.3.0: - resolution: - { - integrity: sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==, - } - - strip-ansi@6.0.1: - resolution: - { - integrity: sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==, - } - engines: { node: ">=8" } - - strip-json-comments@3.1.1: - resolution: - { - integrity: sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==, - } - engines: { node: ">=8" } - - supports-color@7.2.0: - resolution: - { - integrity: sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==, - } - engines: { node: ">=8" } - - tdigest@0.1.2: - resolution: - { - integrity: sha512-+G0LLgjjo9BZX2MfdvPfH+MKLCrxlXSYec5DaPYP1fe6Iyhf0/fSmJ0bFiZ1F8BT6cGXl2LpltQptzjXKWEkKA==, - } - - thread-stream@2.7.0: - resolution: - { - integrity: sha512-qQiRWsU/wvNolI6tbbCKd9iKaTnCXsTwVxhhKM6nctPdujTyztjlbUkUTUymidWcMnZ5pWR0ej4a0tjsW021vw==, - } - - toidentifier@1.0.1: - resolution: - { - integrity: sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==, - } - engines: { node: ">=0.6" } - - ts-node@10.9.1: - resolution: - { - integrity: sha512-NtVysVPkxxrwFGUUxGYhfux8k78pQB3JqYBXlLRZgdGUqTO5wU/UyHop5p70iEbGhB7q5KmiZiU0Y3KlJrScEw==, - } - hasBin: true - peerDependencies: - "@swc/core": ">=1.2.50" - "@swc/wasm": ">=1.2.50" - "@types/node": "*" - typescript: ">=2.7" - peerDependenciesMeta: - "@swc/core": - optional: true - "@swc/wasm": - optional: true - - tslib@2.4.0: - resolution: - { - integrity: sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==, - } - - type-fest@0.12.0: - resolution: - { - integrity: 
sha512-53RyidyjvkGpnWPMF9bQgFtWp+Sl8O2Rp13VavmJgfAP9WWG6q6TkrKU8iyJdnwnfgHI6k2hTlgqH4aSdjoTbg==, - } - engines: { node: ">=10" } - - type-fest@0.21.3: - resolution: - { - integrity: sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==, - } - engines: { node: ">=10" } - - type-is@1.6.18: - resolution: - { - integrity: sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==, - } - engines: { node: ">= 0.6" } - - typescript@6.0.2: - resolution: - { - integrity: sha512-bGdAIrZ0wiGDo5l8c++HWtbaNCWTS4UTv7RaTH/ThVIgjkveJt83m74bBHMJkuCbslY8ixgLBVZJIOiQlQTjfQ==, - } - engines: { node: ">=14.17" } - hasBin: true - - unpipe@1.0.0: - resolution: - { - integrity: sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==, - } - engines: { node: ">= 0.8" } - - uri-js@4.4.1: - resolution: - { - integrity: sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==, - } - - util-deprecate@1.0.2: - resolution: - { - integrity: sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==, - } - - utils-merge@1.0.1: - resolution: - { - integrity: sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA==, - } - engines: { node: ">= 0.4.0" } - - v8-compile-cache-lib@3.0.1: - resolution: - { - integrity: sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg==, - } - - vary@1.1.2: - resolution: - { - integrity: sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg==, - } - engines: { node: ">= 0.8" } - - viem@2.21.0: - resolution: - { - integrity: sha512-9g3Gw2nOU6t4bNuoDI5vwVExzIxseU0J7Jjx10gA2RNQVrytIrLxggW++tWEe3w4mnnm/pS1WgZFjQ/QKf/nHw==, - } - peerDependencies: - typescript: ">=5.0.4" - peerDependenciesMeta: - typescript: - optional: true - - webauthn-p256@0.0.5: - resolution: - 
{ - integrity: sha512-drMGNWKdaixZNobeORVIqq7k5DsRC9FnG201K2QjeOoQLmtSDaSsVZdkg6n5jUALJKcAG++zBPJXmv6hy0nWFg==, - } - - widest-line@3.1.0: - resolution: - { - integrity: sha512-NsmoXalsWVDMGupxZ5R08ka9flZjjiLvHVAWYOKtiKM8ujtZWr9cRffak+uSE48+Ob8ObalXpwyeUiyDD6QFgg==, - } - engines: { node: ">=8" } - - window-size@1.1.1: - resolution: - { - integrity: sha512-5D/9vujkmVQ7pSmc0SCBmHXbkv6eaHwXEx65MywhmUMsI8sGqJ972APq1lotfcwMKPFLuCFfL8xGHLIp7jaBmA==, - } - engines: { node: ">= 0.10.0" } - hasBin: true - - wrap-ansi@6.2.0: - resolution: - { - integrity: sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==, - } - engines: { node: ">=8" } - - wrap-ansi@7.0.0: - resolution: - { - integrity: sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==, - } - engines: { node: ">=10" } - - wrappy@1.0.2: - resolution: - { - integrity: sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==, - } - - ws@7.5.10: - resolution: - { - integrity: sha512-+dbF1tHwZpXcbOJdVOkzLDxZP1ailvSxM6ZweXTegylPny803bFhA+vqBYw4s31NSAk4S2Qz+AKXK9a4wkdjcQ==, - } - engines: { node: ">=8.3.0" } - peerDependencies: - bufferutil: ^4.0.1 - utf-8-validate: ^5.0.2 - peerDependenciesMeta: - bufferutil: - optional: true - utf-8-validate: - optional: true - - ws@8.17.1: - resolution: - { - integrity: sha512-6XQFvXTkbfUOZOKKILFG1PDK2NDQs4azKQl26T0YS5CxqWLgXajbPZ+h4gZekJyRqFU8pvnbAbbs/3TgRPy+GQ==, - } - engines: { node: ">=10.0.0" } - peerDependencies: - bufferutil: ^4.0.1 - utf-8-validate: ">=5.0.2" - peerDependenciesMeta: - bufferutil: - optional: true - utf-8-validate: - optional: true - - ws@8.5.0: - resolution: - { - integrity: sha512-BWX0SWVgLPzYwF8lTzEy1egjhS4S4OEAHfsO8o65WOVsrnSRGaSiUaa9e0ggGlkMTtBlmOpEXiie9RUcBO86qg==, - } - engines: { node: ">=10.0.0" } - peerDependencies: - bufferutil: ^4.0.1 - utf-8-validate: ^5.0.2 - peerDependenciesMeta: - bufferutil: - optional: true - 
utf-8-validate: - optional: true - - y18n@5.0.8: - resolution: - { - integrity: sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==, - } - engines: { node: ">=10" } - - yargs-parser@21.1.1: - resolution: - { - integrity: sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==, - } - engines: { node: ">=12" } - - yargs@17.7.2: - resolution: - { - integrity: sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==, - } - engines: { node: ">=12" } - - yn@3.1.1: - resolution: - { - integrity: sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==, - } - engines: { node: ">=6" } - - yoga-layout-prebuilt@1.10.0: - resolution: - { - integrity: sha512-YnOmtSbv4MTf7RGJMK0FvZ+KD8OEe/J5BNnR0GHhD8J/XcG/Qvxgszm0Un6FTHWW4uHlTgP0IztiXQnGyIR45g==, - } - engines: { node: ">=8" } - -snapshots: - "@adraffy/ens-normalize@1.10.0": {} - - "@cspotcode/source-map-support@0.8.1": - dependencies: - "@jridgewell/trace-mapping": 0.3.9 - - "@elastic/ecs-helpers@1.1.0": - dependencies: - fast-json-stringify: 2.7.13 - - "@elastic/ecs-pino-format@1.4.0": - dependencies: - "@elastic/ecs-helpers": 1.1.0 - - "@envio-dev/hyperfuel-client-darwin-arm64@1.2.2": - optional: true - - "@envio-dev/hyperfuel-client-darwin-x64@1.2.2": - optional: true - - "@envio-dev/hyperfuel-client-linux-arm64-gnu@1.2.2": - optional: true - - "@envio-dev/hyperfuel-client-linux-x64-gnu@1.2.2": - optional: true - - "@envio-dev/hyperfuel-client-linux-x64-musl@1.2.2": - optional: true - - "@envio-dev/hyperfuel-client-win32-x64-msvc@1.2.2": - optional: true - - "@envio-dev/hyperfuel-client@1.2.2": - optionalDependencies: - "@envio-dev/hyperfuel-client-darwin-arm64": 1.2.2 - "@envio-dev/hyperfuel-client-darwin-x64": 1.2.2 - "@envio-dev/hyperfuel-client-linux-arm64-gnu": 1.2.2 - "@envio-dev/hyperfuel-client-linux-x64-gnu": 1.2.2 - 
"@envio-dev/hyperfuel-client-linux-x64-musl": 1.2.2 - "@envio-dev/hyperfuel-client-win32-x64-msvc": 1.2.2 - - "@envio-dev/hypersync-client-darwin-arm64@1.3.0": - optional: true - - "@envio-dev/hypersync-client-darwin-x64@1.3.0": - optional: true - - "@envio-dev/hypersync-client-linux-arm64-gnu@1.3.0": - optional: true - - "@envio-dev/hypersync-client-linux-x64-gnu@1.3.0": - optional: true - - "@envio-dev/hypersync-client-linux-x64-musl@1.3.0": - optional: true - - "@envio-dev/hypersync-client@1.3.0": - optionalDependencies: - "@envio-dev/hypersync-client-darwin-arm64": 1.3.0 - "@envio-dev/hypersync-client-darwin-x64": 1.3.0 - "@envio-dev/hypersync-client-linux-arm64-gnu": 1.3.0 - "@envio-dev/hypersync-client-linux-x64-gnu": 1.3.0 - "@envio-dev/hypersync-client-linux-x64-musl": 1.3.0 - - "@jridgewell/resolve-uri@3.1.2": {} - - "@jridgewell/sourcemap-codec@1.5.5": {} - - "@jridgewell/trace-mapping@0.3.9": - dependencies: - "@jridgewell/resolve-uri": 3.1.2 - "@jridgewell/sourcemap-codec": 1.5.5 - - "@noble/curves@1.2.0": - dependencies: - "@noble/hashes": 1.3.2 - - "@noble/curves@1.4.0": - dependencies: - "@noble/hashes": 1.4.0 - - "@noble/hashes@1.3.2": {} - - "@noble/hashes@1.4.0": {} - - "@opentelemetry/api@1.9.1": {} - - "@rescript/react@0.12.1(react-dom@19.2.4(react@18.2.0))(react@18.2.0)": - dependencies: - react: 18.2.0 - react-dom: 19.2.4(react@18.2.0) - - "@scure/base@1.1.9": {} - - "@scure/bip32@1.4.0": - dependencies: - "@noble/curves": 1.4.0 - "@noble/hashes": 1.4.0 - "@scure/base": 1.1.9 - - "@scure/bip39@1.3.0": - dependencies: - "@noble/hashes": 1.4.0 - "@scure/base": 1.1.9 - - "@tsconfig/node10@1.0.12": {} - - "@tsconfig/node12@1.0.11": {} - - "@tsconfig/node14@1.0.3": {} - - "@tsconfig/node16@1.0.4": {} - - "@types/node@18.15.13": {} - - "@types/yoga-layout@1.9.2": {} - - abitype@1.0.5(typescript@6.0.2): - optionalDependencies: - typescript: 6.0.2 - - abort-controller@3.0.0: - dependencies: - event-target-shim: 5.0.1 - - accepts@1.3.8: - dependencies: 
- mime-types: 2.1.35 - negotiator: 0.6.3 - - acorn-walk@8.3.5: - dependencies: - acorn: 8.16.0 - - acorn@8.16.0: {} - - aes-js@4.0.0-beta.5: {} - - ajv@6.14.0: - dependencies: - fast-deep-equal: 3.1.3 - fast-json-stable-stringify: 2.1.0 - json-schema-traverse: 0.4.1 - uri-js: 4.4.1 - - ansi-escapes@4.3.2: - dependencies: - type-fest: 0.21.3 - - ansi-regex@5.0.1: {} - - ansi-styles@4.3.0: - dependencies: - color-convert: 2.0.1 - - arg@4.1.3: {} - - array-flatten@1.1.1: {} - - astral-regex@2.0.0: {} - - atomic-sleep@1.0.0: {} - - auto-bind@4.0.0: {} - - balanced-match@1.0.2: {} - - base64-js@1.5.1: {} - - bignumber.js@9.1.2: {} - - bintrees@1.0.2: {} - - body-parser@1.20.2: - dependencies: - bytes: 3.1.2 - content-type: 1.0.5 - debug: 2.6.9 - depd: 2.0.0 - destroy: 1.2.0 - http-errors: 2.0.0 - iconv-lite: 0.4.24 - on-finished: 2.4.1 - qs: 6.11.0 - raw-body: 2.5.2 - type-is: 1.6.18 - unpipe: 1.0.0 - transitivePeerDependencies: - - supports-color - - brace-expansion@2.0.3: - dependencies: - balanced-match: 1.0.2 - - buffer@6.0.3: - dependencies: - base64-js: 1.5.1 - ieee754: 1.2.1 - - bytes@3.1.2: {} - - call-bind-apply-helpers@1.0.2: - dependencies: - es-errors: 1.3.0 - function-bind: 1.1.2 - - call-bound@1.0.4: - dependencies: - call-bind-apply-helpers: 1.0.2 - get-intrinsic: 1.3.0 - - cfonts@2.10.1: - dependencies: - chalk: 4.1.2 - window-size: 1.1.1 - - chalk@4.1.2: - dependencies: - ansi-styles: 4.3.0 - supports-color: 7.2.0 - - ci-info@2.0.0: {} - - cli-boxes@2.2.1: {} - - cli-cursor@3.1.0: - dependencies: - restore-cursor: 3.1.0 - - cli-spinners@2.9.2: {} - - cli-truncate@2.1.0: - dependencies: - slice-ansi: 3.0.0 - string-width: 4.2.3 - - cliui@8.0.1: - dependencies: - string-width: 4.2.3 - strip-ansi: 6.0.1 - wrap-ansi: 7.0.0 - - code-excerpt@3.0.0: - dependencies: - convert-to-spaces: 1.0.2 - - color-convert@2.0.1: - dependencies: - color-name: 1.1.4 - - color-name@1.1.4: {} - - colorette@2.0.20: {} - - content-disposition@0.5.4: - dependencies: - 
safe-buffer: 5.2.1 - - content-type@1.0.5: {} - - convert-to-spaces@1.0.2: {} - - cookie-signature@1.0.6: {} - - cookie@0.6.0: {} - - create-require@1.1.1: {} - - date-fns@3.3.1: {} - - dateformat@4.6.3: {} - - debug@2.6.9: - dependencies: - ms: 2.0.0 - - deepmerge@4.3.1: {} - - define-property@1.0.0: - dependencies: - is-descriptor: 1.0.3 - - depd@2.0.0: {} - - destroy@1.2.0: {} - - diff@4.0.4: {} - - dotenv@16.4.5: {} - - dunder-proto@1.0.1: - dependencies: - call-bind-apply-helpers: 1.0.2 - es-errors: 1.3.0 - gopd: 1.2.0 - - ee-first@1.1.1: {} - - emoji-regex@8.0.0: {} - - encodeurl@1.0.2: {} - - end-of-stream@1.4.5: - dependencies: - once: 1.4.0 - - envio-darwin-arm64@2.32.12: - optional: true - - envio-darwin-x64@2.32.12: - optional: true - - envio-linux-arm64@2.32.12: - optional: true - - envio-linux-x64@2.32.12: - optional: true - - envio@2.32.12(typescript@6.0.2): - dependencies: - "@elastic/ecs-pino-format": 1.4.0 - "@envio-dev/hyperfuel-client": 1.2.2 - "@envio-dev/hypersync-client": 1.3.0 - bignumber.js: 9.1.2 - eventsource: 4.1.0 - pino: 8.16.1 - pino-pretty: 10.2.3 - prom-client: 15.0.0 - rescript: 11.1.3 - rescript-schema: 9.3.0(rescript@11.1.3) - viem: 2.21.0(typescript@6.0.2) - optionalDependencies: - envio-darwin-arm64: 2.32.12 - envio-darwin-x64: 2.32.12 - envio-linux-arm64: 2.32.12 - envio-linux-x64: 2.32.12 - transitivePeerDependencies: - - bufferutil - - typescript - - utf-8-validate - - zod - - es-define-property@1.0.1: {} - - es-errors@1.3.0: {} - - es-object-atoms@1.1.1: - dependencies: - es-errors: 1.3.0 - - escalade@3.2.0: {} - - escape-html@1.0.3: {} - - escape-string-regexp@2.0.0: {} - - etag@1.8.1: {} - - ethers@6.8.0: - dependencies: - "@adraffy/ens-normalize": 1.10.0 - "@noble/curves": 1.2.0 - "@noble/hashes": 1.3.2 - "@types/node": 18.15.13 - aes-js: 4.0.0-beta.5 - tslib: 2.4.0 - ws: 8.5.0 - transitivePeerDependencies: - - bufferutil - - utf-8-validate - - event-target-shim@5.0.1: {} - - events@3.3.0: {} - - eventsource-parser@3.0.6: 
{} - - eventsource@4.1.0: - dependencies: - eventsource-parser: 3.0.6 - - express@4.19.2: - dependencies: - accepts: 1.3.8 - array-flatten: 1.1.1 - body-parser: 1.20.2 - content-disposition: 0.5.4 - content-type: 1.0.5 - cookie: 0.6.0 - cookie-signature: 1.0.6 - debug: 2.6.9 - depd: 2.0.0 - encodeurl: 1.0.2 - escape-html: 1.0.3 - etag: 1.8.1 - finalhandler: 1.2.0 - fresh: 0.5.2 - http-errors: 2.0.0 - merge-descriptors: 1.0.1 - methods: 1.1.2 - on-finished: 2.4.1 - parseurl: 1.3.3 - path-to-regexp: 0.1.7 - proxy-addr: 2.0.7 - qs: 6.11.0 - range-parser: 1.2.1 - safe-buffer: 5.2.1 - send: 0.18.0 - serve-static: 1.15.0 - setprototypeof: 1.2.0 - statuses: 2.0.1 - type-is: 1.6.18 - utils-merge: 1.0.1 - vary: 1.1.2 - transitivePeerDependencies: - - supports-color - - fast-copy@3.0.2: {} - - fast-deep-equal@3.1.3: {} - - fast-json-stable-stringify@2.1.0: {} - - fast-json-stringify@2.7.13: - dependencies: - ajv: 6.14.0 - deepmerge: 4.3.1 - rfdc: 1.4.1 - string-similarity: 4.0.4 - - fast-redact@3.5.0: {} - - fast-safe-stringify@2.1.1: {} - - finalhandler@1.2.0: - dependencies: - debug: 2.6.9 - encodeurl: 1.0.2 - escape-html: 1.0.3 - on-finished: 2.4.1 - parseurl: 1.3.3 - statuses: 2.0.1 - unpipe: 1.0.0 - transitivePeerDependencies: - - supports-color - - forwarded@0.2.0: {} - - fresh@0.5.2: {} - - fs.realpath@1.0.0: {} - - function-bind@1.1.2: {} - - get-caller-file@2.0.5: {} - - get-intrinsic@1.3.0: - dependencies: - call-bind-apply-helpers: 1.0.2 - es-define-property: 1.0.1 - es-errors: 1.3.0 - es-object-atoms: 1.1.1 - function-bind: 1.1.2 - get-proto: 1.0.1 - gopd: 1.2.0 - has-symbols: 1.1.0 - hasown: 2.0.2 - math-intrinsics: 1.1.0 - - get-proto@1.0.1: - dependencies: - dunder-proto: 1.0.1 - es-object-atoms: 1.1.1 - - glob@8.1.0: - dependencies: - fs.realpath: 1.0.0 - inflight: 1.0.6 - inherits: 2.0.4 - minimatch: 5.1.9 - once: 1.4.0 - - gopd@1.2.0: {} - - has-flag@4.0.0: {} - - has-symbols@1.1.0: {} - - hasown@2.0.2: - dependencies: - function-bind: 1.1.2 - - 
help-me@4.2.0: - dependencies: - glob: 8.1.0 - readable-stream: 3.6.2 - - http-errors@2.0.0: - dependencies: - depd: 2.0.0 - inherits: 2.0.4 - setprototypeof: 1.2.0 - statuses: 2.0.1 - toidentifier: 1.0.1 - - iconv-lite@0.4.24: - dependencies: - safer-buffer: 2.1.2 - - ieee754@1.2.1: {} - - indent-string@4.0.0: {} - - inflight@1.0.6: - dependencies: - once: 1.4.0 - wrappy: 1.0.2 - - inherits@2.0.4: {} - - ink-big-text@1.2.0(ink@3.2.0(react@18.2.0))(react@18.2.0): - dependencies: - cfonts: 2.10.1 - ink: 3.2.0(react@18.2.0) - prop-types: 15.8.1 - react: 18.2.0 - - ink-spinner@4.0.3(ink@3.2.0(react@18.2.0))(react@18.2.0): - dependencies: - cli-spinners: 2.9.2 - ink: 3.2.0(react@18.2.0) - react: 18.2.0 - - ink@3.2.0(react@18.2.0): - dependencies: - ansi-escapes: 4.3.2 - auto-bind: 4.0.0 - chalk: 4.1.2 - cli-boxes: 2.2.1 - cli-cursor: 3.1.0 - cli-truncate: 2.1.0 - code-excerpt: 3.0.0 - indent-string: 4.0.0 - is-ci: 2.0.0 - lodash: 4.18.1 - patch-console: 1.0.0 - react: 18.2.0 - react-devtools-core: 4.28.5 - react-reconciler: 0.26.2(react@18.2.0) - scheduler: 0.20.2 - signal-exit: 3.0.7 - slice-ansi: 3.0.0 - stack-utils: 2.0.6 - string-width: 4.2.3 - type-fest: 0.12.0 - widest-line: 3.1.0 - wrap-ansi: 6.2.0 - ws: 7.5.10 - yoga-layout-prebuilt: 1.10.0 - transitivePeerDependencies: - - bufferutil - - utf-8-validate - - ipaddr.js@1.9.1: {} - - is-accessor-descriptor@1.0.1: - dependencies: - hasown: 2.0.2 - - is-buffer@1.1.6: {} - - is-ci@2.0.0: - dependencies: - ci-info: 2.0.0 - - is-data-descriptor@1.0.1: - dependencies: - hasown: 2.0.2 - - is-descriptor@1.0.3: - dependencies: - is-accessor-descriptor: 1.0.1 - is-data-descriptor: 1.0.1 - - is-fullwidth-code-point@3.0.0: {} - - is-number@3.0.0: - dependencies: - kind-of: 3.2.2 - - isows@1.0.4(ws@8.17.1): - dependencies: - ws: 8.17.1 - - joycon@3.1.1: {} - - js-sdsl@4.4.2: {} - - js-tokens@4.0.0: {} - - json-schema-traverse@0.4.1: {} - - kind-of@3.2.2: - dependencies: - is-buffer: 1.1.6 - - lodash@4.18.1: {} - - 
loose-envify@1.4.0: - dependencies: - js-tokens: 4.0.0 - - make-error@1.3.6: {} - - math-intrinsics@1.1.0: {} - - media-typer@0.3.0: {} - - merge-descriptors@1.0.1: {} - - methods@1.1.2: {} - - mime-db@1.52.0: {} - - mime-types@2.1.35: - dependencies: - mime-db: 1.52.0 - - mime@1.6.0: {} - - mimic-fn@2.1.0: {} - - minimatch@5.1.9: - dependencies: - brace-expansion: 2.0.3 - - minimist@1.2.8: {} - - ms@2.0.0: {} - - ms@2.1.3: {} - - negotiator@0.6.3: {} - - object-assign@4.1.1: {} - - object-inspect@1.13.4: {} - - on-exit-leak-free@2.1.2: {} - - on-finished@2.4.1: - dependencies: - ee-first: 1.1.1 - - once@1.4.0: - dependencies: - wrappy: 1.0.2 - - onetime@5.1.2: - dependencies: - mimic-fn: 2.1.0 - - parseurl@1.3.3: {} - - patch-console@1.0.0: {} - - path-to-regexp@0.1.7: {} - - pino-abstract-transport@1.1.0: - dependencies: - readable-stream: 4.7.0 - split2: 4.2.0 - - pino-abstract-transport@1.2.0: - dependencies: - readable-stream: 4.7.0 - split2: 4.2.0 - - pino-pretty@10.2.3: - dependencies: - colorette: 2.0.20 - dateformat: 4.6.3 - fast-copy: 3.0.2 - fast-safe-stringify: 2.1.1 - help-me: 4.2.0 - joycon: 3.1.1 - minimist: 1.2.8 - on-exit-leak-free: 2.1.2 - pino-abstract-transport: 1.2.0 - pump: 3.0.4 - readable-stream: 4.7.0 - secure-json-parse: 2.7.0 - sonic-boom: 3.8.1 - strip-json-comments: 3.1.1 - - pino-std-serializers@6.2.2: {} - - pino@8.16.1: - dependencies: - atomic-sleep: 1.0.0 - fast-redact: 3.5.0 - on-exit-leak-free: 2.1.2 - pino-abstract-transport: 1.1.0 - pino-std-serializers: 6.2.2 - process-warning: 2.3.2 - quick-format-unescaped: 4.0.4 - real-require: 0.2.0 - safe-stable-stringify: 2.5.0 - sonic-boom: 3.8.1 - thread-stream: 2.7.0 - - postgres@3.4.1: {} - - process-warning@2.3.2: {} - - process@0.11.10: {} - - prom-client@15.0.0: - dependencies: - "@opentelemetry/api": 1.9.1 - tdigest: 0.1.2 - - prop-types@15.8.1: - dependencies: - loose-envify: 1.4.0 - object-assign: 4.1.1 - react-is: 16.13.1 - - proxy-addr@2.0.7: - dependencies: - forwarded: 
0.2.0 - ipaddr.js: 1.9.1 - - pump@3.0.4: - dependencies: - end-of-stream: 1.4.5 - once: 1.4.0 - - punycode@2.3.1: {} - - qs@6.11.0: - dependencies: - side-channel: 1.1.0 - - quick-format-unescaped@4.0.4: {} - - range-parser@1.2.1: {} - - raw-body@2.5.2: - dependencies: - bytes: 3.1.2 - http-errors: 2.0.0 - iconv-lite: 0.4.24 - unpipe: 1.0.0 - - react-devtools-core@4.28.5: - dependencies: - shell-quote: 1.8.3 - ws: 7.5.10 - transitivePeerDependencies: - - bufferutil - - utf-8-validate - - react-dom@19.2.4(react@18.2.0): - dependencies: - react: 18.2.0 - scheduler: 0.27.0 - - react-is@16.13.1: {} - - react-reconciler@0.26.2(react@18.2.0): - dependencies: - loose-envify: 1.4.0 - object-assign: 4.1.1 - react: 18.2.0 - scheduler: 0.20.2 - - react@18.2.0: - dependencies: - loose-envify: 1.4.0 - - readable-stream@3.6.2: - dependencies: - inherits: 2.0.4 - string_decoder: 1.3.0 - util-deprecate: 1.0.2 - - readable-stream@4.7.0: - dependencies: - abort-controller: 3.0.0 - buffer: 6.0.3 - events: 3.3.0 - process: 0.11.10 - string_decoder: 1.3.0 - - real-require@0.2.0: {} - - require-directory@2.1.1: {} - - rescript-envsafe@5.0.0(rescript-schema@9.3.0(rescript@11.1.3))(rescript@11.1.3): - dependencies: - rescript: 11.1.3 - rescript-schema: 9.3.0(rescript@11.1.3) - - rescript-schema@9.3.0(rescript@11.1.3): - optionalDependencies: - rescript: 11.1.3 - - rescript@11.1.3: {} - - restore-cursor@3.1.0: - dependencies: - onetime: 5.1.2 - signal-exit: 3.0.7 - - rfdc@1.4.1: {} - - safe-buffer@5.2.1: {} - - safe-stable-stringify@2.5.0: {} - - safer-buffer@2.1.2: {} - - scheduler@0.20.2: - dependencies: - loose-envify: 1.4.0 - object-assign: 4.1.1 - - scheduler@0.27.0: {} - - secure-json-parse@2.7.0: {} - - send@0.18.0: - dependencies: - debug: 2.6.9 - depd: 2.0.0 - destroy: 1.2.0 - encodeurl: 1.0.2 - escape-html: 1.0.3 - etag: 1.8.1 - fresh: 0.5.2 - http-errors: 2.0.0 - mime: 1.6.0 - ms: 2.1.3 - on-finished: 2.4.1 - range-parser: 1.2.1 - statuses: 2.0.1 - transitivePeerDependencies: - 
- supports-color - - serve-static@1.15.0: - dependencies: - encodeurl: 1.0.2 - escape-html: 1.0.3 - parseurl: 1.3.3 - send: 0.18.0 - transitivePeerDependencies: - - supports-color - - setprototypeof@1.2.0: {} - - shell-quote@1.8.3: {} - - side-channel-list@1.0.0: - dependencies: - es-errors: 1.3.0 - object-inspect: 1.13.4 - - side-channel-map@1.0.1: - dependencies: - call-bound: 1.0.4 - es-errors: 1.3.0 - get-intrinsic: 1.3.0 - object-inspect: 1.13.4 - - side-channel-weakmap@1.0.2: - dependencies: - call-bound: 1.0.4 - es-errors: 1.3.0 - get-intrinsic: 1.3.0 - object-inspect: 1.13.4 - side-channel-map: 1.0.1 - - side-channel@1.1.0: - dependencies: - es-errors: 1.3.0 - object-inspect: 1.13.4 - side-channel-list: 1.0.0 - side-channel-map: 1.0.1 - side-channel-weakmap: 1.0.2 - - signal-exit@3.0.7: {} - - slice-ansi@3.0.0: - dependencies: - ansi-styles: 4.3.0 - astral-regex: 2.0.0 - is-fullwidth-code-point: 3.0.0 - - sonic-boom@3.8.1: - dependencies: - atomic-sleep: 1.0.0 - - split2@4.2.0: {} - - stack-utils@2.0.6: - dependencies: - escape-string-regexp: 2.0.0 - - statuses@2.0.1: {} - - string-similarity@4.0.4: {} - - string-width@4.2.3: - dependencies: - emoji-regex: 8.0.0 - is-fullwidth-code-point: 3.0.0 - strip-ansi: 6.0.1 - - string_decoder@1.3.0: - dependencies: - safe-buffer: 5.2.1 - - strip-ansi@6.0.1: - dependencies: - ansi-regex: 5.0.1 - - strip-json-comments@3.1.1: {} - - supports-color@7.2.0: - dependencies: - has-flag: 4.0.0 - - tdigest@0.1.2: - dependencies: - bintrees: 1.0.2 - - thread-stream@2.7.0: - dependencies: - real-require: 0.2.0 - - toidentifier@1.0.1: {} - - ts-node@10.9.1(@types/node@18.15.13)(typescript@6.0.2): - dependencies: - "@cspotcode/source-map-support": 0.8.1 - "@tsconfig/node10": 1.0.12 - "@tsconfig/node12": 1.0.11 - "@tsconfig/node14": 1.0.3 - "@tsconfig/node16": 1.0.4 - "@types/node": 18.15.13 - acorn: 8.16.0 - acorn-walk: 8.3.5 - arg: 4.1.3 - create-require: 1.1.1 - diff: 4.0.4 - make-error: 1.3.6 - typescript: 6.0.2 - 
v8-compile-cache-lib: 3.0.1 - yn: 3.1.1 - - tslib@2.4.0: {} - - type-fest@0.12.0: {} - - type-fest@0.21.3: {} - - type-is@1.6.18: - dependencies: - media-typer: 0.3.0 - mime-types: 2.1.35 - - typescript@6.0.2: {} - - unpipe@1.0.0: {} - - uri-js@4.4.1: - dependencies: - punycode: 2.3.1 - - util-deprecate@1.0.2: {} - - utils-merge@1.0.1: {} - - v8-compile-cache-lib@3.0.1: {} - - vary@1.1.2: {} - - viem@2.21.0(typescript@6.0.2): - dependencies: - "@adraffy/ens-normalize": 1.10.0 - "@noble/curves": 1.4.0 - "@noble/hashes": 1.4.0 - "@scure/bip32": 1.4.0 - "@scure/bip39": 1.3.0 - abitype: 1.0.5(typescript@6.0.2) - isows: 1.0.4(ws@8.17.1) - webauthn-p256: 0.0.5 - ws: 8.17.1 - optionalDependencies: - typescript: 6.0.2 - transitivePeerDependencies: - - bufferutil - - utf-8-validate - - zod - - webauthn-p256@0.0.5: - dependencies: - "@noble/curves": 1.4.0 - "@noble/hashes": 1.4.0 - - widest-line@3.1.0: - dependencies: - string-width: 4.2.3 - - window-size@1.1.1: - dependencies: - define-property: 1.0.0 - is-number: 3.0.0 - - wrap-ansi@6.2.0: - dependencies: - ansi-styles: 4.3.0 - string-width: 4.2.3 - strip-ansi: 6.0.1 - - wrap-ansi@7.0.0: - dependencies: - ansi-styles: 4.3.0 - string-width: 4.2.3 - strip-ansi: 6.0.1 - - wrappy@1.0.2: {} - - ws@7.5.10: {} - - ws@8.17.1: {} - - ws@8.5.0: {} - - y18n@5.0.8: {} - - yargs-parser@21.1.1: {} - - yargs@17.7.2: - dependencies: - cliui: 8.0.1 - escalade: 3.2.0 - get-caller-file: 2.0.5 - require-directory: 2.1.1 - string-width: 4.2.3 - y18n: 5.0.8 - yargs-parser: 21.1.1 - - yn@3.1.1: {} - - yoga-layout-prebuilt@1.10.0: - dependencies: - "@types/yoga-layout": 1.9.2 diff --git a/apps/hypersync-indexer/generated/rescript.json b/apps/hypersync-indexer/generated/rescript.json deleted file mode 100644 index e99f4ead7..000000000 --- a/apps/hypersync-indexer/generated/rescript.json +++ /dev/null @@ -1,35 +0,0 @@ -{ - "name": "generated", - "version": "0.1.0", - "sources": [ - { - "dir": "src", - "subdirs": true - } - ], - "gentypeconfig": { - 
"shims": { - "Js": "Js" - }, - "generatedFileExtension": ".gen.ts", - "debug": { - "all": false, - "basic": false - } - }, - "package-specs": { - "module": "commonjs", - "in-source": true - }, - "jsx": { - "version": 4 - }, - "suffix": ".res.js", - "bs-dependencies": [ - "rescript-envsafe", - "rescript-schema", - "@rescript/react", - "envio" - ], - "bsc-flags": ["-open RescriptSchema"] -} diff --git a/apps/hypersync-indexer/generated/schema.graphql b/apps/hypersync-indexer/generated/schema.graphql deleted file mode 100644 index f7245e50e..000000000 --- a/apps/hypersync-indexer/generated/schema.graphql +++ /dev/null @@ -1,1258 +0,0 @@ -""" -The `JSON` scalar type represents JSON values as specified by [ECMA-404](http://www.ecma-international.org/publications/files/ECMA-ST/ECMA-404.pdf). -""" -scalar JSON - -scalar BigInt - -type PageInfo { - hasNextPage: Boolean! - hasPreviousPage: Boolean! - startCursor: String - endCursor: String -} - -type Meta { - status: JSON -} - -type Query { - token(id: String!): token - tokens(where: tokenFilter, orderBy: String, orderDirection: String, before: String, after: String, limit: Int): tokenPage! - account(id: String!): account - accounts(where: accountFilter, orderBy: String, orderDirection: String, before: String, after: String, limit: Int): accountPage! - accountBalance(accountId: String!, tokenId: String!): accountBalance - accountBalances(where: accountBalanceFilter, orderBy: String, orderDirection: String, before: String, after: String, limit: Int): accountBalancePage! - accountPower(accountId: String!): accountPower - accountPowers(where: accountPowerFilter, orderBy: String, orderDirection: String, before: String, after: String, limit: Int): accountPowerPage! - votingPowerHistory(transactionHash: String!, accountId: String!, logIndex: Float!): votingPowerHistory - votingPowerHistorys(where: votingPowerHistoryFilter, orderBy: String, orderDirection: String, before: String, after: String, limit: Int): votingPowerHistoryPage! 
- balanceHistory(transactionHash: String!, accountId: String!, logIndex: Float!): balanceHistory - balanceHistorys(where: balanceHistoryFilter, orderBy: String, orderDirection: String, before: String, after: String, limit: Int): balanceHistoryPage! - delegation(transactionHash: String!, delegatorAccountId: String!, delegateAccountId: String!): delegation - delegations(where: delegationFilter, orderBy: String, orderDirection: String, before: String, after: String, limit: Int): delegationPage! - transfer(transactionHash: String!, fromAccountId: String!, toAccountId: String!): transfer - transfers(where: transferFilter, orderBy: String, orderDirection: String, before: String, after: String, limit: Int): transferPage! - votesOnchain(voterAccountId: String!, proposalId: String!): votesOnchain - votesOnchains(where: votesOnchainFilter, orderBy: String, orderDirection: String, before: String, after: String, limit: Int): votesOnchainPage! - proposalsOnchain(id: String!): proposalsOnchain - proposalsOnchains(where: proposalsOnchainFilter, orderBy: String, orderDirection: String, before: String, after: String, limit: Int): proposalsOnchainPage! - daoMetricsDayBucket(date: BigInt!, tokenId: String!, metricType: String!): daoMetricsDayBucket - daoMetricsDayBuckets(where: daoMetricsDayBucketFilter, orderBy: String, orderDirection: String, before: String, after: String, limit: Int): daoMetricsDayBucketPage! - transaction(transactionHash: String!): transaction - transactions(where: transactionFilter, orderBy: String, orderDirection: String, before: String, after: String, limit: Int): transactionPage! - tokenPrice(timestamp: BigInt!): tokenPrice - tokenPrices(where: tokenPriceFilter, orderBy: String, orderDirection: String, before: String, after: String, limit: Int): tokenPricePage! - _meta: Meta -} - -type token { - id: String! - name: String - decimals: Int! - totalSupply: BigInt! - delegatedSupply: BigInt! - cexSupply: BigInt! - dexSupply: BigInt! - lendingSupply: BigInt! 
- circulatingSupply: BigInt! - treasury: BigInt! -} - -type tokenPage { - items: [token!]! - pageInfo: PageInfo! - totalCount: Int! -} - -input tokenFilter { - AND: [tokenFilter] - OR: [tokenFilter] - id: String - id_not: String - id_in: [String] - id_not_in: [String] - id_contains: String - id_not_contains: String - id_starts_with: String - id_ends_with: String - id_not_starts_with: String - id_not_ends_with: String - name: String - name_not: String - name_in: [String] - name_not_in: [String] - name_contains: String - name_not_contains: String - name_starts_with: String - name_ends_with: String - name_not_starts_with: String - name_not_ends_with: String - decimals: Int - decimals_not: Int - decimals_in: [Int] - decimals_not_in: [Int] - decimals_gt: Int - decimals_lt: Int - decimals_gte: Int - decimals_lte: Int - totalSupply: BigInt - totalSupply_not: BigInt - totalSupply_in: [BigInt] - totalSupply_not_in: [BigInt] - totalSupply_gt: BigInt - totalSupply_lt: BigInt - totalSupply_gte: BigInt - totalSupply_lte: BigInt - delegatedSupply: BigInt - delegatedSupply_not: BigInt - delegatedSupply_in: [BigInt] - delegatedSupply_not_in: [BigInt] - delegatedSupply_gt: BigInt - delegatedSupply_lt: BigInt - delegatedSupply_gte: BigInt - delegatedSupply_lte: BigInt - cexSupply: BigInt - cexSupply_not: BigInt - cexSupply_in: [BigInt] - cexSupply_not_in: [BigInt] - cexSupply_gt: BigInt - cexSupply_lt: BigInt - cexSupply_gte: BigInt - cexSupply_lte: BigInt - dexSupply: BigInt - dexSupply_not: BigInt - dexSupply_in: [BigInt] - dexSupply_not_in: [BigInt] - dexSupply_gt: BigInt - dexSupply_lt: BigInt - dexSupply_gte: BigInt - dexSupply_lte: BigInt - lendingSupply: BigInt - lendingSupply_not: BigInt - lendingSupply_in: [BigInt] - lendingSupply_not_in: [BigInt] - lendingSupply_gt: BigInt - lendingSupply_lt: BigInt - lendingSupply_gte: BigInt - lendingSupply_lte: BigInt - circulatingSupply: BigInt - circulatingSupply_not: BigInt - circulatingSupply_in: [BigInt] - circulatingSupply_not_in: 
[BigInt] - circulatingSupply_gt: BigInt - circulatingSupply_lt: BigInt - circulatingSupply_gte: BigInt - circulatingSupply_lte: BigInt - treasury: BigInt - treasury_not: BigInt - treasury_in: [BigInt] - treasury_not_in: [BigInt] - treasury_gt: BigInt - treasury_lt: BigInt - treasury_gte: BigInt - treasury_lte: BigInt -} - -type account { - id: String! - balances(where: accountBalanceFilter, orderBy: String, orderDirection: String, before: String, after: String, limit: Int): accountBalancePage - powers(where: accountPowerFilter, orderBy: String, orderDirection: String, before: String, after: String, limit: Int): accountPowerPage - delegationsFrom(where: delegationFilter, orderBy: String, orderDirection: String, before: String, after: String, limit: Int): delegationPage - delegationsTo(where: delegationFilter, orderBy: String, orderDirection: String, before: String, after: String, limit: Int): delegationPage - sentTransfers(where: transferFilter, orderBy: String, orderDirection: String, before: String, after: String, limit: Int): transferPage - receivedTransfers(where: transferFilter, orderBy: String, orderDirection: String, before: String, after: String, limit: Int): transferPage - proposals(where: proposalsOnchainFilter, orderBy: String, orderDirection: String, before: String, after: String, limit: Int): proposalsOnchainPage - votes(where: votesOnchainFilter, orderBy: String, orderDirection: String, before: String, after: String, limit: Int): votesOnchainPage - delegatedFromBalances(where: accountBalanceFilter, orderBy: String, orderDirection: String, before: String, after: String, limit: Int): accountBalancePage -} - -type accountBalancePage { - items: [accountBalance!]! - pageInfo: PageInfo! - totalCount: Int! -} - -type accountBalance { - accountId: String! - tokenId: String! - balance: BigInt! - delegate: String! 
- delegatePower: accountPower - account: account - delegateAccount: account - delegatedTo: accountPower - token: token -} - -type accountPower { - accountId: String! - daoId: String! - votingPower: BigInt! - votesCount: Int! - proposalsCount: Int! - delegationsCount: Int! - lastVoteTimestamp: BigInt! - account: account -} - -input accountBalanceFilter { - AND: [accountBalanceFilter] - OR: [accountBalanceFilter] - accountId: String - accountId_not: String - accountId_in: [String] - accountId_not_in: [String] - accountId_contains: String - accountId_not_contains: String - accountId_starts_with: String - accountId_ends_with: String - accountId_not_starts_with: String - accountId_not_ends_with: String - tokenId: String - tokenId_not: String - tokenId_in: [String] - tokenId_not_in: [String] - tokenId_contains: String - tokenId_not_contains: String - tokenId_starts_with: String - tokenId_ends_with: String - tokenId_not_starts_with: String - tokenId_not_ends_with: String - balance: BigInt - balance_not: BigInt - balance_in: [BigInt] - balance_not_in: [BigInt] - balance_gt: BigInt - balance_lt: BigInt - balance_gte: BigInt - balance_lte: BigInt - delegate: String - delegate_not: String - delegate_in: [String] - delegate_not_in: [String] - delegate_contains: String - delegate_not_contains: String - delegate_starts_with: String - delegate_ends_with: String - delegate_not_starts_with: String - delegate_not_ends_with: String -} - -type accountPowerPage { - items: [accountPower!]! - pageInfo: PageInfo! - totalCount: Int! 
-} - -input accountPowerFilter { - AND: [accountPowerFilter] - OR: [accountPowerFilter] - accountId: String - accountId_not: String - accountId_in: [String] - accountId_not_in: [String] - accountId_contains: String - accountId_not_contains: String - accountId_starts_with: String - accountId_ends_with: String - accountId_not_starts_with: String - accountId_not_ends_with: String - daoId: String - daoId_not: String - daoId_in: [String] - daoId_not_in: [String] - daoId_contains: String - daoId_not_contains: String - daoId_starts_with: String - daoId_ends_with: String - daoId_not_starts_with: String - daoId_not_ends_with: String - votingPower: BigInt - votingPower_not: BigInt - votingPower_in: [BigInt] - votingPower_not_in: [BigInt] - votingPower_gt: BigInt - votingPower_lt: BigInt - votingPower_gte: BigInt - votingPower_lte: BigInt - votesCount: Int - votesCount_not: Int - votesCount_in: [Int] - votesCount_not_in: [Int] - votesCount_gt: Int - votesCount_lt: Int - votesCount_gte: Int - votesCount_lte: Int - proposalsCount: Int - proposalsCount_not: Int - proposalsCount_in: [Int] - proposalsCount_not_in: [Int] - proposalsCount_gt: Int - proposalsCount_lt: Int - proposalsCount_gte: Int - proposalsCount_lte: Int - delegationsCount: Int - delegationsCount_not: Int - delegationsCount_in: [Int] - delegationsCount_not_in: [Int] - delegationsCount_gt: Int - delegationsCount_lt: Int - delegationsCount_gte: Int - delegationsCount_lte: Int - lastVoteTimestamp: BigInt - lastVoteTimestamp_not: BigInt - lastVoteTimestamp_in: [BigInt] - lastVoteTimestamp_not_in: [BigInt] - lastVoteTimestamp_gt: BigInt - lastVoteTimestamp_lt: BigInt - lastVoteTimestamp_gte: BigInt - lastVoteTimestamp_lte: BigInt -} - -type delegationPage { - items: [delegation!]! - pageInfo: PageInfo! - totalCount: Int! -} - -type delegation { - transactionHash: String! - daoId: String! - delegateAccountId: String! - delegatorAccountId: String! - delegatedValue: BigInt! - previousDelegate: String - timestamp: BigInt! 
- logIndex: Int! - isCex: Boolean! - isDex: Boolean! - isLending: Boolean! - isTotal: Boolean! - delegate: account - delegator: account - transaction: transaction -} - -type transaction { - transactionHash: String! - fromAddress: String - toAddress: String - isCex: Boolean! - isDex: Boolean! - isLending: Boolean! - isTotal: Boolean! - timestamp: BigInt! - transfers(where: transferFilter, orderBy: String, orderDirection: String, before: String, after: String, limit: Int): transferPage - delegations(where: delegationFilter, orderBy: String, orderDirection: String, before: String, after: String, limit: Int): delegationPage -} - -type transferPage { - items: [transfer!]! - pageInfo: PageInfo! - totalCount: Int! -} - -type transfer { - transactionHash: String! - daoId: String! - tokenId: String! - amount: BigInt! - fromAccountId: String! - toAccountId: String! - timestamp: BigInt! - logIndex: Int! - isCex: Boolean! - isDex: Boolean! - isLending: Boolean! - isTotal: Boolean! - from: account - to: account - token: token - transaction: transaction -} - -input transferFilter { - AND: [transferFilter] - OR: [transferFilter] - transactionHash: String - transactionHash_not: String - transactionHash_in: [String] - transactionHash_not_in: [String] - transactionHash_contains: String - transactionHash_not_contains: String - transactionHash_starts_with: String - transactionHash_ends_with: String - transactionHash_not_starts_with: String - transactionHash_not_ends_with: String - daoId: String - daoId_not: String - daoId_in: [String] - daoId_not_in: [String] - daoId_contains: String - daoId_not_contains: String - daoId_starts_with: String - daoId_ends_with: String - daoId_not_starts_with: String - daoId_not_ends_with: String - tokenId: String - tokenId_not: String - tokenId_in: [String] - tokenId_not_in: [String] - tokenId_contains: String - tokenId_not_contains: String - tokenId_starts_with: String - tokenId_ends_with: String - tokenId_not_starts_with: String - 
tokenId_not_ends_with: String - amount: BigInt - amount_not: BigInt - amount_in: [BigInt] - amount_not_in: [BigInt] - amount_gt: BigInt - amount_lt: BigInt - amount_gte: BigInt - amount_lte: BigInt - fromAccountId: String - fromAccountId_not: String - fromAccountId_in: [String] - fromAccountId_not_in: [String] - fromAccountId_contains: String - fromAccountId_not_contains: String - fromAccountId_starts_with: String - fromAccountId_ends_with: String - fromAccountId_not_starts_with: String - fromAccountId_not_ends_with: String - toAccountId: String - toAccountId_not: String - toAccountId_in: [String] - toAccountId_not_in: [String] - toAccountId_contains: String - toAccountId_not_contains: String - toAccountId_starts_with: String - toAccountId_ends_with: String - toAccountId_not_starts_with: String - toAccountId_not_ends_with: String - timestamp: BigInt - timestamp_not: BigInt - timestamp_in: [BigInt] - timestamp_not_in: [BigInt] - timestamp_gt: BigInt - timestamp_lt: BigInt - timestamp_gte: BigInt - timestamp_lte: BigInt - logIndex: Int - logIndex_not: Int - logIndex_in: [Int] - logIndex_not_in: [Int] - logIndex_gt: Int - logIndex_lt: Int - logIndex_gte: Int - logIndex_lte: Int - isCex: Boolean - isCex_not: Boolean - isCex_in: [Boolean] - isCex_not_in: [Boolean] - isDex: Boolean - isDex_not: Boolean - isDex_in: [Boolean] - isDex_not_in: [Boolean] - isLending: Boolean - isLending_not: Boolean - isLending_in: [Boolean] - isLending_not_in: [Boolean] - isTotal: Boolean - isTotal_not: Boolean - isTotal_in: [Boolean] - isTotal_not_in: [Boolean] -} - -input delegationFilter { - AND: [delegationFilter] - OR: [delegationFilter] - transactionHash: String - transactionHash_not: String - transactionHash_in: [String] - transactionHash_not_in: [String] - transactionHash_contains: String - transactionHash_not_contains: String - transactionHash_starts_with: String - transactionHash_ends_with: String - transactionHash_not_starts_with: String - transactionHash_not_ends_with: String - 
daoId: String - daoId_not: String - daoId_in: [String] - daoId_not_in: [String] - daoId_contains: String - daoId_not_contains: String - daoId_starts_with: String - daoId_ends_with: String - daoId_not_starts_with: String - daoId_not_ends_with: String - delegateAccountId: String - delegateAccountId_not: String - delegateAccountId_in: [String] - delegateAccountId_not_in: [String] - delegateAccountId_contains: String - delegateAccountId_not_contains: String - delegateAccountId_starts_with: String - delegateAccountId_ends_with: String - delegateAccountId_not_starts_with: String - delegateAccountId_not_ends_with: String - delegatorAccountId: String - delegatorAccountId_not: String - delegatorAccountId_in: [String] - delegatorAccountId_not_in: [String] - delegatorAccountId_contains: String - delegatorAccountId_not_contains: String - delegatorAccountId_starts_with: String - delegatorAccountId_ends_with: String - delegatorAccountId_not_starts_with: String - delegatorAccountId_not_ends_with: String - delegatedValue: BigInt - delegatedValue_not: BigInt - delegatedValue_in: [BigInt] - delegatedValue_not_in: [BigInt] - delegatedValue_gt: BigInt - delegatedValue_lt: BigInt - delegatedValue_gte: BigInt - delegatedValue_lte: BigInt - previousDelegate: String - previousDelegate_not: String - previousDelegate_in: [String] - previousDelegate_not_in: [String] - previousDelegate_contains: String - previousDelegate_not_contains: String - previousDelegate_starts_with: String - previousDelegate_ends_with: String - previousDelegate_not_starts_with: String - previousDelegate_not_ends_with: String - timestamp: BigInt - timestamp_not: BigInt - timestamp_in: [BigInt] - timestamp_not_in: [BigInt] - timestamp_gt: BigInt - timestamp_lt: BigInt - timestamp_gte: BigInt - timestamp_lte: BigInt - logIndex: Int - logIndex_not: Int - logIndex_in: [Int] - logIndex_not_in: [Int] - logIndex_gt: Int - logIndex_lt: Int - logIndex_gte: Int - logIndex_lte: Int - isCex: Boolean - isCex_not: Boolean - isCex_in: 
[Boolean] - isCex_not_in: [Boolean] - isDex: Boolean - isDex_not: Boolean - isDex_in: [Boolean] - isDex_not_in: [Boolean] - isLending: Boolean - isLending_not: Boolean - isLending_in: [Boolean] - isLending_not_in: [Boolean] - isTotal: Boolean - isTotal_not: Boolean - isTotal_in: [Boolean] - isTotal_not_in: [Boolean] -} - -type proposalsOnchainPage { - items: [proposalsOnchain!]! - pageInfo: PageInfo! - totalCount: Int! -} - -type proposalsOnchain { - id: String! - txHash: String! - daoId: String! - proposerAccountId: String! - targets: JSON! - values: JSON! - signatures: JSON! - calldatas: JSON! - startBlock: Int! - endBlock: Int! - description: String! - timestamp: BigInt! - endTimestamp: BigInt! - status: String! - forVotes: BigInt! - againstVotes: BigInt! - abstainVotes: BigInt! - proposalType: Int - votes(where: votesOnchainFilter, orderBy: String, orderDirection: String, before: String, after: String, limit: Int): votesOnchainPage - proposer: account -} - -type votesOnchainPage { - items: [votesOnchain!]! - pageInfo: PageInfo! - totalCount: Int! -} - -type votesOnchain { - txHash: String! - daoId: String! - voterAccountId: String! - proposalId: String! - support: String! - votingPower: BigInt! - reason: String - timestamp: BigInt! 
- proposal: proposalsOnchain - voter: account -} - -input votesOnchainFilter { - AND: [votesOnchainFilter] - OR: [votesOnchainFilter] - txHash: String - txHash_not: String - txHash_in: [String] - txHash_not_in: [String] - txHash_contains: String - txHash_not_contains: String - txHash_starts_with: String - txHash_ends_with: String - txHash_not_starts_with: String - txHash_not_ends_with: String - daoId: String - daoId_not: String - daoId_in: [String] - daoId_not_in: [String] - daoId_contains: String - daoId_not_contains: String - daoId_starts_with: String - daoId_ends_with: String - daoId_not_starts_with: String - daoId_not_ends_with: String - voterAccountId: String - voterAccountId_not: String - voterAccountId_in: [String] - voterAccountId_not_in: [String] - voterAccountId_contains: String - voterAccountId_not_contains: String - voterAccountId_starts_with: String - voterAccountId_ends_with: String - voterAccountId_not_starts_with: String - voterAccountId_not_ends_with: String - proposalId: String - proposalId_not: String - proposalId_in: [String] - proposalId_not_in: [String] - proposalId_contains: String - proposalId_not_contains: String - proposalId_starts_with: String - proposalId_ends_with: String - proposalId_not_starts_with: String - proposalId_not_ends_with: String - support: String - support_not: String - support_in: [String] - support_not_in: [String] - support_contains: String - support_not_contains: String - support_starts_with: String - support_ends_with: String - support_not_starts_with: String - support_not_ends_with: String - votingPower: BigInt - votingPower_not: BigInt - votingPower_in: [BigInt] - votingPower_not_in: [BigInt] - votingPower_gt: BigInt - votingPower_lt: BigInt - votingPower_gte: BigInt - votingPower_lte: BigInt - reason: String - reason_not: String - reason_in: [String] - reason_not_in: [String] - reason_contains: String - reason_not_contains: String - reason_starts_with: String - reason_ends_with: String - reason_not_starts_with: 
String - reason_not_ends_with: String - timestamp: BigInt - timestamp_not: BigInt - timestamp_in: [BigInt] - timestamp_not_in: [BigInt] - timestamp_gt: BigInt - timestamp_lt: BigInt - timestamp_gte: BigInt - timestamp_lte: BigInt -} - -input proposalsOnchainFilter { - AND: [proposalsOnchainFilter] - OR: [proposalsOnchainFilter] - id: String - id_not: String - id_in: [String] - id_not_in: [String] - id_contains: String - id_not_contains: String - id_starts_with: String - id_ends_with: String - id_not_starts_with: String - id_not_ends_with: String - txHash: String - txHash_not: String - txHash_in: [String] - txHash_not_in: [String] - txHash_contains: String - txHash_not_contains: String - txHash_starts_with: String - txHash_ends_with: String - txHash_not_starts_with: String - txHash_not_ends_with: String - daoId: String - daoId_not: String - daoId_in: [String] - daoId_not_in: [String] - daoId_contains: String - daoId_not_contains: String - daoId_starts_with: String - daoId_ends_with: String - daoId_not_starts_with: String - daoId_not_ends_with: String - proposerAccountId: String - proposerAccountId_not: String - proposerAccountId_in: [String] - proposerAccountId_not_in: [String] - proposerAccountId_contains: String - proposerAccountId_not_contains: String - proposerAccountId_starts_with: String - proposerAccountId_ends_with: String - proposerAccountId_not_starts_with: String - proposerAccountId_not_ends_with: String - startBlock: Int - startBlock_not: Int - startBlock_in: [Int] - startBlock_not_in: [Int] - startBlock_gt: Int - startBlock_lt: Int - startBlock_gte: Int - startBlock_lte: Int - endBlock: Int - endBlock_not: Int - endBlock_in: [Int] - endBlock_not_in: [Int] - endBlock_gt: Int - endBlock_lt: Int - endBlock_gte: Int - endBlock_lte: Int - description: String - description_not: String - description_in: [String] - description_not_in: [String] - description_contains: String - description_not_contains: String - description_starts_with: String - 
description_ends_with: String - description_not_starts_with: String - description_not_ends_with: String - timestamp: BigInt - timestamp_not: BigInt - timestamp_in: [BigInt] - timestamp_not_in: [BigInt] - timestamp_gt: BigInt - timestamp_lt: BigInt - timestamp_gte: BigInt - timestamp_lte: BigInt - endTimestamp: BigInt - endTimestamp_not: BigInt - endTimestamp_in: [BigInt] - endTimestamp_not_in: [BigInt] - endTimestamp_gt: BigInt - endTimestamp_lt: BigInt - endTimestamp_gte: BigInt - endTimestamp_lte: BigInt - status: String - status_not: String - status_in: [String] - status_not_in: [String] - status_contains: String - status_not_contains: String - status_starts_with: String - status_ends_with: String - status_not_starts_with: String - status_not_ends_with: String - forVotes: BigInt - forVotes_not: BigInt - forVotes_in: [BigInt] - forVotes_not_in: [BigInt] - forVotes_gt: BigInt - forVotes_lt: BigInt - forVotes_gte: BigInt - forVotes_lte: BigInt - againstVotes: BigInt - againstVotes_not: BigInt - againstVotes_in: [BigInt] - againstVotes_not_in: [BigInt] - againstVotes_gt: BigInt - againstVotes_lt: BigInt - againstVotes_gte: BigInt - againstVotes_lte: BigInt - abstainVotes: BigInt - abstainVotes_not: BigInt - abstainVotes_in: [BigInt] - abstainVotes_not_in: [BigInt] - abstainVotes_gt: BigInt - abstainVotes_lt: BigInt - abstainVotes_gte: BigInt - abstainVotes_lte: BigInt - proposalType: Int - proposalType_not: Int - proposalType_in: [Int] - proposalType_not_in: [Int] - proposalType_gt: Int - proposalType_lt: Int - proposalType_gte: Int - proposalType_lte: Int -} - -type accountPage { - items: [account!]! - pageInfo: PageInfo! - totalCount: Int! 
-} - -input accountFilter { - AND: [accountFilter] - OR: [accountFilter] - id: String - id_not: String - id_in: [String] - id_not_in: [String] - id_contains: String - id_not_contains: String - id_starts_with: String - id_ends_with: String - id_not_starts_with: String - id_not_ends_with: String -} - -type votingPowerHistory { - transactionHash: String! - daoId: String! - accountId: String! - votingPower: BigInt! - delta: BigInt! - deltaMod: BigInt! - timestamp: BigInt! - logIndex: Int! - transfer: transfer - delegation: delegation - account: account -} - -type votingPowerHistoryPage { - items: [votingPowerHistory!]! - pageInfo: PageInfo! - totalCount: Int! -} - -input votingPowerHistoryFilter { - AND: [votingPowerHistoryFilter] - OR: [votingPowerHistoryFilter] - transactionHash: String - transactionHash_not: String - transactionHash_in: [String] - transactionHash_not_in: [String] - transactionHash_contains: String - transactionHash_not_contains: String - transactionHash_starts_with: String - transactionHash_ends_with: String - transactionHash_not_starts_with: String - transactionHash_not_ends_with: String - daoId: String - daoId_not: String - daoId_in: [String] - daoId_not_in: [String] - daoId_contains: String - daoId_not_contains: String - daoId_starts_with: String - daoId_ends_with: String - daoId_not_starts_with: String - daoId_not_ends_with: String - accountId: String - accountId_not: String - accountId_in: [String] - accountId_not_in: [String] - accountId_contains: String - accountId_not_contains: String - accountId_starts_with: String - accountId_ends_with: String - accountId_not_starts_with: String - accountId_not_ends_with: String - votingPower: BigInt - votingPower_not: BigInt - votingPower_in: [BigInt] - votingPower_not_in: [BigInt] - votingPower_gt: BigInt - votingPower_lt: BigInt - votingPower_gte: BigInt - votingPower_lte: BigInt - delta: BigInt - delta_not: BigInt - delta_in: [BigInt] - delta_not_in: [BigInt] - delta_gt: BigInt - delta_lt: BigInt - 
delta_gte: BigInt - delta_lte: BigInt - deltaMod: BigInt - deltaMod_not: BigInt - deltaMod_in: [BigInt] - deltaMod_not_in: [BigInt] - deltaMod_gt: BigInt - deltaMod_lt: BigInt - deltaMod_gte: BigInt - deltaMod_lte: BigInt - timestamp: BigInt - timestamp_not: BigInt - timestamp_in: [BigInt] - timestamp_not_in: [BigInt] - timestamp_gt: BigInt - timestamp_lt: BigInt - timestamp_gte: BigInt - timestamp_lte: BigInt - logIndex: Int - logIndex_not: Int - logIndex_in: [Int] - logIndex_not_in: [Int] - logIndex_gt: Int - logIndex_lt: Int - logIndex_gte: Int - logIndex_lte: Int -} - -type balanceHistory { - transactionHash: String! - daoId: String! - accountId: String! - balance: BigInt! - delta: BigInt! - deltaMod: BigInt! - timestamp: BigInt! - logIndex: Int! -} - -type balanceHistoryPage { - items: [balanceHistory!]! - pageInfo: PageInfo! - totalCount: Int! -} - -input balanceHistoryFilter { - AND: [balanceHistoryFilter] - OR: [balanceHistoryFilter] - transactionHash: String - transactionHash_not: String - transactionHash_in: [String] - transactionHash_not_in: [String] - transactionHash_contains: String - transactionHash_not_contains: String - transactionHash_starts_with: String - transactionHash_ends_with: String - transactionHash_not_starts_with: String - transactionHash_not_ends_with: String - daoId: String - daoId_not: String - daoId_in: [String] - daoId_not_in: [String] - daoId_contains: String - daoId_not_contains: String - daoId_starts_with: String - daoId_ends_with: String - daoId_not_starts_with: String - daoId_not_ends_with: String - accountId: String - accountId_not: String - accountId_in: [String] - accountId_not_in: [String] - accountId_contains: String - accountId_not_contains: String - accountId_starts_with: String - accountId_ends_with: String - accountId_not_starts_with: String - accountId_not_ends_with: String - balance: BigInt - balance_not: BigInt - balance_in: [BigInt] - balance_not_in: [BigInt] - balance_gt: BigInt - balance_lt: BigInt - balance_gte: 
BigInt - balance_lte: BigInt - delta: BigInt - delta_not: BigInt - delta_in: [BigInt] - delta_not_in: [BigInt] - delta_gt: BigInt - delta_lt: BigInt - delta_gte: BigInt - delta_lte: BigInt - deltaMod: BigInt - deltaMod_not: BigInt - deltaMod_in: [BigInt] - deltaMod_not_in: [BigInt] - deltaMod_gt: BigInt - deltaMod_lt: BigInt - deltaMod_gte: BigInt - deltaMod_lte: BigInt - timestamp: BigInt - timestamp_not: BigInt - timestamp_in: [BigInt] - timestamp_not_in: [BigInt] - timestamp_gt: BigInt - timestamp_lt: BigInt - timestamp_gte: BigInt - timestamp_lte: BigInt - logIndex: Int - logIndex_not: Int - logIndex_in: [Int] - logIndex_not_in: [Int] - logIndex_gt: Int - logIndex_lt: Int - logIndex_gte: Int - logIndex_lte: Int -} - -type daoMetricsDayBucket { - date: BigInt! - daoId: String! - tokenId: String! - metricType: metricType! - open: BigInt! - close: BigInt! - low: BigInt! - high: BigInt! - average: BigInt! - volume: BigInt! - count: Int! - lastUpdate: BigInt! -} - -enum metricType { - TOTAL_SUPPLY - DELEGATED_SUPPLY - CEX_SUPPLY - DEX_SUPPLY - LENDING_SUPPLY - CIRCULATING_SUPPLY - TREASURY -} - -type daoMetricsDayBucketPage { - items: [daoMetricsDayBucket!]! - pageInfo: PageInfo! - totalCount: Int! 
-} - -input daoMetricsDayBucketFilter { - AND: [daoMetricsDayBucketFilter] - OR: [daoMetricsDayBucketFilter] - date: BigInt - date_not: BigInt - date_in: [BigInt] - date_not_in: [BigInt] - date_gt: BigInt - date_lt: BigInt - date_gte: BigInt - date_lte: BigInt - daoId: String - daoId_not: String - daoId_in: [String] - daoId_not_in: [String] - daoId_contains: String - daoId_not_contains: String - daoId_starts_with: String - daoId_ends_with: String - daoId_not_starts_with: String - daoId_not_ends_with: String - tokenId: String - tokenId_not: String - tokenId_in: [String] - tokenId_not_in: [String] - tokenId_contains: String - tokenId_not_contains: String - tokenId_starts_with: String - tokenId_ends_with: String - tokenId_not_starts_with: String - tokenId_not_ends_with: String - metricType: metricType - metricType_not: metricType - metricType_in: [metricType] - metricType_not_in: [metricType] - open: BigInt - open_not: BigInt - open_in: [BigInt] - open_not_in: [BigInt] - open_gt: BigInt - open_lt: BigInt - open_gte: BigInt - open_lte: BigInt - close: BigInt - close_not: BigInt - close_in: [BigInt] - close_not_in: [BigInt] - close_gt: BigInt - close_lt: BigInt - close_gte: BigInt - close_lte: BigInt - low: BigInt - low_not: BigInt - low_in: [BigInt] - low_not_in: [BigInt] - low_gt: BigInt - low_lt: BigInt - low_gte: BigInt - low_lte: BigInt - high: BigInt - high_not: BigInt - high_in: [BigInt] - high_not_in: [BigInt] - high_gt: BigInt - high_lt: BigInt - high_gte: BigInt - high_lte: BigInt - average: BigInt - average_not: BigInt - average_in: [BigInt] - average_not_in: [BigInt] - average_gt: BigInt - average_lt: BigInt - average_gte: BigInt - average_lte: BigInt - volume: BigInt - volume_not: BigInt - volume_in: [BigInt] - volume_not_in: [BigInt] - volume_gt: BigInt - volume_lt: BigInt - volume_gte: BigInt - volume_lte: BigInt - count: Int - count_not: Int - count_in: [Int] - count_not_in: [Int] - count_gt: Int - count_lt: Int - count_gte: Int - count_lte: Int - 
lastUpdate: BigInt - lastUpdate_not: BigInt - lastUpdate_in: [BigInt] - lastUpdate_not_in: [BigInt] - lastUpdate_gt: BigInt - lastUpdate_lt: BigInt - lastUpdate_gte: BigInt - lastUpdate_lte: BigInt -} - -type transactionPage { - items: [transaction!]! - pageInfo: PageInfo! - totalCount: Int! -} - -input transactionFilter { - AND: [transactionFilter] - OR: [transactionFilter] - transactionHash: String - transactionHash_not: String - transactionHash_in: [String] - transactionHash_not_in: [String] - transactionHash_contains: String - transactionHash_not_contains: String - transactionHash_starts_with: String - transactionHash_ends_with: String - transactionHash_not_starts_with: String - transactionHash_not_ends_with: String - fromAddress: String - fromAddress_not: String - fromAddress_in: [String] - fromAddress_not_in: [String] - fromAddress_contains: String - fromAddress_not_contains: String - fromAddress_starts_with: String - fromAddress_ends_with: String - fromAddress_not_starts_with: String - fromAddress_not_ends_with: String - toAddress: String - toAddress_not: String - toAddress_in: [String] - toAddress_not_in: [String] - toAddress_contains: String - toAddress_not_contains: String - toAddress_starts_with: String - toAddress_ends_with: String - toAddress_not_starts_with: String - toAddress_not_ends_with: String - isCex: Boolean - isCex_not: Boolean - isCex_in: [Boolean] - isCex_not_in: [Boolean] - isDex: Boolean - isDex_not: Boolean - isDex_in: [Boolean] - isDex_not_in: [Boolean] - isLending: Boolean - isLending_not: Boolean - isLending_in: [Boolean] - isLending_not_in: [Boolean] - isTotal: Boolean - isTotal_not: Boolean - isTotal_in: [Boolean] - isTotal_not_in: [Boolean] - timestamp: BigInt - timestamp_not: BigInt - timestamp_in: [BigInt] - timestamp_not_in: [BigInt] - timestamp_gt: BigInt - timestamp_lt: BigInt - timestamp_gte: BigInt - timestamp_lte: BigInt -} - -type tokenPrice { - price: BigInt! - timestamp: BigInt! 
-} - -type tokenPricePage { - items: [tokenPrice!]! - pageInfo: PageInfo! - totalCount: Int! -} - -input tokenPriceFilter { - AND: [tokenPriceFilter] - OR: [tokenPriceFilter] - price: BigInt - price_not: BigInt - price_in: [BigInt] - price_not_in: [BigInt] - price_gt: BigInt - price_lt: BigInt - price_gte: BigInt - price_lte: BigInt - timestamp: BigInt - timestamp_not: BigInt - timestamp_in: [BigInt] - timestamp_not_in: [BigInt] - timestamp_gt: BigInt - timestamp_lt: BigInt - timestamp_gte: BigInt - timestamp_lte: BigInt -} \ No newline at end of file diff --git a/apps/hypersync-indexer/generated/src/Benchmark.res b/apps/hypersync-indexer/generated/src/Benchmark.res deleted file mode 100644 index 7dd91aa12..000000000 --- a/apps/hypersync-indexer/generated/src/Benchmark.res +++ /dev/null @@ -1,394 +0,0 @@ -module MillisAccum = { - type millis = float - type t = {counters: dict, startTime: Js.Date.t, mutable endTime: Js.Date.t} - let schema: S.t = S.schema(s => { - counters: s.matches(S.dict(S.float)), - startTime: s.matches(S.string->S.datetime), - endTime: s.matches(S.string->S.datetime), - }) - let make: unit => t = () => { - counters: Js.Dict.empty(), - startTime: Js.Date.make(), - endTime: Js.Date.make(), - } - - let increment = (self: t, label, amount) => { - self.endTime = Js.Date.make() - let amount = amount->Belt.Float.fromInt - switch self.counters->Utils.Dict.dangerouslyGetNonOption(label) { - | None => - self.counters->Js.Dict.set(label, amount) - amount - | Some(current) => - let newAmount = current +. 
amount - self.counters->Js.Dict.set(label, newAmount) - newAmount - } - } -} - -module SummaryData = { - module DataSet = { - type t = { - count: float, - min: float, - max: float, - sum: BigDecimal.t, - sumOfSquares: option, - decimalPlaces: int, - } - - let schema = S.schema(s => { - count: s.matches(S.float), - min: s.matches(S.float), - max: s.matches(S.float), - sum: s.matches(BigDecimal.schema), - sumOfSquares: s.matches(S.option(BigDecimal.schema)), - decimalPlaces: s.matches(S.int), - }) - - let make = (val: float, ~decimalPlaces=2) => { - let bigDecimal = val->BigDecimal.fromFloat - { - count: 1., - min: val, - max: val, - sum: bigDecimal, - sumOfSquares: Env.Benchmark.shouldSaveStdDev - ? Some(bigDecimal->BigDecimal.times(bigDecimal)) - : None, - decimalPlaces, - } - } - - let add = (self: t, val: float) => { - let bigDecimal = val->BigDecimal.fromFloat - { - count: self.count +. 1., - min: Pervasives.min(self.min, val), - max: Pervasives.max(self.max, val), - sum: self.sum->BigDecimal.plus(bigDecimal), - sumOfSquares: self.sumOfSquares->Belt.Option.map(s => - s->BigDecimal.plus(bigDecimal->BigDecimal.times(bigDecimal)) - ), - decimalPlaces: self.decimalPlaces, - } - } - } - module Group = { - type t = dict - let schema: S.t = S.dict(DataSet.schema) - let make = (): t => Js.Dict.empty() - - /** - Adds a value to the data set for the given key. If the key does not exist, it will be created. - - Returns the updated data set. 
- */ - let add = (self: t, label, value: float, ~decimalPlaces=2) => { - switch self->Utils.Dict.dangerouslyGetNonOption(label) { - | None => - let new = DataSet.make(value, ~decimalPlaces) - self->Js.Dict.set(label, new) - new - | Some(dataSet) => - let updated = dataSet->DataSet.add(value) - self->Js.Dict.set(label, updated) - updated - } - } - } - - type t = dict - let schema = S.dict(Group.schema) - let make = (): t => Js.Dict.empty() - - let add = (self: t, ~group, ~label, ~value, ~decimalPlaces=2) => { - let group = switch self->Utils.Dict.dangerouslyGetNonOption(group) { - | None => - let newGroup = Group.make() - self->Js.Dict.set(group, newGroup) - newGroup - | Some(group) => group - } - - group->Group.add(label, value, ~decimalPlaces) - } -} - -module Stats = { - open Belt - type t = { - n: float, - mean: float, - @as("std-dev") stdDev: option, - min: float, - max: float, - sum: float, - } - - let round = (float, ~precision=2) => { - let factor = Js.Math.pow_float(~base=10.0, ~exp=precision->Int.toFloat) - Js.Math.round(float *. factor) /. 
factor - } - - let makeFromDataSet = (dataSet: SummaryData.DataSet.t) => { - let n = dataSet.count - let countBigDecimal = n->BigDecimal.fromFloat - let mean = dataSet.sum->BigDecimal.div(countBigDecimal) - - let roundBigDecimal = bd => - bd->BigDecimal.decimalPlaces(dataSet.decimalPlaces)->BigDecimal.toNumber - let roundFloat = float => float->round(~precision=dataSet.decimalPlaces) - - let stdDev = dataSet.sumOfSquares->Option.map(sumOfSquares => { - let variance = - sumOfSquares - ->BigDecimal.div(countBigDecimal) - ->BigDecimal.minus(mean->BigDecimal.times(mean)) - BigDecimal.sqrt(variance)->roundBigDecimal - }) - { - n, - mean: mean->roundBigDecimal, - stdDev, - min: dataSet.min->roundFloat, - max: dataSet.max->roundFloat, - sum: dataSet.sum->roundBigDecimal, - } - } -} - -module Data = { - type t = { - millisAccum: MillisAccum.t, - summaryData: SummaryData.t, - } - - let schema = S.schema(s => { - millisAccum: s.matches(MillisAccum.schema), - summaryData: s.matches(SummaryData.schema), - }) - - let make = () => { - millisAccum: MillisAccum.make(), - summaryData: SummaryData.make(), - } - - module LiveMetrics = { - let addDataSet = if ( - Env.Benchmark.saveDataStrategy->Env.Benchmark.SaveDataStrategy.shouldSavePrometheus - ) { - (dataSet: SummaryData.DataSet.t, ~group, ~label) => { - let {n, mean, stdDev, min, max, sum} = dataSet->Stats.makeFromDataSet - Prometheus.BenchmarkSummaryData.set(~group, ~label, ~n, ~mean, ~stdDev, ~min, ~max, ~sum) - } - } else { - (_dataSet, ~group as _, ~label as _) => () - } - let setCounterMillis = if ( - Env.Benchmark.saveDataStrategy->Env.Benchmark.SaveDataStrategy.shouldSavePrometheus - ) { - (millisAccum: MillisAccum.t, ~label, ~millis) => { - let totalRuntimeMillis = - millisAccum.endTime->Js.Date.getTime -. 
millisAccum.startTime->Js.Date.getTime - Prometheus.BenchmarkCounters.set(~label, ~millis, ~totalRuntimeMillis) - } - } else { - (_, ~label as _, ~millis as _) => () - } - } - - let incrementMillis = (self: t, ~label, ~amount) => { - let nextMillis = self.millisAccum->MillisAccum.increment(label, amount) - self.millisAccum->LiveMetrics.setCounterMillis(~label, ~millis=nextMillis) - } - - let addSummaryData = (self: t, ~group, ~label, ~value, ~decimalPlaces=2) => { - let updatedDataSet = self.summaryData->SummaryData.add(~group, ~label, ~value, ~decimalPlaces) - updatedDataSet->LiveMetrics.addDataSet(~group, ~label) - } -} - -let data = Data.make() -let throttler = Throttler.make( - ~intervalMillis=Env.ThrottleWrites.jsonFileBenchmarkIntervalMillis, - ~logger=Logging.createChild(~params={"context": "Benchmarking framework"}), -) -let cacheFileName = "BenchmarkCache.json" -let cacheFilePath = NodeJs.Path.join(NodeJs.Path.__dirname, cacheFileName) - -let saveToCacheFile = if ( - Env.Benchmark.saveDataStrategy->Env.Benchmark.SaveDataStrategy.shouldSaveJsonFile -) { - //Save to cache file only happens if the strategy is set to json-file - data => { - let write = () => { - let json = data->S.reverseConvertToJsonStringOrThrow(Data.schema) - NodeJs.Fs.Promises.writeFile(~filepath=cacheFilePath, ~content=json) - } - throttler->Throttler.schedule(write) - } -} else { - _ => () -} - -let readFromCacheFile = async () => { - switch await NodeJs.Fs.Promises.readFile(~filepath=cacheFilePath, ~encoding=Utf8) { - | exception _ => None - | content => - try content->S.parseJsonStringOrThrow(Data.schema)->Some catch { - | S.Raised(e) => - Logging.error( - "Failed to parse benchmark cache file, please delete it and rerun the benchmark", - ) - e->S.Error.raise - } - } -} - -let addSummaryData = (~group, ~label, ~value, ~decimalPlaces=2) => { - let _ = data->Data.addSummaryData(~group, ~label, ~value, ~decimalPlaces) - data->saveToCacheFile -} - -let incrementMillis = (~label, ~amount) 
=> { - let _ = data->Data.incrementMillis(~label, ~amount) - data->saveToCacheFile -} - -let addBlockRangeFetched = ( - ~totalTimeElapsed: int, - ~parsingTimeElapsed: int, - ~pageFetchTime: int, - ~chainId, - ~fromBlock, - ~toBlock, - ~numEvents, - ~numAddresses, - ~queryName, -) => { - let group = `BlockRangeFetched Summary for Chain ${chainId->Belt.Int.toString} ${queryName}` - let add = (label, value) => data->Data.addSummaryData(~group, ~label, ~value=Utils.magic(value)) - - add("Total Time Elapsed (ms)", totalTimeElapsed) - add("Parsing Time Elapsed (ms)", parsingTimeElapsed) - add("Page Fetch Time (ms)", pageFetchTime) - add("Num Events", numEvents) - add("Num Addresses", numAddresses) - add("Block Range Size", toBlock - fromBlock) - - data->Data.incrementMillis( - ~label=`Total Time Fetching Chain ${chainId->Belt.Int.toString} ${queryName}`, - ~amount=totalTimeElapsed, - ) - - data->saveToCacheFile -} - -let eventProcessingGroup = "EventProcessing Summary" -let batchSizeLabel = "Batch Size" - -let addEventProcessing = ( - ~batchSize, - ~loadDuration, - ~handlerDuration, - ~dbWriteDuration, - ~totalTimeElapsed, -) => { - let add = (label, value) => - data->Data.addSummaryData(~group=eventProcessingGroup, ~label, ~value=value->Belt.Int.toFloat) - - add(batchSizeLabel, batchSize) - add("Load Duration (ms)", loadDuration) - add("Handler Duration (ms)", handlerDuration) - add("DB Write Duration (ms)", dbWriteDuration) - add("Total Time Elapsed (ms)", totalTimeElapsed) - - data->Data.incrementMillis(~label="Total Time Processing", ~amount=totalTimeElapsed) - - data->saveToCacheFile -} - -module Summary = { - open Belt - - type summaryTable = dict - - external logSummaryTable: summaryTable => unit = "console.table" - external logArrTable: array<'a> => unit = "console.table" - external logObjTable: {..} => unit = "console.table" - external logDictTable: dict<'a> => unit = "console.table" - - external arrayIntToFloat: array => array = "%identity" - - let printSummary 
= async () => { - let data = await readFromCacheFile() - switch data { - | None => - Logging.error( - "No benchmark cache file found, please use 'ENVIO_SAVE_BENCHMARK_DATA=true' and rerun the benchmark", - ) - | Some({summaryData, millisAccum}) => - Js.log("Time breakdown") - let timeBreakdown = [ - ( - "Total Runtime", - DateFns.intervalToDuration({ - start: millisAccum.startTime, - end: millisAccum.endTime, - }), - ), - ] - - millisAccum.counters - ->Js.Dict.entries - ->Array.forEach(((label, millis)) => - timeBreakdown - ->Js.Array2.push((label, DateFns.durationFromMillis(millis->Belt.Int.fromFloat))) - ->ignore - ) - - timeBreakdown - ->Js.Dict.fromArray - ->logDictTable - - Js.log("General") - let batchSizesSum = - summaryData - ->Js.Dict.get(eventProcessingGroup) - ->Option.flatMap(g => g->Js.Dict.get(batchSizeLabel)) - ->Option.map(data => data.sum) - ->Option.getWithDefault(BigDecimal.zero) - - let totalRuntimeMillis = - millisAccum.endTime->Js.Date.getTime -. millisAccum.startTime->Js.Date.getTime - - let totalRuntimeSeconds = totalRuntimeMillis /. 1000. - - let eventsPerSecond = - batchSizesSum - ->BigDecimal.div(BigDecimal.fromFloat(totalRuntimeSeconds)) - ->BigDecimal.decimalPlaces(2) - ->BigDecimal.toNumber - - logObjTable({ - "batch sizes sum": batchSizesSum->BigDecimal.toNumber, - "total runtime (sec)": totalRuntimeSeconds, - "events per second": eventsPerSecond, - }) - - summaryData - ->Js.Dict.entries - ->Js.Array2.sortInPlaceWith(((a, _), (b, _)) => a < b ? 
-1 : 1) - ->Array.forEach(((groupName, group)) => { - Js.log(groupName) - group - ->Js.Dict.entries - ->Array.map(((label, values)) => (label, values->Stats.makeFromDataSet)) - ->Js.Dict.fromArray - ->logDictTable - }) - } - } -} diff --git a/apps/hypersync-indexer/generated/src/ConfigYAML.gen.ts b/apps/hypersync-indexer/generated/src/ConfigYAML.gen.ts deleted file mode 100644 index 2b580012d..000000000 --- a/apps/hypersync-indexer/generated/src/ConfigYAML.gen.ts +++ /dev/null @@ -1,38 +0,0 @@ -/* TypeScript file generated from ConfigYAML.res by genType. */ - -/* eslint-disable */ -/* tslint:disable */ - -const ConfigYAMLJS = require('./ConfigYAML.res.js'); - -export type hyperSyncConfig = { readonly endpointUrl: string }; - -export type hyperFuelConfig = { readonly endpointUrl: string }; - -export abstract class rpcConfig { protected opaque!: any }; /* simulate opaque types */ - -export type syncSource = - { TAG: "HyperSync"; _0: hyperSyncConfig } - | { TAG: "HyperFuel"; _0: hyperFuelConfig } - | { TAG: "Rpc"; _0: rpcConfig }; - -export abstract class aliasAbi { protected opaque!: any }; /* simulate opaque types */ - -export type eventName = string; - -export type contract = { - readonly name: string; - readonly abi: aliasAbi; - readonly addresses: string[]; - readonly events: eventName[] -}; - -export type configYaml = { - readonly syncSource: syncSource; - readonly startBlock: number; - readonly confirmedBlockThreshold: number; - readonly contracts: {[id: string]: contract}; - readonly lowercaseAddresses: boolean -}; - -export const getGeneratedByChainId: (chainId:number) => configYaml = ConfigYAMLJS.getGeneratedByChainId as any; diff --git a/apps/hypersync-indexer/generated/src/ConfigYAML.res b/apps/hypersync-indexer/generated/src/ConfigYAML.res deleted file mode 100644 index deb1a1505..000000000 --- a/apps/hypersync-indexer/generated/src/ConfigYAML.res +++ /dev/null @@ -1,92 +0,0 @@ - -type hyperSyncConfig = {endpointUrl: string} -type hyperFuelConfig = 
{endpointUrl: string} - -@genType.opaque -type rpcConfig = { - syncConfig: Config.sourceSync, -} - -@genType -type syncSource = HyperSync(hyperSyncConfig) | HyperFuel(hyperFuelConfig) | Rpc(rpcConfig) - -@genType.opaque -type aliasAbi = Ethers.abi - -type eventName = string - -type contract = { - name: string, - abi: aliasAbi, - addresses: array, - events: array, -} - -type configYaml = { - syncSource, - startBlock: int, - confirmedBlockThreshold: int, - contracts: dict, - lowercaseAddresses: bool, -} - -let publicConfig = ChainMap.fromArrayUnsafe([ - { - let contracts = Js.Dict.fromArray([ - ( - "ENSToken", - { - name: "ENSToken", - abi: Types.ENSToken.abi, - addresses: [ - "0xC18360217D8F7Ab5e7c516566761Ea12Ce7F9D72", - ], - events: [ - Types.ENSToken.Transfer.name, - Types.ENSToken.DelegateChanged.name, - Types.ENSToken.DelegateVotesChanged.name, - ], - } - ), - ( - "ENSGovernor", - { - name: "ENSGovernor", - abi: Types.ENSGovernor.abi, - addresses: [ - "0x323a76393544d5ecca80cd6ef2a560c6a395b7e3", - ], - events: [ - Types.ENSGovernor.ProposalCreated.name, - Types.ENSGovernor.VoteCast.name, - Types.ENSGovernor.ProposalCanceled.name, - Types.ENSGovernor.ProposalExecuted.name, - Types.ENSGovernor.ProposalQueued.name, - ], - } - ), - ]) - let chain = ChainMap.Chain.makeUnsafe(~chainId=1) - ( - chain, - { - confirmedBlockThreshold: 200, - syncSource: HyperSync({endpointUrl: "https://eth.hypersync.xyz"}), - startBlock: 9380410, - contracts, - lowercaseAddresses: false - } - ) - }, -]) - -@genType -let getGeneratedByChainId: int => configYaml = chainId => { - let chain = ChainMap.Chain.makeUnsafe(~chainId) - if !(publicConfig->ChainMap.has(chain)) { - Js.Exn.raiseError( - "No chain with id " ++ chain->ChainMap.Chain.toString ++ " found in config.yaml", - ) - } - publicConfig->ChainMap.get(chain) -} diff --git a/apps/hypersync-indexer/generated/src/Env.res b/apps/hypersync-indexer/generated/src/Env.res deleted file mode 100644 index 6618abd6d..000000000 --- 
a/apps/hypersync-indexer/generated/src/Env.res +++ /dev/null @@ -1,247 +0,0 @@ -Dotenv.initialize() -%%private( - let envSafe = EnvSafe.make() - - let getLogLevelConfig = (name, ~default): Pino.logLevel => - envSafe->EnvSafe.get( - name, - S.enum([#trace, #debug, #info, #warn, #error, #fatal, #udebug, #uinfo, #uwarn, #uerror]), - ~fallback=default, - ) -) -// resets the timestampCaughtUpToHeadOrEndblock after a restart when true -let updateSyncTimeOnRestart = - envSafe->EnvSafe.get("UPDATE_SYNC_TIME_ON_RESTART", S.bool, ~fallback=true) -let batchSize = envSafe->EnvSafe.get("MAX_BATCH_SIZE", S.option(S.int)) -let targetBufferSize = envSafe->EnvSafe.get("ENVIO_INDEXING_MAX_BUFFER_SIZE", S.option(S.int)) -let maxAddrInPartition = envSafe->EnvSafe.get("MAX_PARTITION_SIZE", S.int, ~fallback=5_000) -let maxPartitionConcurrency = - envSafe->EnvSafe.get("ENVIO_MAX_PARTITION_CONCURRENCY", S.int, ~fallback=10) -let indexingBlockLag = envSafe->EnvSafe.get("ENVIO_INDEXING_BLOCK_LAG", S.option(S.int)) - -// FIXME: This broke HS grafana dashboard. Should investigate it later. Maybe we should use :: as a default value? 
-// We want to be able to set it to 0.0.0.0 -// to allow to passthrough the port from a Docker container -// let serverHost = envSafe->EnvSafe.get("ENVIO_INDEXER_HOST", S.string, ~fallback="localhost") -let serverPort = - envSafe->EnvSafe.get( - "ENVIO_INDEXER_PORT", - S.int->S.port, - ~fallback=envSafe->EnvSafe.get("METRICS_PORT", S.int->S.port, ~fallback=9898), - ) - -let tuiOffEnvVar = envSafe->EnvSafe.get("TUI_OFF", S.bool, ~fallback=false) - -let logFilePath = envSafe->EnvSafe.get("LOG_FILE", S.string, ~fallback="logs/envio.log") -let userLogLevel = getLogLevelConfig("LOG_LEVEL", ~default=#info) -let defaultFileLogLevel = getLogLevelConfig("FILE_LOG_LEVEL", ~default=#trace) - -let prodEnvioAppUrl = "https://envio.dev" -let envioAppUrl = envSafe->EnvSafe.get("ENVIO_APP", S.string, ~fallback=prodEnvioAppUrl) -let envioApiToken = envSafe->EnvSafe.get("ENVIO_API_TOKEN", S.option(S.string)) -let hyperSyncClientTimeoutMillis = - envSafe->EnvSafe.get("ENVIO_HYPERSYNC_CLIENT_TIMEOUT_MILLIS", S.int, ~fallback=120_000) - -/** -This is the number of retries that the binary client makes before rejecting the promise with an error -Default is 0 so that the indexer can handle retries internally -*/ -let hyperSyncClientMaxRetries = - envSafe->EnvSafe.get("ENVIO_HYPERSYNC_CLIENT_MAX_RETRIES", S.int, ~fallback=0) - -let hypersyncClientSerializationFormat = - envSafe->EnvSafe.get( - "ENVIO_HYPERSYNC_CLIENT_SERIALIZATION_FORMAT", - HyperSyncClient.serializationFormatSchema, - ~fallback=CapnProto, - ) - -let hypersyncClientEnableQueryCaching = - envSafe->EnvSafe.get("ENVIO_HYPERSYNC_CLIENT_ENABLE_QUERY_CACHING", S.bool, ~fallback=true) - -let hypersyncLogLevel = - envSafe->EnvSafe.get( - "ENVIO_HYPERSYNC_LOG_LEVEL", - HyperSyncClient.logLevelSchema, - ~fallback=#info, - ) -HyperSyncClient.setLogLevel(hypersyncLogLevel) - -module Benchmark = { - module SaveDataStrategy: { - type t - let schema: S.t - let default: t - let shouldSaveJsonFile: t => bool - let shouldSavePrometheus: t 
=> bool - let shouldSaveData: t => bool - } = { - @unboxed - type t = Bool(bool) | @as("json-file") JsonFile | @as("prometheus") Prometheus - - let schema = S.enum([Bool(true), Bool(false), JsonFile, Prometheus]) - let default = Bool(false) - - let shouldSaveJsonFile = self => - switch self { - | JsonFile | Bool(true) => true - | _ => false - } - - let shouldSavePrometheus = _ => true - - let shouldSaveData = self => self->shouldSavePrometheus || self->shouldSaveJsonFile - } - - let saveDataStrategy = - envSafe->EnvSafe.get( - "ENVIO_SAVE_BENCHMARK_DATA", - SaveDataStrategy.schema, - ~fallback=SaveDataStrategy.default, - ) - - let shouldSaveData = saveDataStrategy->SaveDataStrategy.shouldSaveData - - /** - StdDev involves saving sum of squares of data points, which could get very large. - - Currently only do this for local runs on json-file and not prometheus. - */ - let shouldSaveStdDev = - saveDataStrategy->SaveDataStrategy.shouldSaveJsonFile -} - -let logStrategy = - envSafe->EnvSafe.get( - "LOG_STRATEGY", - S.enum([ - Logging.EcsFile, - EcsConsole, - EcsConsoleMultistream, - FileOnly, - ConsoleRaw, - ConsolePretty, - Both, - ]), - ~fallback=ConsolePretty, - ) - -Logging.setLogger( - Logging.makeLogger(~logStrategy, ~logFilePath, ~defaultFileLogLevel, ~userLogLevel), -) - -module Db = { - let host = envSafe->EnvSafe.get("ENVIO_PG_HOST", S.string, ~devFallback="localhost") - let port = envSafe->EnvSafe.get("ENVIO_PG_PORT", S.int->S.port, ~devFallback=5433) - let user = envSafe->EnvSafe.get("ENVIO_PG_USER", S.string, ~devFallback="postgres") - let password = envSafe->EnvSafe.get( - "ENVIO_PG_PASSWORD", - S.string, - ~fallback={ - envSafe->EnvSafe.get("ENVIO_POSTGRES_PASSWORD", S.string, ~fallback="testing") - }, - ) - let database = envSafe->EnvSafe.get("ENVIO_PG_DATABASE", S.string, ~devFallback="envio-dev") - let publicSchema = envSafe->EnvSafe.get("ENVIO_PG_PUBLIC_SCHEMA", S.string, ~fallback="public") - let ssl = envSafe->EnvSafe.get( - "ENVIO_PG_SSL_MODE", - 
Postgres.sslOptionsSchema, - //this is a dev fallback option for local deployments, shouldn't run in the prod env - //the SSL modes should be provided as string otherwise as 'require' | 'allow' | 'prefer' | 'verify-full' - ~devFallback=Bool(false), - ) -} - -module Hasura = { - // Disable it on HS indexer run, since we don't have Hasura credentials anyways - // Also, it might be useful for some users who don't care about Hasura - let enabled = envSafe->EnvSafe.get("ENVIO_HASURA", S.bool, ~fallback=true) - - let responseLimit = switch envSafe->EnvSafe.get("ENVIO_HASURA_RESPONSE_LIMIT", S.option(S.int)) { - | Some(_) as s => s - | None => envSafe->EnvSafe.get("HASURA_RESPONSE_LIMIT", S.option(S.int)) - } - - let graphqlEndpoint = - envSafe->EnvSafe.get( - "HASURA_GRAPHQL_ENDPOINT", - S.string, - ~devFallback="http://localhost:8080/v1/metadata", - ) - - let url = graphqlEndpoint->Js.String2.slice(~from=0, ~to_=-("/v1/metadata"->Js.String2.length)) - - let role = envSafe->EnvSafe.get("HASURA_GRAPHQL_ROLE", S.string, ~devFallback="admin") - - let secret = envSafe->EnvSafe.get("HASURA_GRAPHQL_ADMIN_SECRET", S.string, ~devFallback="testing") - - let aggregateEntities = envSafe->EnvSafe.get( - "ENVIO_HASURA_PUBLIC_AGGREGATE", - S.union([ - S.array(S.string), - // Temporary workaround: Hosted Service can't use commas in env vars for multiple entities. - // Will be removed once comma support is added — don't rely on this. 
- S.string->S.transform(s => { - parser: string => - switch string->Js.String2.split("&") { - | [] - | [_] => - s.fail(`Provide an array of entities in the JSON format.`) - | entities => entities - }, - }), - ]), - ~fallback=[], - ) -} - -module Configurable = { - /** - Used for backwards compatability - */ - let unstable__temp_unordered_head_mode = envSafe->EnvSafe.get( - "UNSTABLE__TEMP_UNORDERED_HEAD_MODE", - S.option(S.bool), - ) - - let isUnorderedMultichainMode = - envSafe->EnvSafe.get("UNORDERED_MULTICHAIN_MODE", S.option(S.bool)) - - module SyncConfig = { - let initialBlockInterval = - envSafe->EnvSafe.get("ENVIO_RPC_INITIAL_BLOCK_INTERVAL", S.option(S.int)) - let backoffMultiplicative = - envSafe->EnvSafe.get("ENVIO_RPC_BACKOFF_MULTIPLICATIVE", S.option(S.float)) - let accelerationAdditive = - envSafe->EnvSafe.get("ENVIO_RPC_ACCELERATION_ADDITIVE", S.option(S.int)) - let intervalCeiling = envSafe->EnvSafe.get("ENVIO_RPC_INTERVAL_CEILING", S.option(S.int)) - } -} - -module ThrottleWrites = { - let chainMetadataIntervalMillis = - envSafe->EnvSafe.get("ENVIO_THROTTLE_CHAIN_METADATA_INTERVAL_MILLIS", S.int, ~devFallback=500) - let pruneStaleDataIntervalMillis = - envSafe->EnvSafe.get( - "ENVIO_THROTTLE_PRUNE_STALE_DATA_INTERVAL_MILLIS", - S.int, - ~devFallback=30_000, - ) - - let liveMetricsBenchmarkIntervalMillis = - envSafe->EnvSafe.get( - "ENVIO_THROTTLE_LIVE_METRICS_BENCHMARK_INTERVAL_MILLIS", - S.int, - ~devFallback=1_000, - ) - - let jsonFileBenchmarkIntervalMillis = - envSafe->EnvSafe.get( - "ENVIO_THROTTLE_JSON_FILE_BENCHMARK_INTERVAL_MILLIS", - S.int, - ~devFallback=500, - ) -} - -// You need to close the envSafe after you're done with it so that it immediately tells you about your misconfigured environment on startup. 
-envSafe->EnvSafe.close diff --git a/apps/hypersync-indexer/generated/src/EventProcessing.res b/apps/hypersync-indexer/generated/src/EventProcessing.res deleted file mode 100644 index bc394e254..000000000 --- a/apps/hypersync-indexer/generated/src/EventProcessing.res +++ /dev/null @@ -1,478 +0,0 @@ -open Belt - -let allChainsEventsProcessedToEndblock = (chainFetchers: ChainMap.t) => { - chainFetchers - ->ChainMap.values - ->Array.every(cf => cf->ChainFetcher.hasProcessedToEndblock) -} - -let computeChainsState = (chainFetchers: ChainMap.t): Internal.chains => { - let chains = Js.Dict.empty() - - chainFetchers - ->ChainMap.entries - ->Array.forEach(((chain, chainFetcher)) => { - let chainId = chain->ChainMap.Chain.toChainId->Int.toString - let isReady = chainFetcher.timestampCaughtUpToHeadOrEndblock !== None - - chains->Js.Dict.set( - chainId, - { - Internal.isReady: isReady, - }, - ) - }) - - chains -} - -let convertFieldsToJson = (fields: option>) => { - switch fields { - | None => %raw(`{}`) - | Some(fields) => { - let keys = fields->Js.Dict.keys - let new = Js.Dict.empty() - for i in 0 to keys->Js.Array2.length - 1 { - let key = keys->Js.Array2.unsafe_get(i) - let value = fields->Js.Dict.unsafeGet(key) - // Skip `undefined` values and convert bigint fields to string - // There are not fields with nested bigints, so this is safe - new->Js.Dict.set( - key, - Js.typeof(value) === "bigint" ? 
value->Utils.magic->BigInt.toString->Utils.magic : value, - ) - } - new->(Utils.magic: dict => Js.Json.t) - } - } -} - -let addItemToRawEvents = (eventItem: Internal.eventItem, ~inMemoryStore: InMemoryStore.t) => { - let {event, eventConfig, chain, blockNumber, timestamp: blockTimestamp} = eventItem - let {block, transaction, params, logIndex, srcAddress} = event - let chainId = chain->ChainMap.Chain.toChainId - let eventId = EventUtils.packEventIndex(~logIndex, ~blockNumber) - let blockFields = - block - ->(Utils.magic: Internal.eventBlock => option>) - ->convertFieldsToJson - let transactionFields = - transaction - ->(Utils.magic: Internal.eventTransaction => option>) - ->convertFieldsToJson - - blockFields->Types.Block.cleanUpRawEventFieldsInPlace - - // Serialize to unknown, because serializing to Js.Json.t fails for Bytes Fuel type, since it has unknown schema - let params = - params - ->S.reverseConvertOrThrow(eventConfig.paramsRawEventSchema) - ->(Utils.magic: unknown => Js.Json.t) - let params = if params === %raw(`null`) { - // Should probably make the params field nullable - // But this is currently needed to make events - // with empty params work - %raw(`"null"`) - } else { - params - } - - let rawEvent: InternalTable.RawEvents.t = { - chainId, - eventId, - eventName: eventConfig.name, - contractName: eventConfig.contractName, - blockNumber, - logIndex, - srcAddress, - blockHash: block->Types.Block.getId, - blockTimestamp, - blockFields, - transactionFields, - params, - } - - let eventIdStr = eventId->BigInt.toString - - inMemoryStore.rawEvents->InMemoryTable.set({chainId, eventId: eventIdStr}, rawEvent) -} - -exception ProcessingError({message: string, exn: exn, item: Internal.item}) - -let runEventHandlerOrThrow = async ( - item: Internal.item, - ~checkpointId, - ~handler, - ~inMemoryStore, - ~loadManager, - ~persistence, - ~shouldSaveHistory, - ~shouldBenchmark, - ~chains: Internal.chains, -) => { - let eventItem = item->Internal.castUnsafeEventItem 
- - //Include the load in time before handler - let timeBeforeHandler = Hrtime.makeTimer() - - try { - let contextParams: UserContext.contextParams = { - item, - checkpointId, - inMemoryStore, - loadManager, - persistence, - shouldSaveHistory, - isPreload: false, - chains, - isResolved: false, - } - await handler( - ( - { - event: eventItem.event, - context: UserContext.getHandlerContext(contextParams), - }: Internal.handlerArgs - ), - ) - contextParams.isResolved = true - } catch { - | exn => - raise( - ProcessingError({ - message: "Unexpected error in the event handler. Please handle the error to keep the indexer running smoothly.", - item, - exn, - }), - ) - } - if shouldBenchmark { - let timeEnd = timeBeforeHandler->Hrtime.timeSince->Hrtime.toMillis->Hrtime.floatFromMillis - Benchmark.addSummaryData( - ~group="Handlers Per Event", - ~label=`${eventItem.eventConfig.contractName} ${eventItem.eventConfig.name} Handler (ms)`, - ~value=timeEnd, - ~decimalPlaces=4, - ) - } -} - -let runHandlerOrThrow = async ( - item: Internal.item, - ~checkpointId, - ~inMemoryStore, - ~loadManager, - ~indexer: Indexer.t, - ~shouldSaveHistory, - ~shouldBenchmark, - ~chains: Internal.chains, -) => { - switch item { - | Block({onBlockConfig: {handler, chainId}, blockNumber}) => - try { - let contextParams: UserContext.contextParams = { - item, - inMemoryStore, - loadManager, - persistence: indexer.persistence, - shouldSaveHistory, - checkpointId, - isPreload: false, - chains, - isResolved: false, - } - await handler( - ( - { - block: { - number: blockNumber, - chainId, - }, - context: UserContext.getHandlerContext(contextParams), - }: Internal.onBlockArgs - ), - ) - contextParams.isResolved = true - } catch { - | exn => - raise( - ProcessingError({ - message: "Unexpected error in the block handler. 
Please handle the error to keep the indexer running smoothly.", - item, - exn, - }), - ) - } - | Event({eventConfig}) => { - switch eventConfig.handler { - | Some(handler) => - await item->runEventHandlerOrThrow( - ~handler, - ~checkpointId, - ~inMemoryStore, - ~loadManager, - ~persistence=indexer.persistence, - ~shouldSaveHistory, - ~shouldBenchmark, - ~chains, - ) - | None => () - } - - if indexer.config.enableRawEvents { - item->Internal.castUnsafeEventItem->addItemToRawEvents(~inMemoryStore) - } - } - } -} - -let preloadBatchOrThrow = async ( - batch: Batch.t, - ~loadManager, - ~persistence, - ~inMemoryStore, - ~chains: Internal.chains, -) => { - // On the first run of loaders, we don't care about the result, - // whether it's an error or a return type. - // We'll rerun the loader again right before the handler run, - // to avoid having a stale data returned from the loader. - - let promises = [] - let itemIdx = ref(0) - - for checkpointIdx in 0 to batch.checkpointIds->Array.length - 1 { - let checkpointId = batch.checkpointIds->Js.Array2.unsafe_get(checkpointIdx) - let checkpointEventsProcessed = - batch.checkpointEventsProcessed->Js.Array2.unsafe_get(checkpointIdx) - - for idx in 0 to checkpointEventsProcessed - 1 { - let item = batch.items->Js.Array2.unsafe_get(itemIdx.contents + idx) - switch item { - | Event({eventConfig: {handler}, event}) => - switch handler { - | None => () - | Some(handler) => - try { - promises->Array.push( - handler({ - event, - context: UserContext.getHandlerContext({ - item, - inMemoryStore, - loadManager, - persistence, - checkpointId, - isPreload: true, - shouldSaveHistory: false, - chains, - isResolved: false, - }), - })->Promise.silentCatch, - // Must have Promise.catch as well as normal catch, - // because if user throws an error before await in the handler, - // it won't create a rejected promise - ) - } catch { - | _ => () - } - } - | Block({onBlockConfig: {handler, chainId}, blockNumber}) => - try { - promises->Array.push( 
- handler({ - block: { - number: blockNumber, - chainId, - }, - context: UserContext.getHandlerContext({ - item, - inMemoryStore, - loadManager, - persistence, - checkpointId, - isPreload: true, - shouldSaveHistory: false, - chains, - isResolved: false, - }), - })->Promise.silentCatch, - ) - } catch { - | _ => () - } - } - } - - itemIdx := itemIdx.contents + checkpointEventsProcessed - } - - let _ = await Promise.all(promises) -} - -let runBatchHandlersOrThrow = async ( - batch: Batch.t, - ~inMemoryStore, - ~loadManager, - ~indexer, - ~shouldSaveHistory, - ~shouldBenchmark, - ~chains: Internal.chains, -) => { - let itemIdx = ref(0) - - for checkpointIdx in 0 to batch.checkpointIds->Array.length - 1 { - let checkpointId = batch.checkpointIds->Js.Array2.unsafe_get(checkpointIdx) - let checkpointEventsProcessed = - batch.checkpointEventsProcessed->Js.Array2.unsafe_get(checkpointIdx) - - for idx in 0 to checkpointEventsProcessed - 1 { - let item = batch.items->Js.Array2.unsafe_get(itemIdx.contents + idx) - - await runHandlerOrThrow( - item, - ~checkpointId, - ~inMemoryStore, - ~loadManager, - ~indexer, - ~shouldSaveHistory, - ~shouldBenchmark, - ~chains, - ) - } - itemIdx := itemIdx.contents + checkpointEventsProcessed - } -} - -let registerProcessEventBatchMetrics = ( - ~logger, - ~loadDuration, - ~handlerDuration, - ~dbWriteDuration, -) => { - logger->Logging.childTrace({ - "msg": "Finished processing batch", - "loader_time_elapsed": loadDuration, - "handlers_time_elapsed": handlerDuration, - "write_time_elapsed": dbWriteDuration, - }) - - Prometheus.incrementLoadEntityDurationCounter(~duration=loadDuration) - Prometheus.incrementEventRouterDurationCounter(~duration=handlerDuration) - Prometheus.incrementExecuteBatchDurationCounter(~duration=dbWriteDuration) -} - -type logPartitionInfo = { - batchSize: int, - firstItemTimestamp: option, - firstItemBlockNumber?: int, - lastItemBlockNumber?: int, -} - -let processEventBatch = async ( - ~batch: Batch.t, - 
~inMemoryStore: InMemoryStore.t, - ~isInReorgThreshold, - ~loadManager, - ~indexer: Indexer.t, - ~chainFetchers: ChainMap.t, -) => { - let totalBatchSize = batch.totalBatchSize - // Compute chains state for this batch - let chains: Internal.chains = chainFetchers->computeChainsState - - let logger = Logging.getLogger() - logger->Logging.childTrace({ - "msg": "Started processing batch", - "totalBatchSize": totalBatchSize, - "chains": batch.progressedChainsById->Utils.Dict.mapValues(chainAfterBatch => { - { - "batchSize": chainAfterBatch.batchSize, - "progress": chainAfterBatch.progressBlockNumber, - } - }), - }) - - try { - let timeRef = Hrtime.makeTimer() - - if batch.items->Utils.Array.notEmpty { - await batch->preloadBatchOrThrow( - ~loadManager, - ~persistence=indexer.persistence, - ~inMemoryStore, - ~chains, - ) - } - - let elapsedTimeAfterLoaders = timeRef->Hrtime.timeSince->Hrtime.toMillis->Hrtime.intFromMillis - - if batch.items->Utils.Array.notEmpty { - await batch->runBatchHandlersOrThrow( - ~inMemoryStore, - ~loadManager, - ~indexer, - ~shouldSaveHistory=indexer.config->Config.shouldSaveHistory(~isInReorgThreshold), - ~shouldBenchmark=Env.Benchmark.shouldSaveData, - ~chains, - ) - } - - let elapsedTimeAfterProcessing = - timeRef->Hrtime.timeSince->Hrtime.toMillis->Hrtime.intFromMillis - - let rec executeBatch = async (~escapeTables=?) => { - switch await indexer.persistence.sql->IO.executeBatch( - ~batch, - ~inMemoryStore, - ~isInReorgThreshold, - ~indexer, - ~escapeTables?, - ) { - | exception Persistence.StorageError({message, reason}) => - reason->ErrorHandling.make(~msg=message, ~logger)->Error - - | exception PgStorage.PgEncodingError({table}) => - let escapeTables = switch escapeTables { - | Some(set) => set - | None => Utils.Set.make() - } - let _ = escapeTables->Utils.Set.add(table) - // Retry with specifying which tables to escape. 
- await executeBatch(~escapeTables) - | exception exn => - exn->ErrorHandling.make(~msg="Failed writing batch to database", ~logger)->Error - | () => { - let elapsedTimeAfterDbWrite = - timeRef->Hrtime.timeSince->Hrtime.toMillis->Hrtime.intFromMillis - let loaderDuration = elapsedTimeAfterLoaders - let handlerDuration = elapsedTimeAfterProcessing - loaderDuration - let dbWriteDuration = elapsedTimeAfterDbWrite - elapsedTimeAfterProcessing - registerProcessEventBatchMetrics( - ~logger, - ~loadDuration=loaderDuration, - ~handlerDuration, - ~dbWriteDuration, - ) - if Env.Benchmark.shouldSaveData { - Benchmark.addEventProcessing( - ~batchSize=totalBatchSize, - ~loadDuration=loaderDuration, - ~handlerDuration, - ~dbWriteDuration, - ~totalTimeElapsed=elapsedTimeAfterDbWrite, - ) - } - Ok() - } - } - } - - await executeBatch() - } catch { - | ProcessingError({message, exn, item}) => - exn - ->ErrorHandling.make(~msg=message, ~logger=item->Logging.getItemLogger) - ->Error - } -} diff --git a/apps/hypersync-indexer/generated/src/Generated.res b/apps/hypersync-indexer/generated/src/Generated.res deleted file mode 100644 index 4d704a8e0..000000000 --- a/apps/hypersync-indexer/generated/src/Generated.res +++ /dev/null @@ -1,206 +0,0 @@ -@val external require: string => unit = "require" - -let registerContractHandlers = ( - ~contractName, - ~handlerPathRelativeToRoot, - ~handlerPathRelativeToConfig, -) => { - try { - require(`../${Path.relativePathToRootFromGenerated}/${handlerPathRelativeToRoot}`) - } catch { - | exn => - let params = { - "Contract Name": contractName, - "Expected Handler Path": handlerPathRelativeToConfig, - "Code": "EE500", - } - let logger = Logging.createChild(~params) - - let errHandler = exn->ErrorHandling.make(~msg="Failed to import handler file", ~logger) - errHandler->ErrorHandling.log - errHandler->ErrorHandling.raiseExn - } -} - -let makeGeneratedConfig = () => { - let chains = [ - { - let contracts = [ - { - Config.name: "ENSToken", - abi: 
Types.ENSToken.abi, - addresses: [ - "0xC18360217D8F7Ab5e7c516566761Ea12Ce7F9D72"->Address.Evm.fromStringOrThrow -, - ], - events: [ - (Types.ENSToken.Transfer.register() :> Internal.eventConfig), - (Types.ENSToken.DelegateChanged.register() :> Internal.eventConfig), - (Types.ENSToken.DelegateVotesChanged.register() :> Internal.eventConfig), - ], - startBlock: None, - }, - { - Config.name: "ENSGovernor", - abi: Types.ENSGovernor.abi, - addresses: [ - "0x323a76393544d5ecca80cd6ef2a560c6a395b7e3"->Address.Evm.fromStringOrThrow -, - ], - events: [ - (Types.ENSGovernor.ProposalCreated.register() :> Internal.eventConfig), - (Types.ENSGovernor.VoteCast.register() :> Internal.eventConfig), - (Types.ENSGovernor.ProposalCanceled.register() :> Internal.eventConfig), - (Types.ENSGovernor.ProposalExecuted.register() :> Internal.eventConfig), - (Types.ENSGovernor.ProposalQueued.register() :> Internal.eventConfig), - ], - startBlock: Some(13533772), - }, - ] - let chain = ChainMap.Chain.makeUnsafe(~chainId=1) - { - Config.maxReorgDepth: 200, - startBlock: 9380410, - id: 1, - contracts, - sources: NetworkSources.evm(~chain, ~contracts=[{name: "ENSToken",events: [Types.ENSToken.Transfer.register(), Types.ENSToken.DelegateChanged.register(), Types.ENSToken.DelegateVotesChanged.register()],abi: Types.ENSToken.abi}, {name: "ENSGovernor",events: [Types.ENSGovernor.ProposalCreated.register(), Types.ENSGovernor.VoteCast.register(), Types.ENSGovernor.ProposalCanceled.register(), Types.ENSGovernor.ProposalExecuted.register(), Types.ENSGovernor.ProposalQueued.register()],abi: Types.ENSGovernor.abi}], ~hyperSync=Some("https://eth.hypersync.xyz"), ~allEventSignatures=[Types.ENSToken.eventSignatures, Types.ENSGovernor.eventSignatures]->Belt.Array.concatMany, ~shouldUseHypersyncClientDecoder=true, ~rpcs=[], ~lowercaseAddresses=false) - } - }, - ] - - Config.make( - ~shouldRollbackOnReorg=true, - ~shouldSaveFullHistory=false, - ~multichain=if ( - 
Env.Configurable.isUnorderedMultichainMode->Belt.Option.getWithDefault( - Env.Configurable.unstable__temp_unordered_head_mode->Belt.Option.getWithDefault( - false, - ), - ) - ) { - Unordered - } else { - Ordered - }, - ~chains, - ~enableRawEvents=false, - ~batchSize=?Env.batchSize, - ~preloadHandlers=false, - ~lowercaseAddresses=false, - ~shouldUseHypersyncClientDecoder=true, - ) -} - -let configWithoutRegistrations = makeGeneratedConfig() - -let registerAllHandlers = () => { - EventRegister.startRegistration( - ~ecosystem=configWithoutRegistrations.ecosystem, - ~multichain=configWithoutRegistrations.multichain, - ~preloadHandlers=configWithoutRegistrations.preloadHandlers, - ) - - registerContractHandlers( - ~contractName="ENSGovernor", - ~handlerPathRelativeToRoot="src/eventHandlers/ENSGovernor.ts", - ~handlerPathRelativeToConfig="src/eventHandlers/ENSGovernor.ts", - ) - registerContractHandlers( - ~contractName="ENSToken", - ~handlerPathRelativeToRoot="src/eventHandlers/ENSToken.ts", - ~handlerPathRelativeToConfig="src/eventHandlers/ENSToken.ts", - ) - - EventRegister.finishRegistration() -} - -let initialSql = Db.makeClient() -let storagePgSchema = Env.Db.publicSchema -let makeStorage = (~sql, ~pgSchema=storagePgSchema, ~isHasuraEnabled=Env.Hasura.enabled) => { - PgStorage.make( - ~sql, - ~pgSchema, - ~pgHost=Env.Db.host, - ~pgUser=Env.Db.user, - ~pgPort=Env.Db.port, - ~pgDatabase=Env.Db.database, - ~pgPassword=Env.Db.password, - ~onInitialize=?{ - if isHasuraEnabled { - Some( - () => { - Hasura.trackDatabase( - ~endpoint=Env.Hasura.graphqlEndpoint, - ~auth={ - role: Env.Hasura.role, - secret: Env.Hasura.secret, - }, - ~pgSchema=storagePgSchema, - ~userEntities=Entities.userEntities, - ~responseLimit=Env.Hasura.responseLimit, - ~schema=Db.schema, - ~aggregateEntities=Env.Hasura.aggregateEntities, - )->Promise.catch(err => { - Logging.errorWithExn( - err->Utils.prettifyExn, - `EE803: Error tracking tables`, - )->Promise.resolve - }) - }, - ) - } else { - None - 
} - }, - ~onNewTables=?{ - if isHasuraEnabled { - Some( - (~tableNames) => { - Hasura.trackTables( - ~endpoint=Env.Hasura.graphqlEndpoint, - ~auth={ - role: Env.Hasura.role, - secret: Env.Hasura.secret, - }, - ~pgSchema=storagePgSchema, - ~tableNames, - )->Promise.catch(err => { - Logging.errorWithExn( - err->Utils.prettifyExn, - `EE804: Error tracking new tables`, - )->Promise.resolve - }) - }, - ) - } else { - None - } - }, - ~isHasuraEnabled, - ) -} - -let codegenPersistence = Persistence.make( - ~userEntities=Entities.userEntities, - ~allEnums=Enums.allEnums, - ~storage=makeStorage(~sql=initialSql), - ~sql=initialSql, -) - -%%private(let indexer: ref> = ref(None)) -let getIndexer = () => { - switch indexer.contents { - | Some(indexer) => indexer - | None => - let i = { - Indexer.registrations: registerAllHandlers(), - // Need to recreate initial config one more time, - // since configWithoutRegistrations called register for event - // before they were ready - config: makeGeneratedConfig(), - persistence: codegenPersistence, - } - indexer := Some(i) - i - } -} diff --git a/apps/hypersync-indexer/generated/src/GqlDbCustomTypes.gen.ts b/apps/hypersync-indexer/generated/src/GqlDbCustomTypes.gen.ts deleted file mode 100644 index f9f2cf2c7..000000000 --- a/apps/hypersync-indexer/generated/src/GqlDbCustomTypes.gen.ts +++ /dev/null @@ -1,8 +0,0 @@ -/* TypeScript file generated from GqlDbCustomTypes.res by genType. 
*/ - -/* eslint-disable */ -/* tslint:disable */ - -export type Float_t = number; - -export type Int_t = number; diff --git a/apps/hypersync-indexer/generated/src/GqlDbCustomTypes.res b/apps/hypersync-indexer/generated/src/GqlDbCustomTypes.res deleted file mode 100644 index 8915358d6..000000000 --- a/apps/hypersync-indexer/generated/src/GqlDbCustomTypes.res +++ /dev/null @@ -1,11 +0,0 @@ -// Can be deleted on a breaking release (V3) - -module Float = { - @genType - type t = float -} - -module Int = { - @genType - type t = int -} diff --git a/apps/hypersync-indexer/generated/src/Handlers.gen.ts b/apps/hypersync-indexer/generated/src/Handlers.gen.ts deleted file mode 100644 index 5e57e88fe..000000000 --- a/apps/hypersync-indexer/generated/src/Handlers.gen.ts +++ /dev/null @@ -1,165 +0,0 @@ -/* TypeScript file generated from Handlers.res by genType. */ - -/* eslint-disable */ -/* tslint:disable */ - -const HandlersJS = require('./Handlers.res.js'); - -import type {ENSGovernor_ProposalCanceled_eventFilters as Types_ENSGovernor_ProposalCanceled_eventFilters} from './Types.gen'; - -import type {ENSGovernor_ProposalCanceled_event as Types_ENSGovernor_ProposalCanceled_event} from './Types.gen'; - -import type {ENSGovernor_ProposalCreated_eventFilters as Types_ENSGovernor_ProposalCreated_eventFilters} from './Types.gen'; - -import type {ENSGovernor_ProposalCreated_event as Types_ENSGovernor_ProposalCreated_event} from './Types.gen'; - -import type {ENSGovernor_ProposalExecuted_eventFilters as Types_ENSGovernor_ProposalExecuted_eventFilters} from './Types.gen'; - -import type {ENSGovernor_ProposalExecuted_event as Types_ENSGovernor_ProposalExecuted_event} from './Types.gen'; - -import type {ENSGovernor_ProposalQueued_eventFilters as Types_ENSGovernor_ProposalQueued_eventFilters} from './Types.gen'; - -import type {ENSGovernor_ProposalQueued_event as Types_ENSGovernor_ProposalQueued_event} from './Types.gen'; - -import type {ENSGovernor_VoteCast_eventFilters as 
Types_ENSGovernor_VoteCast_eventFilters} from './Types.gen'; - -import type {ENSGovernor_VoteCast_event as Types_ENSGovernor_VoteCast_event} from './Types.gen'; - -import type {ENSToken_DelegateChanged_eventFilters as Types_ENSToken_DelegateChanged_eventFilters} from './Types.gen'; - -import type {ENSToken_DelegateChanged_event as Types_ENSToken_DelegateChanged_event} from './Types.gen'; - -import type {ENSToken_DelegateVotesChanged_eventFilters as Types_ENSToken_DelegateVotesChanged_eventFilters} from './Types.gen'; - -import type {ENSToken_DelegateVotesChanged_event as Types_ENSToken_DelegateVotesChanged_event} from './Types.gen'; - -import type {ENSToken_Transfer_eventFilters as Types_ENSToken_Transfer_eventFilters} from './Types.gen'; - -import type {ENSToken_Transfer_event as Types_ENSToken_Transfer_event} from './Types.gen'; - -import type {HandlerTypes_eventConfig as Types_HandlerTypes_eventConfig} from './Types.gen'; - -import type {chain as Types_chain} from './Types.gen'; - -import type {contractRegistrations as Types_contractRegistrations} from './Types.gen'; - -import type {fnWithEventConfig as Types_fnWithEventConfig} from './Types.gen'; - -import type {genericContractRegisterArgs as Internal_genericContractRegisterArgs} from 'envio/src/Internal.gen'; - -import type {genericContractRegister as Internal_genericContractRegister} from 'envio/src/Internal.gen'; - -import type {genericHandlerArgs as Internal_genericHandlerArgs} from 'envio/src/Internal.gen'; - -import type {genericHandlerWithLoader as Internal_genericHandlerWithLoader} from 'envio/src/Internal.gen'; - -import type {genericHandler as Internal_genericHandler} from 'envio/src/Internal.gen'; - -import type {genericLoaderArgs as Internal_genericLoaderArgs} from 'envio/src/Internal.gen'; - -import type {genericLoader as Internal_genericLoader} from 'envio/src/Internal.gen'; - -import type {handlerContext as Types_handlerContext} from './Types.gen'; - -import type {loaderContext as 
Types_loaderContext} from './Types.gen'; - -import type {onBlockArgs as Envio_onBlockArgs} from 'envio/src/Envio.gen'; - -import type {onBlockOptions as Envio_onBlockOptions} from 'envio/src/Envio.gen'; - -export const ENSGovernor_ProposalCreated_contractRegister: Types_fnWithEventConfig>,Types_HandlerTypes_eventConfig> = HandlersJS.ENSGovernor.ProposalCreated.contractRegister as any; - -export const ENSGovernor_ProposalCreated_handler: Types_fnWithEventConfig>,Types_HandlerTypes_eventConfig> = HandlersJS.ENSGovernor.ProposalCreated.handler as any; - -export const ENSGovernor_ProposalCreated_handlerWithLoader: (_1:Internal_genericHandlerWithLoader,loaderReturn>,Internal_genericHandler>,Types_ENSGovernor_ProposalCreated_eventFilters>) => void = HandlersJS.ENSGovernor.ProposalCreated.handlerWithLoader as any; - -export const ENSGovernor_VoteCast_contractRegister: Types_fnWithEventConfig>,Types_HandlerTypes_eventConfig> = HandlersJS.ENSGovernor.VoteCast.contractRegister as any; - -export const ENSGovernor_VoteCast_handler: Types_fnWithEventConfig>,Types_HandlerTypes_eventConfig> = HandlersJS.ENSGovernor.VoteCast.handler as any; - -export const ENSGovernor_VoteCast_handlerWithLoader: (_1:Internal_genericHandlerWithLoader,loaderReturn>,Internal_genericHandler>,Types_ENSGovernor_VoteCast_eventFilters>) => void = HandlersJS.ENSGovernor.VoteCast.handlerWithLoader as any; - -export const ENSGovernor_ProposalCanceled_contractRegister: Types_fnWithEventConfig>,Types_HandlerTypes_eventConfig> = HandlersJS.ENSGovernor.ProposalCanceled.contractRegister as any; - -export const ENSGovernor_ProposalCanceled_handler: Types_fnWithEventConfig>,Types_HandlerTypes_eventConfig> = HandlersJS.ENSGovernor.ProposalCanceled.handler as any; - -export const ENSGovernor_ProposalCanceled_handlerWithLoader: (_1:Internal_genericHandlerWithLoader,loaderReturn>,Internal_genericHandler>,Types_ENSGovernor_ProposalCanceled_eventFilters>) => void = 
HandlersJS.ENSGovernor.ProposalCanceled.handlerWithLoader as any; - -export const ENSGovernor_ProposalExecuted_contractRegister: Types_fnWithEventConfig>,Types_HandlerTypes_eventConfig> = HandlersJS.ENSGovernor.ProposalExecuted.contractRegister as any; - -export const ENSGovernor_ProposalExecuted_handler: Types_fnWithEventConfig>,Types_HandlerTypes_eventConfig> = HandlersJS.ENSGovernor.ProposalExecuted.handler as any; - -export const ENSGovernor_ProposalExecuted_handlerWithLoader: (_1:Internal_genericHandlerWithLoader,loaderReturn>,Internal_genericHandler>,Types_ENSGovernor_ProposalExecuted_eventFilters>) => void = HandlersJS.ENSGovernor.ProposalExecuted.handlerWithLoader as any; - -export const ENSGovernor_ProposalQueued_contractRegister: Types_fnWithEventConfig>,Types_HandlerTypes_eventConfig> = HandlersJS.ENSGovernor.ProposalQueued.contractRegister as any; - -export const ENSGovernor_ProposalQueued_handler: Types_fnWithEventConfig>,Types_HandlerTypes_eventConfig> = HandlersJS.ENSGovernor.ProposalQueued.handler as any; - -export const ENSGovernor_ProposalQueued_handlerWithLoader: (_1:Internal_genericHandlerWithLoader,loaderReturn>,Internal_genericHandler>,Types_ENSGovernor_ProposalQueued_eventFilters>) => void = HandlersJS.ENSGovernor.ProposalQueued.handlerWithLoader as any; - -export const ENSToken_Transfer_contractRegister: Types_fnWithEventConfig>,Types_HandlerTypes_eventConfig> = HandlersJS.ENSToken.Transfer.contractRegister as any; - -export const ENSToken_Transfer_handler: Types_fnWithEventConfig>,Types_HandlerTypes_eventConfig> = HandlersJS.ENSToken.Transfer.handler as any; - -export const ENSToken_Transfer_handlerWithLoader: (_1:Internal_genericHandlerWithLoader,loaderReturn>,Internal_genericHandler>,Types_ENSToken_Transfer_eventFilters>) => void = HandlersJS.ENSToken.Transfer.handlerWithLoader as any; - -export const ENSToken_DelegateChanged_contractRegister: Types_fnWithEventConfig>,Types_HandlerTypes_eventConfig> = 
HandlersJS.ENSToken.DelegateChanged.contractRegister as any; - -export const ENSToken_DelegateChanged_handler: Types_fnWithEventConfig>,Types_HandlerTypes_eventConfig> = HandlersJS.ENSToken.DelegateChanged.handler as any; - -export const ENSToken_DelegateChanged_handlerWithLoader: (_1:Internal_genericHandlerWithLoader,loaderReturn>,Internal_genericHandler>,Types_ENSToken_DelegateChanged_eventFilters>) => void = HandlersJS.ENSToken.DelegateChanged.handlerWithLoader as any; - -export const ENSToken_DelegateVotesChanged_contractRegister: Types_fnWithEventConfig>,Types_HandlerTypes_eventConfig> = HandlersJS.ENSToken.DelegateVotesChanged.contractRegister as any; - -export const ENSToken_DelegateVotesChanged_handler: Types_fnWithEventConfig>,Types_HandlerTypes_eventConfig> = HandlersJS.ENSToken.DelegateVotesChanged.handler as any; - -export const ENSToken_DelegateVotesChanged_handlerWithLoader: (_1:Internal_genericHandlerWithLoader,loaderReturn>,Internal_genericHandler>,Types_ENSToken_DelegateVotesChanged_eventFilters>) => void = HandlersJS.ENSToken.DelegateVotesChanged.handlerWithLoader as any; - -/** Register a Block Handler. It'll be called for every block by default. 
*/ -export const onBlock: (_1:Envio_onBlockOptions, _2:((_1:Envio_onBlockArgs) => Promise)) => void = HandlersJS.onBlock as any; - -export const ENSGovernor: { - VoteCast: { - handlerWithLoader: (_1:Internal_genericHandlerWithLoader,loaderReturn>,Internal_genericHandler>,Types_ENSGovernor_VoteCast_eventFilters>) => void; - handler: Types_fnWithEventConfig>,Types_HandlerTypes_eventConfig>; - contractRegister: Types_fnWithEventConfig>,Types_HandlerTypes_eventConfig> - }; - ProposalQueued: { - handlerWithLoader: (_1:Internal_genericHandlerWithLoader,loaderReturn>,Internal_genericHandler>,Types_ENSGovernor_ProposalQueued_eventFilters>) => void; - handler: Types_fnWithEventConfig>,Types_HandlerTypes_eventConfig>; - contractRegister: Types_fnWithEventConfig>,Types_HandlerTypes_eventConfig> - }; - ProposalCreated: { - handlerWithLoader: (_1:Internal_genericHandlerWithLoader,loaderReturn>,Internal_genericHandler>,Types_ENSGovernor_ProposalCreated_eventFilters>) => void; - handler: Types_fnWithEventConfig>,Types_HandlerTypes_eventConfig>; - contractRegister: Types_fnWithEventConfig>,Types_HandlerTypes_eventConfig> - }; - ProposalCanceled: { - handlerWithLoader: (_1:Internal_genericHandlerWithLoader,loaderReturn>,Internal_genericHandler>,Types_ENSGovernor_ProposalCanceled_eventFilters>) => void; - handler: Types_fnWithEventConfig>,Types_HandlerTypes_eventConfig>; - contractRegister: Types_fnWithEventConfig>,Types_HandlerTypes_eventConfig> - }; - ProposalExecuted: { - handlerWithLoader: (_1:Internal_genericHandlerWithLoader,loaderReturn>,Internal_genericHandler>,Types_ENSGovernor_ProposalExecuted_eventFilters>) => void; - handler: Types_fnWithEventConfig>,Types_HandlerTypes_eventConfig>; - contractRegister: Types_fnWithEventConfig>,Types_HandlerTypes_eventConfig> - } -} = HandlersJS.ENSGovernor as any; - -export const ENSToken: { - Transfer: { - handlerWithLoader: 
(_1:Internal_genericHandlerWithLoader,loaderReturn>,Internal_genericHandler>,Types_ENSToken_Transfer_eventFilters>) => void; - handler: Types_fnWithEventConfig>,Types_HandlerTypes_eventConfig>; - contractRegister: Types_fnWithEventConfig>,Types_HandlerTypes_eventConfig> - }; - DelegateChanged: { - handlerWithLoader: (_1:Internal_genericHandlerWithLoader,loaderReturn>,Internal_genericHandler>,Types_ENSToken_DelegateChanged_eventFilters>) => void; - handler: Types_fnWithEventConfig>,Types_HandlerTypes_eventConfig>; - contractRegister: Types_fnWithEventConfig>,Types_HandlerTypes_eventConfig> - }; - DelegateVotesChanged: { - handlerWithLoader: (_1:Internal_genericHandlerWithLoader,loaderReturn>,Internal_genericHandler>,Types_ENSToken_DelegateVotesChanged_eventFilters>) => void; - handler: Types_fnWithEventConfig>,Types_HandlerTypes_eventConfig>; - contractRegister: Types_fnWithEventConfig>,Types_HandlerTypes_eventConfig> - } -} = HandlersJS.ENSToken as any; diff --git a/apps/hypersync-indexer/generated/src/Handlers.res b/apps/hypersync-indexer/generated/src/Handlers.res deleted file mode 100644 index b6de3af00..000000000 --- a/apps/hypersync-indexer/generated/src/Handlers.res +++ /dev/null @@ -1,23 +0,0 @@ - @genType -module ENSGovernor = { - module ProposalCreated = Types.MakeRegister(Types.ENSGovernor.ProposalCreated) - module VoteCast = Types.MakeRegister(Types.ENSGovernor.VoteCast) - module ProposalCanceled = Types.MakeRegister(Types.ENSGovernor.ProposalCanceled) - module ProposalExecuted = Types.MakeRegister(Types.ENSGovernor.ProposalExecuted) - module ProposalQueued = Types.MakeRegister(Types.ENSGovernor.ProposalQueued) -} - - @genType -module ENSToken = { - module Transfer = Types.MakeRegister(Types.ENSToken.Transfer) - module DelegateChanged = Types.MakeRegister(Types.ENSToken.DelegateChanged) - module DelegateVotesChanged = Types.MakeRegister(Types.ENSToken.DelegateVotesChanged) -} - -@genType /** Register a Block Handler. 
It'll be called for every block by default. */ -let onBlock: ( - Envio.onBlockOptions, - Envio.onBlockArgs => promise, -) => unit = ( - EventRegister.onBlock: (unknown, Internal.onBlockArgs => promise) => unit -)->Utils.magic diff --git a/apps/hypersync-indexer/generated/src/IO.res b/apps/hypersync-indexer/generated/src/IO.res deleted file mode 100644 index 3b69a3313..000000000 --- a/apps/hypersync-indexer/generated/src/IO.res +++ /dev/null @@ -1,396 +0,0 @@ -open Belt - -let executeSet = ( - sql: Postgres.sql, - ~items: array<'a>, - ~dbFunction: (Postgres.sql, array<'a>) => promise, -) => { - if items->Array.length > 0 { - sql->dbFunction(items) - } else { - Promise.resolve() - } -} - -let executeBatch = async ( - sql, - ~batch: Batch.t, - ~inMemoryStore: InMemoryStore.t, - ~isInReorgThreshold, - ~indexer: Indexer.t, - ~escapeTables=?, -) => { - let shouldSaveHistory = indexer.config->Config.shouldSaveHistory(~isInReorgThreshold) - - let specificError = ref(None) - - let setRawEvents = executeSet( - _, - ~dbFunction=(sql, items) => { - sql->PgStorage.setOrThrow( - ~items, - ~table=InternalTable.RawEvents.table, - ~itemSchema=InternalTable.RawEvents.schema, - ~pgSchema=Generated.storagePgSchema, - ) - }, - ~items=inMemoryStore.rawEvents->InMemoryTable.values, - ) - - let setEntities = Entities.allEntities->Belt.Array.map(entityConfig => { - let entitiesToSet = [] - let idsToDelete = [] - - let rows = - inMemoryStore - ->InMemoryStore.getInMemTable(~entityConfig) - ->InMemoryTable.Entity.rows - - rows->Js.Array2.forEach(row => { - switch row { - | Updated({latest: {entityUpdateAction: Set(entity)}}) => entitiesToSet->Array.push(entity) - | Updated({latest: {entityUpdateAction: Delete, entityId}}) => - idsToDelete->Array.push(entityId) - | _ => () - } - }) - - let shouldRemoveInvalidUtf8 = switch escapeTables { - | Some(tables) if tables->Utils.Set.has(entityConfig.table) => true - | _ => false - } - - async sql => { - try { - let promises = [] - - if 
shouldSaveHistory { - let backfillHistoryIds = Utils.Set.make() - let batchSetUpdates = [] - // Use unnest approach - let batchDeleteCheckpointIds = [] - let batchDeleteEntityIds = [] - - rows->Js.Array2.forEach(row => { - switch row { - | Updated({history, containsRollbackDiffChange}) => - history->Js.Array2.forEach( - (entityUpdate: EntityHistory.entityUpdate<'a>) => { - if !containsRollbackDiffChange { - // For every update we want to make sure that there's an existing history item - // with the current entity state. So we backfill history with checkpoint id 0, - // before writing updates. Don't do this if the update has a rollback diff change. - backfillHistoryIds->Utils.Set.add(entityUpdate.entityId)->ignore - } - switch entityUpdate.entityUpdateAction { - | Delete => { - batchDeleteEntityIds->Array.push(entityUpdate.entityId)->ignore - batchDeleteCheckpointIds->Array.push(entityUpdate.checkpointId)->ignore - } - | Set(_) => batchSetUpdates->Js.Array2.push(entityUpdate)->ignore - } - }, - ) - | _ => () - } - }) - - if backfillHistoryIds->Utils.Set.size !== 0 { - // This must run before updating entity or entity history tables - await EntityHistory.backfillHistory( - sql, - ~pgSchema=Db.publicSchema, - ~entityName=entityConfig.name, - ~entityIndex=entityConfig.index, - ~ids=backfillHistoryIds->Utils.Set.toArray, - ) - } - - if batchDeleteCheckpointIds->Utils.Array.notEmpty { - promises->Array.push( - sql->EntityHistory.insertDeleteUpdates( - ~pgSchema=Db.publicSchema, - ~entityHistory=entityConfig.entityHistory, - ~batchDeleteEntityIds, - ~batchDeleteCheckpointIds, - ), - ) - } - - if batchSetUpdates->Utils.Array.notEmpty { - if shouldRemoveInvalidUtf8 { - let entities = batchSetUpdates->Js.Array2.map(batchSetUpdate => { - switch batchSetUpdate.entityUpdateAction { - | Set(entity) => entity - | _ => Js.Exn.raiseError("Expected Set action") - } - }) - entities->PgStorage.removeInvalidUtf8InPlace - } - - promises - ->Js.Array2.push( - sql->PgStorage.setOrThrow( - 
~items=batchSetUpdates, - ~itemSchema=entityConfig.entityHistory.setUpdateSchema, - ~table=entityConfig.entityHistory.table, - ~pgSchema=Db.publicSchema, - ), - ) - ->ignore - } - } - - if entitiesToSet->Utils.Array.notEmpty { - if shouldRemoveInvalidUtf8 { - entitiesToSet->PgStorage.removeInvalidUtf8InPlace - } - promises->Array.push( - sql->PgStorage.setOrThrow( - ~items=entitiesToSet, - ~table=entityConfig.table, - ~itemSchema=entityConfig.schema, - ~pgSchema=Generated.storagePgSchema, - ), - ) - } - if idsToDelete->Utils.Array.notEmpty { - promises->Array.push(sql->DbFunctionsEntities.batchDelete(~entityConfig)(idsToDelete)) - } - - let _ = await promises->Promise.all - } catch { - // There's a race condition that sql->Postgres.beginSql - // might throw PG error, earlier, than the handled error - // from setOrThrow will be passed through. - // This is needed for the utf8 encoding fix. - | exn => { - /* Note: Entity History doesn't return StorageError yet, and directly throws JsError */ - let normalizedExn = switch exn { - | JsError(_) => exn - | Persistence.StorageError({reason: exn}) => exn - | _ => exn - }->Js.Exn.anyToExnInternal - - switch normalizedExn { - | JsError(error) => - // Workaround for https://github.com/enviodev/hyperindex/issues/446 - // We do escaping only when we actually got an error writing for the first time. - // This is not perfect, but an optimization to avoid escaping for every single item. - - switch error->S.parseOrThrow(PgStorage.pgErrorMessageSchema) { - | `current transaction is aborted, commands ignored until end of transaction block` => () - | `invalid byte sequence for encoding "UTF8": 0x00` => - // Since the transaction is aborted at this point, - // we can't simply retry the function with escaped items, - // so propagate the error, to restart the whole batch write. - // Also, pass the failing table, to escape only its items. 
- // TODO: Ideally all this should be done in the file, - // so it'll be easier to work on PG specific logic. - specificError.contents = Some(PgStorage.PgEncodingError({table: entityConfig.table})) - | _ => specificError.contents = Some(exn->Utils.prettifyExn) - | exception _ => () - } - | S.Raised(_) => raise(normalizedExn) // But rethrow this one, since it's not a PG error - | _ => () - } - - // Improtant: Don't rethrow here, since it'll result in - // an unhandled rejected promise error. - // That's fine not to throw, since sql->Postgres.beginSql - // will fail anyways. - } - } - } - }) - - //In the event of a rollback, rollback all meta tables based on the given - //valid event identifier, where all rows created after this eventIdentifier should - //be deleted - let rollbackTables = switch inMemoryStore { - | {rollbackTargetCheckpointId: Some(rollbackTargetCheckpointId)} => - Some( - sql => { - let promises = Entities.allEntities->Js.Array2.map(entityConfig => { - sql->EntityHistory.rollback( - ~pgSchema=Db.publicSchema, - ~entityName=entityConfig.name, - ~entityIndex=entityConfig.index, - ~rollbackTargetCheckpointId, - ) - }) - promises - ->Js.Array2.push( - sql->InternalTable.Checkpoints.rollback( - ~pgSchema=Db.publicSchema, - ~rollbackTargetCheckpointId, - ), - ) - ->ignore - Promise.all(promises) - }, - ) - | _ => None - } - - try { - let _ = await Promise.all2(( - sql->Postgres.beginSql(async sql => { - //Rollback tables need to happen first in the traction - switch rollbackTables { - | Some(rollbackTables) => - let _ = await rollbackTables(sql) - | None => () - } - - let setOperations = [ - sql => - sql->InternalTable.Chains.setProgressedChains( - ~pgSchema=Db.publicSchema, - ~progressedChains=batch.progressedChainsById->Utils.Dict.mapValuesToArray(( - chainAfterBatch - ): InternalTable.Chains.progressedChain => { - chainId: chainAfterBatch.fetchState.chainId, - progressBlockNumber: chainAfterBatch.progressBlockNumber, - totalEventsProcessed: 
chainAfterBatch.totalEventsProcessed, - }), - ), - setRawEvents, - ]->Belt.Array.concat(setEntities) - - if shouldSaveHistory { - setOperations->Array.push(sql => - sql->InternalTable.Checkpoints.insert( - ~pgSchema=Db.publicSchema, - ~checkpointIds=batch.checkpointIds, - ~checkpointChainIds=batch.checkpointChainIds, - ~checkpointBlockNumbers=batch.checkpointBlockNumbers, - ~checkpointBlockHashes=batch.checkpointBlockHashes, - ~checkpointEventsProcessed=batch.checkpointEventsProcessed, - ) - ) - } - - await setOperations - ->Belt.Array.map(dbFunc => sql->dbFunc) - ->Promise.all - }), - // Since effect cache currently doesn't support rollback, - // we can run it outside of the transaction for simplicity. - inMemoryStore.effects - ->Js.Dict.keys - ->Belt.Array.keepMapU(effectName => { - let inMemTable = inMemoryStore.effects->Js.Dict.unsafeGet(effectName) - let {idsToStore, dict, effect, invalidationsCount} = inMemTable - switch idsToStore { - | [] => None - | ids => { - let items = Belt.Array.makeUninitializedUnsafe(ids->Belt.Array.length) - ids->Belt.Array.forEachWithIndex((index, id) => { - items->Js.Array2.unsafe_set( - index, - ( - { - id, - output: dict->Js.Dict.unsafeGet(id), - }: Internal.effectCacheItem - ), - ) - }) - Some( - indexer.persistence->Persistence.setEffectCacheOrThrow( - ~effect, - ~items, - ~invalidationsCount, - ), - ) - } - } - }) - ->Promise.all, - )) - - // Just in case, if there's a not PG-specific error. 
- switch specificError.contents { - | Some(specificError) => raise(specificError) - | None => () - } - } catch { - | exn => - raise( - switch specificError.contents { - | Some(specificError) => specificError - | None => exn - }, - ) - } -} - -let prepareRollbackDiff = async (~persistence: Persistence.t, ~rollbackTargetCheckpointId) => { - let inMemStore = InMemoryStore.make(~entities=Entities.allEntities, ~rollbackTargetCheckpointId) - - let deletedEntities = Js.Dict.empty() - let setEntities = Js.Dict.empty() - - let _ = - await Entities.allEntities - ->Belt.Array.map(async entityConfig => { - let entityTable = inMemStore->InMemoryStore.getInMemTable(~entityConfig) - - let (removedIdsResult, restoredEntitiesResult) = await Promise.all2(( - // Get IDs of entities that should be deleted (created after rollback target with no prior history) - persistence.sql - ->Postgres.preparedUnsafe( - entityConfig.entityHistory.makeGetRollbackRemovedIdsQuery(~pgSchema=Db.publicSchema), - [rollbackTargetCheckpointId]->Utils.magic, - ) - ->(Utils.magic: promise => promise>), - // Get entities that should be restored to their state at or before rollback target - persistence.sql - ->Postgres.preparedUnsafe( - entityConfig.entityHistory.makeGetRollbackRestoredEntitiesQuery( - ~pgSchema=Db.publicSchema, - ), - [rollbackTargetCheckpointId]->Utils.magic, - ) - ->(Utils.magic: promise => promise>), - )) - - // Process removed IDs - removedIdsResult->Js.Array2.forEach(data => { - deletedEntities->Utils.Dict.push(entityConfig.name, data["id"]) - entityTable->InMemoryTable.Entity.set( - { - entityId: data["id"], - checkpointId: 0, - entityUpdateAction: Delete, - }, - ~shouldSaveHistory=false, - ~containsRollbackDiffChange=true, - ) - }) - - let restoredEntities = restoredEntitiesResult->S.parseOrThrow(entityConfig.rowsSchema) - - // Process restored entities - restoredEntities->Belt.Array.forEach((entity: Entities.internalEntity) => { - setEntities->Utils.Dict.push(entityConfig.name, 
entity.id) - entityTable->InMemoryTable.Entity.set( - { - entityId: entity.id, - checkpointId: 0, - entityUpdateAction: Set(entity), - }, - ~shouldSaveHistory=false, - ~containsRollbackDiffChange=true, - ) - }) - }) - ->Promise.all - - { - "inMemStore": inMemStore, - "deletedEntities": deletedEntities, - "setEntities": setEntities, - } -} diff --git a/apps/hypersync-indexer/generated/src/Index.bs.js b/apps/hypersync-indexer/generated/src/Index.bs.js deleted file mode 100644 index 598439233..000000000 --- a/apps/hypersync-indexer/generated/src/Index.bs.js +++ /dev/null @@ -1,209 +0,0 @@ -#!/usr/bin/env node - -/* - * Migration script for Envio v2.22.0+ - * - * In version 2.22.0 we introduced a breaking change by changing ReScript generated - * files suffix from .bs.js to .res.js. This script helps users smoothly upgrade - * their indexers to the new version. - * - * Previously, users needed to run ReScript-generated files directly in their - * package.json scripts (e.g., "node generated/src/Index.bs.js"). Now, Envio - * provides a unified `envio start` command that handles this internally. - * - * This script: - * 1. Updates package.json scripts to use `envio start` instead of direct file execution - * 2. 
Runs the new envio start command - */ - -const fs = require("fs"); -const path = require("path"); -const readline = require("readline"); -const { spawn } = require("child_process"); - -// Function to update package.json -function updatePackageJson() { - try { - // Look for package.json in current directory and parent directories - let packageJsonPath = null; - let currentDir = process.cwd(); - - // Search up the directory tree for package.json - while (currentDir !== path.dirname(currentDir)) { - const potentialPath = path.join(currentDir, "package.json"); - if (fs.existsSync(potentialPath)) { - packageJsonPath = potentialPath; - break; - } - currentDir = path.dirname(currentDir); - } - - if (!packageJsonPath) { - console.log("❌ Could not find package.json file"); - return false; - } - - console.log(`📦 Found package.json at: ${packageJsonPath}`); - - // Read and parse package.json - const packageJsonContent = fs.readFileSync(packageJsonPath, "utf8"); - const packageJson = JSON.parse(packageJsonContent); - - // Check if scripts section exists - if (!packageJson.scripts) { - console.log("⚠️ No scripts section found in package.json"); - return false; - } - - // Update the start script - let updated = false; - if (packageJson.scripts.start) { - let originalScript = packageJson.scripts.start; - let newScript = originalScript; - - // Replace ts-node generated/src/Index.bs.js with envio start - newScript = newScript.replace( - /ts-node\s+generated\/src\/Index\.bs\.js/g, - "envio start" - ); - - // Replace node generated/src/Index.bs.js with envio start - newScript = newScript.replace( - /node\s+generated\/src\/Index\.bs\.js/g, - "envio start" - ); - - if (newScript !== originalScript) { - console.log("🔧 Updating start script..."); - console.log(` From: ${originalScript}`); - console.log(` To: ${newScript}`); - packageJson.scripts.start = newScript; - updated = true; - } - } - - if (updated) { - // Write back the updated package.json - fs.writeFileSync( - packageJsonPath, 
- JSON.stringify(packageJson, null, 2) + "\n" - ); - console.log("✅ Package.json updated successfully!"); - return true; - } else { - console.log("ℹ️ No scripts found that need updating"); - return false; - } - } catch (error) { - console.error("❌ Error updating package.json:", error.message); - return false; - } -} - -// Function to prompt user for migration -function promptUserForMigration() { - return new Promise((resolve) => { - const rl = readline.createInterface({ - input: process.stdin, - output: process.stdout, - }); - - // Set up timeout to automatically skip after 30 seconds - const timeout = setTimeout(() => { - rl.close(); - console.log( - "\n⏱️ No response received in 30 seconds, skipping package.json migration." - ); - resolve(false); - }, 30000); - - rl.question( - "🤔 Would you like to automatically update your package.json scripts? (y/N): ", - (answer) => { - clearTimeout(timeout); - rl.close(); - const shouldMigrate = - answer.toLowerCase() === "y" || - answer.toLowerCase() === "yes" || - answer.trim() === ""; - resolve(shouldMigrate); - } - ); - }); -} - -// Function to run envio start -function runEnvioStart() { - console.log("🚀 Starting Envio..."); - - const envioProcess = spawn("envio", ["start"], { - stdio: "inherit", - cwd: process.cwd(), - }); - - envioProcess.on("error", (error) => { - if (error.code === "ENOENT") { - console.error( - '❌ Error: "envio" command not found. Please make sure Envio CLI is installed.' - ); - } else { - console.error("❌ Error running envio start:", error.message); - } - process.exit(1); - }); - - envioProcess.on("close", (code) => { - if (code !== 0) { - console.log(`❌ Envio start exited with code ${code}`); - process.exit(code); - } - }); -} - -// Main execution -async function main() { - console.log("🔄 Migrating to envio@2.22.0 or later..."); - console.log("📋 Migration steps:"); - console.log(" 1. Update package.json scripts (optional)"); - console.log(" 2. 
Run envio start"); - console.log(""); - - console.log( - "ℹ️ Note: In the new version, pnpm-workspaces.yaml and .npmrc files are no longer required." - ); - console.log( - " You can safely remove them if you don't need them for other purposes." - ); - console.log(""); - - // Prompt user for package.json migration - const shouldMigrate = await promptUserForMigration(); - - if (shouldMigrate) { - console.log(""); - const packageUpdated = updatePackageJson(); - - if (packageUpdated) { - console.log(""); - console.log( - "🎉 Migration completed! Your package.json has been updated." - ); - console.log( - ' From now on, you can use "npm start" or "envio start" directly.' - ); - console.log(""); - } - } else { - console.log("⏭️ Skipping package.json migration."); - console.log(""); - } - - // Run envio start - runEnvioStart(); -} - -// Start the main function -main().catch((error) => { - console.error("❌ Error during migration:", error.message); - process.exit(1); -}); diff --git a/apps/hypersync-indexer/generated/src/Index.res b/apps/hypersync-indexer/generated/src/Index.res deleted file mode 100644 index daba19f7a..000000000 --- a/apps/hypersync-indexer/generated/src/Index.res +++ /dev/null @@ -1,332 +0,0 @@ -open Belt - -type chainData = { - chainId: float, - poweredByHyperSync: bool, - firstEventBlockNumber: option, - latestProcessedBlock: option, - timestampCaughtUpToHeadOrEndblock: option, - numEventsProcessed: int, - latestFetchedBlockNumber: int, - currentBlockHeight: int, - numBatchesFetched: int, - endBlock: option, - numAddresses: int, -} -@tag("status") -type state = - | @as("disabled") Disabled({}) - | @as("initializing") Initializing({}) - | @as("active") - Active({ - envioVersion: string, - chains: array, - indexerStartTime: Js.Date.t, - isPreRegisteringDynamicContracts: bool, - isUnorderedMultichainMode: bool, - rollbackOnReorg: bool, - }) - -let chainDataSchema = S.schema((s): chainData => { - chainId: s.matches(S.float), - poweredByHyperSync: 
s.matches(S.bool), - firstEventBlockNumber: s.matches(S.option(S.int)), - latestProcessedBlock: s.matches(S.option(S.int)), - timestampCaughtUpToHeadOrEndblock: s.matches(S.option(S.datetime(S.string))), - numEventsProcessed: s.matches(S.int), - latestFetchedBlockNumber: s.matches(S.int), - currentBlockHeight: s.matches(S.int), - numBatchesFetched: s.matches(S.int), - endBlock: s.matches(S.option(S.int)), - numAddresses: s.matches(S.int), -}) -let stateSchema = S.union([ - S.literal(Disabled({})), - S.literal(Initializing({})), - S.schema(s => Active({ - envioVersion: s.matches(S.string), - chains: s.matches(S.array(chainDataSchema)), - indexerStartTime: s.matches(S.datetime(S.string)), - // Keep the field, since Dev Console expects it to be present - isPreRegisteringDynamicContracts: false, - isUnorderedMultichainMode: s.matches(S.bool), - rollbackOnReorg: s.matches(S.bool), - })), -]) - -let startServer = (~getState, ~indexer: Indexer.t, ~isDevelopmentMode: bool) => { - open Express - - let app = makeCjs() - - let consoleCorsMiddleware = (req, res, next) => { - switch req.headers->Js.Dict.get("origin") { - | Some(origin) if origin === Env.prodEnvioAppUrl || origin === Env.envioAppUrl => - res->setHeader("Access-Control-Allow-Origin", origin) - | _ => () - } - - res->setHeader("Access-Control-Allow-Methods", "GET, POST, PUT, DELETE, OPTIONS") - res->setHeader( - "Access-Control-Allow-Headers", - "Origin, X-Requested-With, Content-Type, Accept", - ) - - if req.method === Options { - res->sendStatus(200) - } else { - next() - } - } - app->useFor("/console", consoleCorsMiddleware) - app->useFor("/metrics", consoleCorsMiddleware) - - app->get("/healthz", (_req, res) => { - // this is the machine readable port used in kubernetes to check the health of this service. - // aditional health information could be added in the future (info about errors, back-offs, etc). 
- res->sendStatus(200) - }) - - app->get("/console/state", (_req, res) => { - let state = if isDevelopmentMode { - getState() - } else { - Disabled({}) - } - - res->json(state->S.reverseConvertToJsonOrThrow(stateSchema)) - }) - - app->post("/console/syncCache", (_req, res) => { - if isDevelopmentMode { - (indexer.persistence->Persistence.getInitializedStorageOrThrow).dumpEffectCache() - ->Promise.thenResolve(_ => res->json(Boolean(true))) - ->Promise.done - } else { - res->json(Boolean(false)) - } - }) - - PromClient.collectDefaultMetrics() - - app->get("/metrics", (_req, res) => { - res->set("Content-Type", PromClient.defaultRegister->PromClient.getContentType) - let _ = - PromClient.defaultRegister - ->PromClient.metrics - ->Promise.thenResolve(metrics => res->endWithData(metrics)) - }) - - let _ = app->listen(Env.serverPort) -} - -type args = {@as("tui-off") tuiOff?: bool} - -type process -@val external process: process = "process" -@get external argv: process => 'a = "argv" - -type mainArgs = Yargs.parsedArgs - -let makeAppState = (globalState: GlobalState.t): EnvioInkApp.appState => { - let chains = - globalState.chainManager.chainFetchers - ->ChainMap.values - ->Array.map(cf => { - let {numEventsProcessed, fetchState, numBatchesFetched} = cf - let latestFetchedBlockNumber = Pervasives.max(fetchState->FetchState.bufferBlockNumber, 0) - let hasProcessedToEndblock = cf->ChainFetcher.hasProcessedToEndblock - let currentBlockHeight = - cf->ChainFetcher.hasProcessedToEndblock - ? cf.fetchState.endBlock->Option.getWithDefault(cf.currentBlockHeight) - : cf.currentBlockHeight - - let progress: ChainData.progress = if hasProcessedToEndblock { - // If the endblock has been reached then set the progress to synced. 
- // if there's chains that have no events in the block range start->end, - // it's possible there are no events in that block range (ie firstEventBlockNumber = None) - // This ensures TUI still displays synced in this case - let { - committedProgressBlockNumber, - timestampCaughtUpToHeadOrEndblock, - numEventsProcessed, - firstEventBlockNumber, - } = cf - - Synced({ - firstEventBlockNumber: firstEventBlockNumber->Option.getWithDefault(0), - latestProcessedBlock: committedProgressBlockNumber, - timestampCaughtUpToHeadOrEndblock: timestampCaughtUpToHeadOrEndblock->Option.getWithDefault( - Js.Date.now()->Js.Date.fromFloat, - ), - numEventsProcessed, - }) - } else { - switch cf { - | { - committedProgressBlockNumber, - timestampCaughtUpToHeadOrEndblock: Some(timestampCaughtUpToHeadOrEndblock), - firstEventBlockNumber: Some(firstEventBlockNumber), - } => - Synced({ - firstEventBlockNumber, - latestProcessedBlock: committedProgressBlockNumber, - timestampCaughtUpToHeadOrEndblock, - numEventsProcessed, - }) - | { - committedProgressBlockNumber, - timestampCaughtUpToHeadOrEndblock: None, - firstEventBlockNumber: Some(firstEventBlockNumber), - } => - Syncing({ - firstEventBlockNumber, - latestProcessedBlock: committedProgressBlockNumber, - numEventsProcessed, - }) - | {firstEventBlockNumber: None} => SearchingForEvents - } - } - - ( - { - progress, - currentBlockHeight, - latestFetchedBlockNumber, - numBatchesFetched, - chain: ChainMap.Chain.makeUnsafe(~chainId=cf.chainConfig.id), - endBlock: cf.fetchState.endBlock, - poweredByHyperSync: (cf.sourceManager->SourceManager.getActiveSource).poweredByHyperSync, - }: EnvioInkApp.chainData - ) - }) - { - config: globalState.indexer.config, - indexerStartTime: globalState.indexerStartTime, - chains, - } -} - -// Function to open the URL in the browser -// @module("child_process") -// external exec: (string, (Js.Nullable.t, 'a, 'b) => unit) => unit = "exec" -// @module("process") external platform: string = "platform" -// let 
openConsole = () => { -// let host = "https://envio.dev" -// let command = switch platform { -// | "win32" => "start" -// | "darwin" => "open" -// | _ => "xdg-open" -// } -// exec(`${command} ${host}/console`, (_, _, _) => ()) -// } - -let main = async () => { - try { - let mainArgs: mainArgs = process->argv->Yargs.hideBin->Yargs.yargs->Yargs.argv - let shouldUseTui = !(mainArgs.tuiOff->Belt.Option.getWithDefault(Env.tuiOffEnvVar)) - // The most simple check to verify whether we are running in development mode - // and prevent exposing the console to public, when creating a real deployment. - let isDevelopmentMode = Env.Db.password === "testing" - - let indexer = Generated.getIndexer() - - let gsManagerRef = ref(None) - - let envioVersion = Utils.EnvioPackage.json.version - Prometheus.Info.set(~version=envioVersion) - Prometheus.RollbackEnabled.set(~enabled=indexer.config.shouldRollbackOnReorg) - - startServer( - ~indexer, - ~isDevelopmentMode, - ~getState=() => - switch gsManagerRef.contents { - | None => Initializing({}) - | Some(gsManager) => { - let state = gsManager->GlobalStateManager.getState - let appState = state->makeAppState - Active({ - envioVersion, - chains: appState.chains->Js.Array2.map(c => { - let cf = state.chainManager.chainFetchers->ChainMap.get(c.chain) - { - chainId: c.chain->ChainMap.Chain.toChainId->Js.Int.toFloat, - poweredByHyperSync: c.poweredByHyperSync, - latestFetchedBlockNumber: c.latestFetchedBlockNumber, - currentBlockHeight: c.currentBlockHeight, - numBatchesFetched: c.numBatchesFetched, - endBlock: c.endBlock, - firstEventBlockNumber: switch c.progress { - | SearchingForEvents => None - | Syncing({firstEventBlockNumber}) | Synced({firstEventBlockNumber}) => - Some(firstEventBlockNumber) - }, - latestProcessedBlock: switch c.progress { - | SearchingForEvents => None - | Syncing({latestProcessedBlock}) | Synced({latestProcessedBlock}) => - Some(latestProcessedBlock) - }, - timestampCaughtUpToHeadOrEndblock: switch c.progress { - | 
SearchingForEvents - | Syncing(_) => - None - | Synced({timestampCaughtUpToHeadOrEndblock}) => - Some(timestampCaughtUpToHeadOrEndblock) - }, - numEventsProcessed: switch c.progress { - | SearchingForEvents => 0 - | Syncing({numEventsProcessed}) - | Synced({numEventsProcessed}) => numEventsProcessed - }, - numAddresses: cf.fetchState->FetchState.numAddresses, - } - }), - indexerStartTime: appState.indexerStartTime, - isPreRegisteringDynamicContracts: false, - rollbackOnReorg: indexer.config.shouldRollbackOnReorg, - isUnorderedMultichainMode: switch indexer.config.multichain { - | Unordered => true - | Ordered => false - }, - }) - } - }, - ) - - await indexer.persistence->Persistence.init( - ~chainConfigs=indexer.config.chainMap->ChainMap.values, - ) - - let chainManager = await ChainManager.makeFromDbState( - ~initialState=indexer.persistence->Persistence.getInitializedState, - ~config=indexer.config, - ~registrations=indexer.registrations, - ~persistence=indexer.persistence, - ) - let globalState = GlobalState.make(~indexer, ~chainManager, ~isDevelopmentMode, ~shouldUseTui) - let stateUpdatedHook = if shouldUseTui { - let rerender = EnvioInkApp.startApp(makeAppState(globalState)) - Some(globalState => globalState->makeAppState->rerender) - } else { - None - } - let gsManager = globalState->GlobalStateManager.make(~stateUpdatedHook?) - gsManagerRef := Some(gsManager) - gsManager->GlobalStateManager.dispatchTask(NextQuery(CheckAllChains)) - /* - NOTE: - This `ProcessEventBatch` dispatch shouldn't be necessary but we are adding for safety, it should immediately return doing - nothing since there is no events on the queues. 
- */ - - gsManager->GlobalStateManager.dispatchTask(ProcessEventBatch) - } catch { - | e => { - e->ErrorHandling.make(~msg="Failed at initialization")->ErrorHandling.log - NodeJs.process->NodeJs.exitWithCode(Failure) - } - } -} - -main()->ignore diff --git a/apps/hypersync-indexer/generated/src/Js.shim.ts b/apps/hypersync-indexer/generated/src/Js.shim.ts deleted file mode 100644 index 51f44c5fd..000000000 --- a/apps/hypersync-indexer/generated/src/Js.shim.ts +++ /dev/null @@ -1,11 +0,0 @@ -export type Json_t = - | string - | boolean - | number - | null - | { [key: string]: Json_t } - | Json_t[]; - -export type t = unknown; - -export type Exn_t = Error; diff --git a/apps/hypersync-indexer/generated/src/LoadLayer.res b/apps/hypersync-indexer/generated/src/LoadLayer.res deleted file mode 100644 index 5dab1f3b8..000000000 --- a/apps/hypersync-indexer/generated/src/LoadLayer.res +++ /dev/null @@ -1,444 +0,0 @@ -open Belt - -let loadById = ( - ~loadManager, - ~persistence: Persistence.t, - ~entityConfig: Internal.entityConfig, - ~inMemoryStore, - ~shouldGroup, - ~item, - ~entityId, -) => { - let key = `${entityConfig.name}.get` - let inMemTable = inMemoryStore->InMemoryStore.getInMemTable(~entityConfig) - - let load = async (idsToLoad, ~onError as _) => { - let timerRef = Prometheus.StorageLoad.startOperation(~operation=key) - - // Since LoadManager.call prevents registerign entities already existing in the inMemoryStore, - // we can be sure that we load only the new ones. 
- let dbEntities = try { - await (persistence->Persistence.getInitializedStorageOrThrow).loadByIdsOrThrow( - ~table=entityConfig.table, - ~rowsSchema=entityConfig.rowsSchema, - ~ids=idsToLoad, - ) - } catch { - | Persistence.StorageError({message, reason}) => - reason->ErrorHandling.mkLogAndRaise(~logger=item->Logging.getItemLogger, ~msg=message) - } - - let entitiesMap = Js.Dict.empty() - for idx in 0 to dbEntities->Array.length - 1 { - let entity = dbEntities->Js.Array2.unsafe_get(idx) - entitiesMap->Js.Dict.set(entity.id, entity) - } - idsToLoad->Js.Array2.forEach(entityId => { - // Set the entity in the in memory store - // without overwriting existing values - // which might be newer than what we got from db - inMemTable->InMemoryTable.Entity.initValue( - ~allowOverWriteEntity=false, - ~key=entityId, - ~entity=entitiesMap->Utils.Dict.dangerouslyGetNonOption(entityId), - ) - }) - - timerRef->Prometheus.StorageLoad.endOperation( - ~operation=key, - ~whereSize=idsToLoad->Array.length, - ~size=dbEntities->Array.length, - ) - } - - loadManager->LoadManager.call( - ~key, - ~load, - ~shouldGroup, - ~hasher=LoadManager.noopHasher, - ~getUnsafeInMemory=inMemTable->InMemoryTable.Entity.getUnsafe, - ~hasInMemory=hash => inMemTable.table->InMemoryTable.hasByHash(hash), - ~input=entityId, - ) -} - -let callEffect = ( - ~effect: Internal.effect, - ~arg: Internal.effectArgs, - ~inMemTable: InMemoryStore.effectCacheInMemTable, - ~timerRef, - ~onError, -) => { - let effectName = effect.name - let hadActiveCalls = effect.activeCallsCount > 0 - effect.activeCallsCount = effect.activeCallsCount + 1 - Prometheus.EffectCalls.activeCallsCount->Prometheus.SafeGauge.handleInt( - ~labels=effectName, - ~value=effect.activeCallsCount, - ) - - if hadActiveCalls { - let elapsed = Hrtime.millisBetween(~from=effect.prevCallStartTimerRef, ~to=timerRef) - if elapsed > 0 { - Prometheus.EffectCalls.timeCounter->Prometheus.SafeCounter.incrementMany( - ~labels=effectName, - 
~value=Hrtime.millisBetween(~from=effect.prevCallStartTimerRef, ~to=timerRef), - ) - } - } - effect.prevCallStartTimerRef = timerRef - - effect.handler(arg) - ->Promise.thenResolve(output => { - inMemTable.dict->Js.Dict.set(arg.cacheKey, output) - if arg.context.cache { - inMemTable.idsToStore->Array.push(arg.cacheKey)->ignore - } - }) - ->Promise.catchResolve(exn => { - onError(~inputKey=arg.cacheKey, ~exn) - }) - ->Promise.finally(() => { - effect.activeCallsCount = effect.activeCallsCount - 1 - Prometheus.EffectCalls.activeCallsCount->Prometheus.SafeGauge.handleInt( - ~labels=effectName, - ~value=effect.activeCallsCount, - ) - let newTimer = Hrtime.makeTimer() - Prometheus.EffectCalls.timeCounter->Prometheus.SafeCounter.incrementMany( - ~labels=effectName, - ~value=Hrtime.millisBetween(~from=effect.prevCallStartTimerRef, ~to=newTimer), - ) - effect.prevCallStartTimerRef = newTimer - - Prometheus.EffectCalls.totalCallsCount->Prometheus.SafeCounter.increment(~labels=effectName) - Prometheus.EffectCalls.sumTimeCounter->Prometheus.SafeCounter.incrementMany( - ~labels=effectName, - ~value=timerRef->Hrtime.timeSince->Hrtime.toMillis->Hrtime.intFromMillis, - ) - }) -} - -let rec executeWithRateLimit = ( - ~effect: Internal.effect, - ~effectArgs: array, - ~inMemTable, - ~onError, - ~isFromQueue: bool, -) => { - let effectName = effect.name - - let timerRef = Hrtime.makeTimer() - let promises = [] - - switch effect.rateLimit { - | None => - // No rate limiting - execute all immediately - for idx in 0 to effectArgs->Array.length - 1 { - promises - ->Array.push( - callEffect( - ~effect, - ~arg=effectArgs->Array.getUnsafe(idx), - ~inMemTable, - ~timerRef, - ~onError, - )->Promise.ignoreValue, - ) - ->ignore - } - - | Some(state) => - let now = Js.Date.now() - - // Check if we need to reset the window - if now >= state.windowStartTime +. 
state.durationMs->Int.toFloat { - state.availableCalls = state.callsPerDuration - state.windowStartTime = now - state.nextWindowPromise = None - } - - // Split into immediate and queued - let immediateCount = Js.Math.min_int(state.availableCalls, effectArgs->Array.length) - let immediateArgs = effectArgs->Array.slice(~offset=0, ~len=immediateCount) - let queuedArgs = effectArgs->Array.sliceToEnd(immediateCount) - - // Update available calls - state.availableCalls = state.availableCalls - immediateCount - - // Call immediate effects - for idx in 0 to immediateArgs->Array.length - 1 { - promises - ->Array.push( - callEffect( - ~effect, - ~arg=immediateArgs->Array.getUnsafe(idx), - ~inMemTable, - ~timerRef, - ~onError, - )->Promise.ignoreValue, - ) - ->ignore - } - - if immediateCount > 0 && isFromQueue { - // Update queue count metric - state.queueCount = state.queueCount - immediateCount - Prometheus.EffectQueueCount.set(~count=state.queueCount, ~effectName) - } - - // Handle queued items - if queuedArgs->Utils.Array.notEmpty { - if !isFromQueue { - // Update queue count metric - state.queueCount = state.queueCount + queuedArgs->Array.length - Prometheus.EffectQueueCount.set(~count=state.queueCount, ~effectName) - } - - let millisUntilReset = ref(0) - let nextWindowPromise = switch state.nextWindowPromise { - | Some(p) => p - | None => - millisUntilReset := - (state.windowStartTime +. state.durationMs->Int.toFloat -. 
now)->Float.toInt - let p = Utils.delay(millisUntilReset.contents) - state.nextWindowPromise = Some(p) - p - } - - // Wait for next window and recursively process queue - promises - ->Array.push( - nextWindowPromise - ->Promise.then(() => { - if millisUntilReset.contents > 0 { - Prometheus.EffectQueueCount.timeCounter->Prometheus.SafeCounter.incrementMany( - ~labels=effectName, - ~value=millisUntilReset.contents, - ) - } - executeWithRateLimit( - ~effect, - ~effectArgs=queuedArgs, - ~inMemTable, - ~onError, - ~isFromQueue=true, - ) - }) - ->Promise.ignoreValue, - ) - ->ignore - } - } - - // Wait for all to complete - promises->Promise.all -} - -let loadEffect = ( - ~loadManager, - ~persistence: Persistence.t, - ~effect: Internal.effect, - ~effectArgs, - ~inMemoryStore, - ~shouldGroup, - ~item, -) => { - let effectName = effect.name - let key = `${effectName}.effect` - let inMemTable = inMemoryStore->InMemoryStore.getEffectInMemTable(~effect) - - let load = async (args, ~onError) => { - let idsToLoad = args->Js.Array2.map((arg: Internal.effectArgs) => arg.cacheKey) - let idsFromCache = Utils.Set.make() - - if ( - switch persistence.storageStatus { - | Ready({cache}) => cache->Utils.Dict.has(effectName) - | _ => false - } - ) { - let timerRef = Prometheus.StorageLoad.startOperation(~operation=key) - let {table, outputSchema} = effect.storageMeta - - let dbEntities = try { - await (persistence->Persistence.getInitializedStorageOrThrow).loadByIdsOrThrow( - ~table, - ~rowsSchema=Internal.effectCacheItemRowsSchema, - ~ids=idsToLoad, - ) - } catch { - | exn => - item - ->Logging.getItemLogger - ->Logging.childWarn({ - "msg": `Failed to load cache effect cache. 
The indexer will continue working, but the effect will not be able to use the cache.`, - "err": exn->Utils.prettifyExn, - "effect": effectName, - }) - [] - } - - dbEntities->Js.Array2.forEach(dbEntity => { - try { - let output = dbEntity.output->S.parseOrThrow(outputSchema) - idsFromCache->Utils.Set.add(dbEntity.id)->ignore - inMemTable.dict->Js.Dict.set(dbEntity.id, output) - } catch { - | S.Raised(error) => - inMemTable.invalidationsCount = inMemTable.invalidationsCount + 1 - Prometheus.EffectCacheInvalidationsCount.increment(~effectName) - item - ->Logging.getItemLogger - ->Logging.childTrace({ - "msg": "Invalidated effect cache", - "input": dbEntity.id, - "effect": effectName, - "err": error->S.Error.message, - }) - } - }) - - timerRef->Prometheus.StorageLoad.endOperation( - ~operation=key, - ~whereSize=idsToLoad->Array.length, - ~size=dbEntities->Array.length, - ) - } - - let remainingCallsCount = idsToLoad->Array.length - idsFromCache->Utils.Set.size - if remainingCallsCount > 0 { - let argsToCall = [] - for idx in 0 to args->Array.length - 1 { - let arg = args->Array.getUnsafe(idx) - if !(idsFromCache->Utils.Set.has(arg.cacheKey)) { - argsToCall->Array.push(arg)->ignore - } - } - - if argsToCall->Utils.Array.notEmpty { - await executeWithRateLimit( - ~effect, - ~effectArgs=argsToCall, - ~inMemTable, - ~onError, - ~isFromQueue=false, - )->Promise.ignoreValue - } - } - } - - loadManager->LoadManager.call( - ~key, - ~load, - ~shouldGroup, - ~hasher=args => args.cacheKey, - ~getUnsafeInMemory=hash => inMemTable.dict->Js.Dict.unsafeGet(hash), - ~hasInMemory=hash => inMemTable.dict->Utils.Dict.has(hash), - ~input=effectArgs, - ) -} - -let loadByField = ( - ~loadManager, - ~persistence: Persistence.t, - ~operator: TableIndices.Operator.t, - ~entityConfig: Internal.entityConfig, - ~inMemoryStore, - ~fieldName, - ~fieldValueSchema, - ~shouldGroup, - ~item, - ~fieldValue, -) => { - let operatorCallName = switch operator { - | Eq => "eq" - | Gt => "gt" - | Lt => "lt" - 
} - let key = `${entityConfig.name}.getWhere.${fieldName}.${operatorCallName}` - let inMemTable = inMemoryStore->InMemoryStore.getInMemTable(~entityConfig) - - let load = async (fieldValues: array<'fieldValue>, ~onError as _) => { - let timerRef = Prometheus.StorageLoad.startOperation(~operation=key) - - let size = ref(0) - - let indiciesToLoad = fieldValues->Js.Array2.map((fieldValue): TableIndices.Index.t => { - Single({ - fieldName, - fieldValue: TableIndices.FieldValue.castFrom(fieldValue), - operator, - }) - }) - - let _ = - await indiciesToLoad - ->Js.Array2.map(async index => { - inMemTable->InMemoryTable.Entity.addEmptyIndex(~index) - try { - let entities = await ( - persistence->Persistence.getInitializedStorageOrThrow - ).loadByFieldOrThrow( - ~operator=switch index { - | Single({operator: Gt}) => #">" - | Single({operator: Eq}) => #"=" - | Single({operator: Lt}) => #"<" - }, - ~table=entityConfig.table, - ~rowsSchema=entityConfig.rowsSchema, - ~fieldName=index->TableIndices.Index.getFieldName, - ~fieldValue=switch index { - | Single({fieldValue}) => fieldValue - }, - ~fieldSchema=fieldValueSchema->( - Utils.magic: S.t<'fieldValue> => S.t - ), - ) - - entities->Array.forEach(entity => { - //Set the entity in the in memory store - inMemTable->InMemoryTable.Entity.initValue( - ~allowOverWriteEntity=false, - ~key=Entities.getEntityId(entity), - ~entity=Some(entity), - ) - }) - - size := size.contents + entities->Array.length - } catch { - | Persistence.StorageError({message, reason}) => - reason->ErrorHandling.mkLogAndRaise( - ~logger=Logging.createChildFrom( - ~logger=item->Logging.getItemLogger, - ~params={ - "operator": operatorCallName, - "tableName": entityConfig.table.tableName, - "fieldName": fieldName, - "fieldValue": fieldValue, - }, - ), - ~msg=message, - ) - } - }) - ->Promise.all - - timerRef->Prometheus.StorageLoad.endOperation( - ~operation=key, - ~whereSize=fieldValues->Array.length, - ~size=size.contents, - ) - } - - 
loadManager->LoadManager.call( - ~key, - ~load, - ~input=fieldValue, - ~shouldGroup, - ~hasher=fieldValue => - fieldValue->TableIndices.FieldValue.castFrom->TableIndices.FieldValue.toString, - ~getUnsafeInMemory=inMemTable->InMemoryTable.Entity.getUnsafeOnIndex(~fieldName, ~operator), - ~hasInMemory=inMemTable->InMemoryTable.Entity.hasIndex(~fieldName, ~operator), - ) -} diff --git a/apps/hypersync-indexer/generated/src/LoadLayer.resi b/apps/hypersync-indexer/generated/src/LoadLayer.resi deleted file mode 100644 index b63c53109..000000000 --- a/apps/hypersync-indexer/generated/src/LoadLayer.resi +++ /dev/null @@ -1,32 +0,0 @@ -let loadById: ( - ~loadManager: LoadManager.t, - ~persistence: Persistence.t, - ~entityConfig: Internal.entityConfig, - ~inMemoryStore: InMemoryStore.t, - ~shouldGroup: bool, - ~item: Internal.item, - ~entityId: string, -) => promise> - -let loadByField: ( - ~loadManager: LoadManager.t, - ~persistence: Persistence.t, - ~operator: TableIndices.Operator.t, - ~entityConfig: Internal.entityConfig, - ~inMemoryStore: InMemoryStore.t, - ~fieldName: string, - ~fieldValueSchema: RescriptSchema.S.t<'fieldValue>, - ~shouldGroup: bool, - ~item: Internal.item, - ~fieldValue: 'fieldValue, -) => promise> - -let loadEffect: ( - ~loadManager: LoadManager.t, - ~persistence: Persistence.t, - ~effect: Internal.effect, - ~effectArgs: Internal.effectArgs, - ~inMemoryStore: InMemoryStore.t, - ~shouldGroup: bool, - ~item: Internal.item, -) => promise diff --git a/apps/hypersync-indexer/generated/src/Path.res b/apps/hypersync-indexer/generated/src/Path.res deleted file mode 100644 index 6f6efb5c7..000000000 --- a/apps/hypersync-indexer/generated/src/Path.res +++ /dev/null @@ -1 +0,0 @@ -let relativePathToRootFromGenerated = "../." 
diff --git a/apps/hypersync-indexer/generated/src/PersistedState.res b/apps/hypersync-indexer/generated/src/PersistedState.res deleted file mode 100644 index 72673a5b7..000000000 --- a/apps/hypersync-indexer/generated/src/PersistedState.res +++ /dev/null @@ -1,25 +0,0 @@ -type t = { - @as("envio_version") envioVersion: string, - @as("config_hash") configHash: string, - @as("schema_hash") schemaHash: string, - @as("handler_files_hash") handlerFilesHash: string, - @as("abi_files_hash") abiFilesHash: string, -} - -let schema = S.schema(s => { - envioVersion: s.matches(S.string), - configHash: s.matches(S.string), - schemaHash: s.matches(S.string), - handlerFilesHash: s.matches(S.string), - abiFilesHash: s.matches(S.string), -}) - -external requireJson: string => Js.Json.t = "require" -let getPersistedState = () => - try { - let json = requireJson("../persisted_state.envio.json") - let parsed = json->S.parseJsonOrThrow(schema) - Ok(parsed) - } catch { - | exn => Error(exn) - } diff --git a/apps/hypersync-indexer/generated/src/TestHelpers.gen.ts b/apps/hypersync-indexer/generated/src/TestHelpers.gen.ts deleted file mode 100644 index 869abc857..000000000 --- a/apps/hypersync-indexer/generated/src/TestHelpers.gen.ts +++ /dev/null @@ -1,188 +0,0 @@ -/* TypeScript file generated from TestHelpers.res by genType. 
*/ - -/* eslint-disable */ -/* tslint:disable */ - -const TestHelpersJS = require('./TestHelpers.res.js'); - -import type {ENSGovernor_ProposalCanceled_event as Types_ENSGovernor_ProposalCanceled_event} from './Types.gen'; - -import type {ENSGovernor_ProposalCreated_event as Types_ENSGovernor_ProposalCreated_event} from './Types.gen'; - -import type {ENSGovernor_ProposalExecuted_event as Types_ENSGovernor_ProposalExecuted_event} from './Types.gen'; - -import type {ENSGovernor_ProposalQueued_event as Types_ENSGovernor_ProposalQueued_event} from './Types.gen'; - -import type {ENSGovernor_VoteCast_event as Types_ENSGovernor_VoteCast_event} from './Types.gen'; - -import type {ENSToken_DelegateChanged_event as Types_ENSToken_DelegateChanged_event} from './Types.gen'; - -import type {ENSToken_DelegateVotesChanged_event as Types_ENSToken_DelegateVotesChanged_event} from './Types.gen'; - -import type {ENSToken_Transfer_event as Types_ENSToken_Transfer_event} from './Types.gen'; - -import type {t as Address_t} from 'envio/src/Address.gen'; - -import type {t as TestHelpers_MockDb_t} from './TestHelpers_MockDb.gen'; - -/** The arguements that get passed to a "processEvent" helper function */ -export type EventFunctions_eventProcessorArgs = { - readonly event: event; - readonly mockDb: TestHelpers_MockDb_t; - readonly chainId?: number -}; - -export type EventFunctions_eventProcessor = (_1:EventFunctions_eventProcessorArgs) => Promise; - -export type EventFunctions_MockBlock_t = { - readonly hash?: string; - readonly number?: number; - readonly timestamp?: number -}; - -export type EventFunctions_MockTransaction_t = { - readonly from?: (undefined | Address_t); - readonly hash?: string; - readonly to?: (undefined | Address_t) -}; - -export type EventFunctions_mockEventData = { - readonly chainId?: number; - readonly srcAddress?: Address_t; - readonly logIndex?: number; - readonly block?: EventFunctions_MockBlock_t; - readonly transaction?: EventFunctions_MockTransaction_t -}; - 
-export type ENSGovernor_ProposalCreated_createMockArgs = { - readonly proposalId?: bigint; - readonly proposer?: Address_t; - readonly targets?: Address_t[]; - readonly values?: bigint[]; - readonly signatures?: string[]; - readonly calldatas?: string[]; - readonly startBlock?: bigint; - readonly endBlock?: bigint; - readonly description?: string; - readonly mockEventData?: EventFunctions_mockEventData -}; - -export type ENSGovernor_VoteCast_createMockArgs = { - readonly voter?: Address_t; - readonly proposalId?: bigint; - readonly support?: bigint; - readonly weight?: bigint; - readonly reason?: string; - readonly mockEventData?: EventFunctions_mockEventData -}; - -export type ENSGovernor_ProposalCanceled_createMockArgs = { readonly proposalId?: bigint; readonly mockEventData?: EventFunctions_mockEventData }; - -export type ENSGovernor_ProposalExecuted_createMockArgs = { readonly proposalId?: bigint; readonly mockEventData?: EventFunctions_mockEventData }; - -export type ENSGovernor_ProposalQueued_createMockArgs = { - readonly proposalId?: bigint; - readonly eta?: bigint; - readonly mockEventData?: EventFunctions_mockEventData -}; - -export type ENSToken_Transfer_createMockArgs = { - readonly from?: Address_t; - readonly to?: Address_t; - readonly value?: bigint; - readonly mockEventData?: EventFunctions_mockEventData -}; - -export type ENSToken_DelegateChanged_createMockArgs = { - readonly delegator?: Address_t; - readonly fromDelegate?: Address_t; - readonly toDelegate?: Address_t; - readonly mockEventData?: EventFunctions_mockEventData -}; - -export type ENSToken_DelegateVotesChanged_createMockArgs = { - readonly delegate?: Address_t; - readonly previousBalance?: bigint; - readonly newBalance?: bigint; - readonly mockEventData?: EventFunctions_mockEventData -}; - -export const MockDb_createMockDb: () => TestHelpers_MockDb_t = TestHelpersJS.MockDb.createMockDb as any; - -export const Addresses_mockAddresses: Address_t[] = TestHelpersJS.Addresses.mockAddresses 
as any; - -export const Addresses_defaultAddress: Address_t = TestHelpersJS.Addresses.defaultAddress as any; - -export const ENSGovernor_ProposalCreated_processEvent: EventFunctions_eventProcessor = TestHelpersJS.ENSGovernor.ProposalCreated.processEvent as any; - -export const ENSGovernor_ProposalCreated_createMockEvent: (args:ENSGovernor_ProposalCreated_createMockArgs) => Types_ENSGovernor_ProposalCreated_event = TestHelpersJS.ENSGovernor.ProposalCreated.createMockEvent as any; - -export const ENSGovernor_VoteCast_processEvent: EventFunctions_eventProcessor = TestHelpersJS.ENSGovernor.VoteCast.processEvent as any; - -export const ENSGovernor_VoteCast_createMockEvent: (args:ENSGovernor_VoteCast_createMockArgs) => Types_ENSGovernor_VoteCast_event = TestHelpersJS.ENSGovernor.VoteCast.createMockEvent as any; - -export const ENSGovernor_ProposalCanceled_processEvent: EventFunctions_eventProcessor = TestHelpersJS.ENSGovernor.ProposalCanceled.processEvent as any; - -export const ENSGovernor_ProposalCanceled_createMockEvent: (args:ENSGovernor_ProposalCanceled_createMockArgs) => Types_ENSGovernor_ProposalCanceled_event = TestHelpersJS.ENSGovernor.ProposalCanceled.createMockEvent as any; - -export const ENSGovernor_ProposalExecuted_processEvent: EventFunctions_eventProcessor = TestHelpersJS.ENSGovernor.ProposalExecuted.processEvent as any; - -export const ENSGovernor_ProposalExecuted_createMockEvent: (args:ENSGovernor_ProposalExecuted_createMockArgs) => Types_ENSGovernor_ProposalExecuted_event = TestHelpersJS.ENSGovernor.ProposalExecuted.createMockEvent as any; - -export const ENSGovernor_ProposalQueued_processEvent: EventFunctions_eventProcessor = TestHelpersJS.ENSGovernor.ProposalQueued.processEvent as any; - -export const ENSGovernor_ProposalQueued_createMockEvent: (args:ENSGovernor_ProposalQueued_createMockArgs) => Types_ENSGovernor_ProposalQueued_event = TestHelpersJS.ENSGovernor.ProposalQueued.createMockEvent as any; - -export const ENSToken_Transfer_processEvent: 
EventFunctions_eventProcessor = TestHelpersJS.ENSToken.Transfer.processEvent as any; - -export const ENSToken_Transfer_createMockEvent: (args:ENSToken_Transfer_createMockArgs) => Types_ENSToken_Transfer_event = TestHelpersJS.ENSToken.Transfer.createMockEvent as any; - -export const ENSToken_DelegateChanged_processEvent: EventFunctions_eventProcessor = TestHelpersJS.ENSToken.DelegateChanged.processEvent as any; - -export const ENSToken_DelegateChanged_createMockEvent: (args:ENSToken_DelegateChanged_createMockArgs) => Types_ENSToken_DelegateChanged_event = TestHelpersJS.ENSToken.DelegateChanged.createMockEvent as any; - -export const ENSToken_DelegateVotesChanged_processEvent: EventFunctions_eventProcessor = TestHelpersJS.ENSToken.DelegateVotesChanged.processEvent as any; - -export const ENSToken_DelegateVotesChanged_createMockEvent: (args:ENSToken_DelegateVotesChanged_createMockArgs) => Types_ENSToken_DelegateVotesChanged_event = TestHelpersJS.ENSToken.DelegateVotesChanged.createMockEvent as any; - -export const Addresses: { mockAddresses: Address_t[]; defaultAddress: Address_t } = TestHelpersJS.Addresses as any; - -export const ENSGovernor: { - VoteCast: { - processEvent: EventFunctions_eventProcessor; - createMockEvent: (args:ENSGovernor_VoteCast_createMockArgs) => Types_ENSGovernor_VoteCast_event - }; - ProposalQueued: { - processEvent: EventFunctions_eventProcessor; - createMockEvent: (args:ENSGovernor_ProposalQueued_createMockArgs) => Types_ENSGovernor_ProposalQueued_event - }; - ProposalCreated: { - processEvent: EventFunctions_eventProcessor; - createMockEvent: (args:ENSGovernor_ProposalCreated_createMockArgs) => Types_ENSGovernor_ProposalCreated_event - }; - ProposalCanceled: { - processEvent: EventFunctions_eventProcessor; - createMockEvent: (args:ENSGovernor_ProposalCanceled_createMockArgs) => Types_ENSGovernor_ProposalCanceled_event - }; - ProposalExecuted: { - processEvent: EventFunctions_eventProcessor; - createMockEvent: 
(args:ENSGovernor_ProposalExecuted_createMockArgs) => Types_ENSGovernor_ProposalExecuted_event - } -} = TestHelpersJS.ENSGovernor as any; - -export const MockDb: { createMockDb: () => TestHelpers_MockDb_t } = TestHelpersJS.MockDb as any; - -export const ENSToken: { - Transfer: { - processEvent: EventFunctions_eventProcessor; - createMockEvent: (args:ENSToken_Transfer_createMockArgs) => Types_ENSToken_Transfer_event - }; - DelegateChanged: { - processEvent: EventFunctions_eventProcessor; - createMockEvent: (args:ENSToken_DelegateChanged_createMockArgs) => Types_ENSToken_DelegateChanged_event - }; - DelegateVotesChanged: { - processEvent: EventFunctions_eventProcessor; - createMockEvent: (args:ENSToken_DelegateVotesChanged_createMockArgs) => Types_ENSToken_DelegateVotesChanged_event - } -} = TestHelpersJS.ENSToken as any; diff --git a/apps/hypersync-indexer/generated/src/TestHelpers.res b/apps/hypersync-indexer/generated/src/TestHelpers.res deleted file mode 100644 index a54495021..000000000 --- a/apps/hypersync-indexer/generated/src/TestHelpers.res +++ /dev/null @@ -1,504 +0,0 @@ -/***** TAKE NOTE ****** -This is a hack to get genType to work! - -In order for genType to produce recursive types, it needs to be at the -root module of a file. If it's defined in a nested module it does not -work. So all the MockDb types and internal functions are defined in TestHelpers_MockDb -and only public functions are recreated and exported from this module. 
- -the following module: -```rescript -module MyModule = { - @genType - type rec a = {fieldB: b} - @genType and b = {fieldA: a} -} -``` - -produces the following in ts: -```ts -// tslint:disable-next-line:interface-over-type-literal -export type MyModule_a = { readonly fieldB: b }; - -// tslint:disable-next-line:interface-over-type-literal -export type MyModule_b = { readonly fieldA: MyModule_a }; -``` - -fieldB references type b which doesn't exist because it's defined -as MyModule_b -*/ - -module MockDb = { - @genType - let createMockDb = TestHelpers_MockDb.createMockDb -} - -@genType -module Addresses = { - include TestHelpers_MockAddresses -} - -module EventFunctions = { - //Note these are made into a record to make operate in the same way - //for Res, JS and TS. - - /** - The arguements that get passed to a "processEvent" helper function - */ - @genType - type eventProcessorArgs<'event> = { - event: 'event, - mockDb: TestHelpers_MockDb.t, - @deprecated("Set the chainId for the event instead") - chainId?: int, - } - - @genType - type eventProcessor<'event> = eventProcessorArgs<'event> => promise - - /** - A function composer to help create individual processEvent functions - */ - let makeEventProcessor = (~register) => args => { - let {event, mockDb, ?chainId} = - args->(Utils.magic: eventProcessorArgs<'event> => eventProcessorArgs) - - // Have the line here, just in case the function is called with - // a manually created event. We don't want to break the existing tests here. 
- let _ = - TestHelpers_MockDb.mockEventRegisters->Utils.WeakMap.set(event, register) - TestHelpers_MockDb.makeProcessEvents(mockDb, ~chainId=?chainId)([event->(Utils.magic: Internal.event => Types.eventLog)]) - } - - module MockBlock = { - @genType - type t = { - @as("hash") hash?: string, - @as("number") number?: int, - @as("timestamp") timestamp?: int, - } - - let toBlock = (_mock: t) => { - hash: _mock.hash->Belt.Option.getWithDefault("foo"), - number: _mock.number->Belt.Option.getWithDefault(0), - timestamp: _mock.timestamp->Belt.Option.getWithDefault(0), - }->(Utils.magic: Types.AggregatedBlock.t => Internal.eventBlock) - } - - module MockTransaction = { - @genType - type t = { - @as("from") from?: option, - @as("hash") hash?: string, - @as("to") to?: option, - } - - let toTransaction = (_mock: t) => { - from: _mock.from->Belt.Option.getWithDefault(None), - hash: _mock.hash->Belt.Option.getWithDefault("foo"), - to: _mock.to->Belt.Option.getWithDefault(None), - }->(Utils.magic: Types.AggregatedTransaction.t => Internal.eventTransaction) - } - - @genType - type mockEventData = { - chainId?: int, - srcAddress?: Address.t, - logIndex?: int, - block?: MockBlock.t, - transaction?: MockTransaction.t, - } - - /** - Applies optional paramters with defaults for all common eventLog field - */ - let makeEventMocker = ( - ~params: Internal.eventParams, - ~mockEventData: option, - ~register: unit => Internal.eventConfig, - ): Internal.event => { - let {?block, ?transaction, ?srcAddress, ?chainId, ?logIndex} = - mockEventData->Belt.Option.getWithDefault({}) - let block = block->Belt.Option.getWithDefault({})->MockBlock.toBlock - let transaction = transaction->Belt.Option.getWithDefault({})->MockTransaction.toTransaction - let event: Internal.event = { - params, - transaction, - chainId: switch chainId { - | Some(chainId) => chainId - | None => - switch Generated.configWithoutRegistrations.defaultChain { - | Some(chainConfig) => chainConfig.id - | None => - 
Js.Exn.raiseError( - "No default chain Id found, please add at least 1 chain to your config.yaml", - ) - } - }, - block, - srcAddress: srcAddress->Belt.Option.getWithDefault(Addresses.defaultAddress), - logIndex: logIndex->Belt.Option.getWithDefault(0), - } - // Since currently it's not possible to figure out the event config from the event - // we store a reference to the register function by event in a weak map - let _ = TestHelpers_MockDb.mockEventRegisters->Utils.WeakMap.set(event, register) - event - } -} - - -module ENSGovernor = { - module ProposalCreated = { - @genType - let processEvent: EventFunctions.eventProcessor = EventFunctions.makeEventProcessor( - ~register=(Types.ENSGovernor.ProposalCreated.register :> unit => Internal.eventConfig), - ) - - @genType - type createMockArgs = { - @as("proposalId") - proposalId?: bigint, - @as("proposer") - proposer?: Address.t, - @as("targets") - targets?: array, - @as("values") - values?: array, - @as("signatures") - signatures?: array, - @as("calldatas") - calldatas?: array, - @as("startBlock") - startBlock?: bigint, - @as("endBlock") - endBlock?: bigint, - @as("description") - description?: string, - mockEventData?: EventFunctions.mockEventData, - } - - @genType - let createMockEvent = args => { - let { - ?proposalId, - ?proposer, - ?targets, - ?values, - ?signatures, - ?calldatas, - ?startBlock, - ?endBlock, - ?description, - ?mockEventData, - } = args - - let params = - { - proposalId: proposalId->Belt.Option.getWithDefault(0n), - proposer: proposer->Belt.Option.getWithDefault(TestHelpers_MockAddresses.defaultAddress), - targets: targets->Belt.Option.getWithDefault([]), - values: values->Belt.Option.getWithDefault([]), - signatures: signatures->Belt.Option.getWithDefault([]), - calldatas: calldatas->Belt.Option.getWithDefault([]), - startBlock: startBlock->Belt.Option.getWithDefault(0n), - endBlock: endBlock->Belt.Option.getWithDefault(0n), - description: description->Belt.Option.getWithDefault("foo"), - } 
-->(Utils.magic: Types.ENSGovernor.ProposalCreated.eventArgs => Internal.eventParams) - - EventFunctions.makeEventMocker( - ~params, - ~mockEventData, - ~register=(Types.ENSGovernor.ProposalCreated.register :> unit => Internal.eventConfig), - )->(Utils.magic: Internal.event => Types.ENSGovernor.ProposalCreated.event) - } - } - - module VoteCast = { - @genType - let processEvent: EventFunctions.eventProcessor = EventFunctions.makeEventProcessor( - ~register=(Types.ENSGovernor.VoteCast.register :> unit => Internal.eventConfig), - ) - - @genType - type createMockArgs = { - @as("voter") - voter?: Address.t, - @as("proposalId") - proposalId?: bigint, - @as("support") - support?: bigint, - @as("weight") - weight?: bigint, - @as("reason") - reason?: string, - mockEventData?: EventFunctions.mockEventData, - } - - @genType - let createMockEvent = args => { - let { - ?voter, - ?proposalId, - ?support, - ?weight, - ?reason, - ?mockEventData, - } = args - - let params = - { - voter: voter->Belt.Option.getWithDefault(TestHelpers_MockAddresses.defaultAddress), - proposalId: proposalId->Belt.Option.getWithDefault(0n), - support: support->Belt.Option.getWithDefault(0n), - weight: weight->Belt.Option.getWithDefault(0n), - reason: reason->Belt.Option.getWithDefault("foo"), - } -->(Utils.magic: Types.ENSGovernor.VoteCast.eventArgs => Internal.eventParams) - - EventFunctions.makeEventMocker( - ~params, - ~mockEventData, - ~register=(Types.ENSGovernor.VoteCast.register :> unit => Internal.eventConfig), - )->(Utils.magic: Internal.event => Types.ENSGovernor.VoteCast.event) - } - } - - module ProposalCanceled = { - @genType - let processEvent: EventFunctions.eventProcessor = EventFunctions.makeEventProcessor( - ~register=(Types.ENSGovernor.ProposalCanceled.register :> unit => Internal.eventConfig), - ) - - @genType - type createMockArgs = { - @as("proposalId") - proposalId?: bigint, - mockEventData?: EventFunctions.mockEventData, - } - - @genType - let createMockEvent = args => { - let { 
- ?proposalId, - ?mockEventData, - } = args - - let params = - { - proposalId: proposalId->Belt.Option.getWithDefault(0n), - } -->(Utils.magic: Types.ENSGovernor.ProposalCanceled.eventArgs => Internal.eventParams) - - EventFunctions.makeEventMocker( - ~params, - ~mockEventData, - ~register=(Types.ENSGovernor.ProposalCanceled.register :> unit => Internal.eventConfig), - )->(Utils.magic: Internal.event => Types.ENSGovernor.ProposalCanceled.event) - } - } - - module ProposalExecuted = { - @genType - let processEvent: EventFunctions.eventProcessor = EventFunctions.makeEventProcessor( - ~register=(Types.ENSGovernor.ProposalExecuted.register :> unit => Internal.eventConfig), - ) - - @genType - type createMockArgs = { - @as("proposalId") - proposalId?: bigint, - mockEventData?: EventFunctions.mockEventData, - } - - @genType - let createMockEvent = args => { - let { - ?proposalId, - ?mockEventData, - } = args - - let params = - { - proposalId: proposalId->Belt.Option.getWithDefault(0n), - } -->(Utils.magic: Types.ENSGovernor.ProposalExecuted.eventArgs => Internal.eventParams) - - EventFunctions.makeEventMocker( - ~params, - ~mockEventData, - ~register=(Types.ENSGovernor.ProposalExecuted.register :> unit => Internal.eventConfig), - )->(Utils.magic: Internal.event => Types.ENSGovernor.ProposalExecuted.event) - } - } - - module ProposalQueued = { - @genType - let processEvent: EventFunctions.eventProcessor = EventFunctions.makeEventProcessor( - ~register=(Types.ENSGovernor.ProposalQueued.register :> unit => Internal.eventConfig), - ) - - @genType - type createMockArgs = { - @as("proposalId") - proposalId?: bigint, - @as("eta") - eta?: bigint, - mockEventData?: EventFunctions.mockEventData, - } - - @genType - let createMockEvent = args => { - let { - ?proposalId, - ?eta, - ?mockEventData, - } = args - - let params = - { - proposalId: proposalId->Belt.Option.getWithDefault(0n), - eta: eta->Belt.Option.getWithDefault(0n), - } -->(Utils.magic: 
Types.ENSGovernor.ProposalQueued.eventArgs => Internal.eventParams) - - EventFunctions.makeEventMocker( - ~params, - ~mockEventData, - ~register=(Types.ENSGovernor.ProposalQueued.register :> unit => Internal.eventConfig), - )->(Utils.magic: Internal.event => Types.ENSGovernor.ProposalQueued.event) - } - } - -} - - -module ENSToken = { - module Transfer = { - @genType - let processEvent: EventFunctions.eventProcessor = EventFunctions.makeEventProcessor( - ~register=(Types.ENSToken.Transfer.register :> unit => Internal.eventConfig), - ) - - @genType - type createMockArgs = { - @as("from") - from?: Address.t, - @as("to") - to?: Address.t, - @as("value") - value?: bigint, - mockEventData?: EventFunctions.mockEventData, - } - - @genType - let createMockEvent = args => { - let { - ?from, - ?to, - ?value, - ?mockEventData, - } = args - - let params = - { - from: from->Belt.Option.getWithDefault(TestHelpers_MockAddresses.defaultAddress), - to: to->Belt.Option.getWithDefault(TestHelpers_MockAddresses.defaultAddress), - value: value->Belt.Option.getWithDefault(0n), - } -->(Utils.magic: Types.ENSToken.Transfer.eventArgs => Internal.eventParams) - - EventFunctions.makeEventMocker( - ~params, - ~mockEventData, - ~register=(Types.ENSToken.Transfer.register :> unit => Internal.eventConfig), - )->(Utils.magic: Internal.event => Types.ENSToken.Transfer.event) - } - } - - module DelegateChanged = { - @genType - let processEvent: EventFunctions.eventProcessor = EventFunctions.makeEventProcessor( - ~register=(Types.ENSToken.DelegateChanged.register :> unit => Internal.eventConfig), - ) - - @genType - type createMockArgs = { - @as("delegator") - delegator?: Address.t, - @as("fromDelegate") - fromDelegate?: Address.t, - @as("toDelegate") - toDelegate?: Address.t, - mockEventData?: EventFunctions.mockEventData, - } - - @genType - let createMockEvent = args => { - let { - ?delegator, - ?fromDelegate, - ?toDelegate, - ?mockEventData, - } = args - - let params = - { - delegator: 
delegator->Belt.Option.getWithDefault(TestHelpers_MockAddresses.defaultAddress), - fromDelegate: fromDelegate->Belt.Option.getWithDefault(TestHelpers_MockAddresses.defaultAddress), - toDelegate: toDelegate->Belt.Option.getWithDefault(TestHelpers_MockAddresses.defaultAddress), - } -->(Utils.magic: Types.ENSToken.DelegateChanged.eventArgs => Internal.eventParams) - - EventFunctions.makeEventMocker( - ~params, - ~mockEventData, - ~register=(Types.ENSToken.DelegateChanged.register :> unit => Internal.eventConfig), - )->(Utils.magic: Internal.event => Types.ENSToken.DelegateChanged.event) - } - } - - module DelegateVotesChanged = { - @genType - let processEvent: EventFunctions.eventProcessor = EventFunctions.makeEventProcessor( - ~register=(Types.ENSToken.DelegateVotesChanged.register :> unit => Internal.eventConfig), - ) - - @genType - type createMockArgs = { - @as("delegate") - delegate?: Address.t, - @as("previousBalance") - previousBalance?: bigint, - @as("newBalance") - newBalance?: bigint, - mockEventData?: EventFunctions.mockEventData, - } - - @genType - let createMockEvent = args => { - let { - ?delegate, - ?previousBalance, - ?newBalance, - ?mockEventData, - } = args - - let params = - { - delegate: delegate->Belt.Option.getWithDefault(TestHelpers_MockAddresses.defaultAddress), - previousBalance: previousBalance->Belt.Option.getWithDefault(0n), - newBalance: newBalance->Belt.Option.getWithDefault(0n), - } -->(Utils.magic: Types.ENSToken.DelegateVotesChanged.eventArgs => Internal.eventParams) - - EventFunctions.makeEventMocker( - ~params, - ~mockEventData, - ~register=(Types.ENSToken.DelegateVotesChanged.register :> unit => Internal.eventConfig), - )->(Utils.magic: Internal.event => Types.ENSToken.DelegateVotesChanged.event) - } - } - -} - diff --git a/apps/hypersync-indexer/generated/src/TestHelpers_MockAddresses.gen.ts b/apps/hypersync-indexer/generated/src/TestHelpers_MockAddresses.gen.ts deleted file mode 100644 index 5670668e1..000000000 --- 
a/apps/hypersync-indexer/generated/src/TestHelpers_MockAddresses.gen.ts +++ /dev/null @@ -1,12 +0,0 @@ -/* TypeScript file generated from TestHelpers_MockAddresses.res by genType. */ - -/* eslint-disable */ -/* tslint:disable */ - -const TestHelpers_MockAddressesJS = require('./TestHelpers_MockAddresses.res.js'); - -import type {t as Address_t} from 'envio/src/Address.gen'; - -export const mockAddresses: Address_t[] = TestHelpers_MockAddressesJS.mockAddresses as any; - -export const defaultAddress: Address_t = TestHelpers_MockAddressesJS.defaultAddress as any; diff --git a/apps/hypersync-indexer/generated/src/TestHelpers_MockAddresses.res b/apps/hypersync-indexer/generated/src/TestHelpers_MockAddresses.res deleted file mode 100644 index 81b041358..000000000 --- a/apps/hypersync-indexer/generated/src/TestHelpers_MockAddresses.res +++ /dev/null @@ -1,30 +0,0 @@ -/* -Note this file should remain top level since there are default types -that can point to TestHelpers_MockAddresses.defaultAddress -*/ -@genType -let mockAddresses = [ - "0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266", - "0x70997970C51812dc3A010C7d01b50e0d17dc79C8", - "0x3C44CdDdB6a900fa2b585dd299e03d12FA4293BC", - "0x90F79bf6EB2c4f870365E785982E1f101E93b906", - "0x15d34AAf54267DB7D7c367839AAf71A00a2C6A65", - "0x9965507D1a55bcC2695C58ba16FB37d819B0A4dc", - "0x976EA74026E726554dB657fA54763abd0C3a0aa9", - "0x14dC79964da2C08b23698B3D3cc7Ca32193d9955", - "0x23618e81E3f5cdF7f54C3d65f7FBc0aBf5B21E8f", - "0xa0Ee7A142d267C1f36714E4a8F75612F20a79720", - "0xBcd4042DE499D14e55001CcbB24a551F3b954096", - "0x71bE63f3384f5fb98995898A86B02Fb2426c5788", - "0xFABB0ac9d68B0B445fB7357272Ff202C5651694a", - "0x1CBd3b2770909D4e10f157cABC84C7264073C9Ec", - "0xdF3e18d64BC6A983f673Ab319CCaE4f1a57C7097", - "0xcd3B766CCDd6AE721141F452C550Ca635964ce71", - "0x2546BcD3c84621e976D8185a91A922aE77ECEc30", - "0xbDA5747bFD65F08deb54cb465eB87D40e51B197E", - "0xdD2FD4581271e230360230F9337D5c0430Bf44C0", - 
"0x8626f6940E2eb28930eFb4CeF49B2d1F2C9C1199", -]->Belt.Array.map(Address.Evm.fromStringOrThrow) -@genType -let defaultAddress = - mockAddresses[0] diff --git a/apps/hypersync-indexer/generated/src/TestHelpers_MockDb.gen.ts b/apps/hypersync-indexer/generated/src/TestHelpers_MockDb.gen.ts deleted file mode 100644 index 960ca1393..000000000 --- a/apps/hypersync-indexer/generated/src/TestHelpers_MockDb.gen.ts +++ /dev/null @@ -1,87 +0,0 @@ -/* TypeScript file generated from TestHelpers_MockDb.res by genType. */ - -/* eslint-disable */ -/* tslint:disable */ - -const TestHelpers_MockDbJS = require('./TestHelpers_MockDb.res.js'); - -import type {AccountBalance_t as Entities_AccountBalance_t} from '../src/db/Entities.gen'; - -import type {AccountPower_t as Entities_AccountPower_t} from '../src/db/Entities.gen'; - -import type {Account_t as Entities_Account_t} from '../src/db/Entities.gen'; - -import type {BalanceHistory_t as Entities_BalanceHistory_t} from '../src/db/Entities.gen'; - -import type {DaoMetricsDayBucket_t as Entities_DaoMetricsDayBucket_t} from '../src/db/Entities.gen'; - -import type {Delegation_t as Entities_Delegation_t} from '../src/db/Entities.gen'; - -import type {DynamicContractRegistry_t as InternalTable_DynamicContractRegistry_t} from 'envio/src/db/InternalTable.gen'; - -import type {FeedEvent_t as Entities_FeedEvent_t} from '../src/db/Entities.gen'; - -import type {ProposalOnchain_t as Entities_ProposalOnchain_t} from '../src/db/Entities.gen'; - -import type {RawEvents_t as InternalTable_RawEvents_t} from 'envio/src/db/InternalTable.gen'; - -import type {TokenPrice_t as Entities_TokenPrice_t} from '../src/db/Entities.gen'; - -import type {Token_t as Entities_Token_t} from '../src/db/Entities.gen'; - -import type {Transaction_t as Entities_Transaction_t} from '../src/db/Entities.gen'; - -import type {Transfer_t as Entities_Transfer_t} from '../src/db/Entities.gen'; - -import type {VoteOnchain_t as Entities_VoteOnchain_t} from 
'../src/db/Entities.gen'; - -import type {VotingPowerHistory_t as Entities_VotingPowerHistory_t} from '../src/db/Entities.gen'; - -import type {eventLog as Types_eventLog} from './Types.gen'; - -import type {rawEventsKey as InMemoryStore_rawEventsKey} from 'envio/src/InMemoryStore.gen'; - -/** The mockDb type is simply an InMemoryStore internally. __dbInternal__ holds a reference -to an inMemoryStore and all the the accessor methods point to the reference of that inMemory -store */ -export abstract class inMemoryStore { protected opaque!: any }; /* simulate opaque types */ - -export type t = { - readonly __dbInternal__: inMemoryStore; - readonly entities: entities; - readonly rawEvents: storeOperations; - readonly dynamicContractRegistry: entityStoreOperations; - readonly processEvents: (_1:Types_eventLog[]) => Promise -}; - -export type entities = { - readonly Account: entityStoreOperations; - readonly AccountBalance: entityStoreOperations; - readonly AccountPower: entityStoreOperations; - readonly BalanceHistory: entityStoreOperations; - readonly DaoMetricsDayBucket: entityStoreOperations; - readonly Delegation: entityStoreOperations; - readonly FeedEvent: entityStoreOperations; - readonly ProposalOnchain: entityStoreOperations; - readonly Token: entityStoreOperations; - readonly TokenPrice: entityStoreOperations; - readonly Transaction: entityStoreOperations; - readonly Transfer: entityStoreOperations; - readonly VoteOnchain: entityStoreOperations; - readonly VotingPowerHistory: entityStoreOperations -}; - -export type entityStoreOperations = storeOperations; - -export type storeOperations = { - readonly getAll: () => entity[]; - readonly get: (_1:entityKey) => (undefined | entity); - readonly set: (_1:entity) => t; - readonly delete: (_1:entityKey) => t -}; - -/** The constructor function for a mockDb. Call it and then set up the inital state by calling -any of the set functions it provides access to. A mockDb will be passed into a processEvent -helper. 
Note, process event helpers will not mutate the mockDb but return a new mockDb with -new state so you can compare states before and after. */ -export const createMockDb: () => t = TestHelpers_MockDbJS.createMockDb as any; diff --git a/apps/hypersync-indexer/generated/src/TestHelpers_MockDb.res b/apps/hypersync-indexer/generated/src/TestHelpers_MockDb.res deleted file mode 100644 index 6b0809ae5..000000000 --- a/apps/hypersync-indexer/generated/src/TestHelpers_MockDb.res +++ /dev/null @@ -1,820 +0,0 @@ - -/***** TAKE NOTE ****** -This file module is a hack to get genType to work! - -In order for genType to produce recursive types, it needs to be at the -root module of a file. If it's defined in a nested module it does not -work. So all the MockDb types and internal functions are defined here in TestHelpers_MockDb -and only public functions are recreated and exported from TestHelpers.MockDb module. - -the following module: -```rescript -module MyModule = { - @genType - type rec a = {fieldB: b} - @genType and b = {fieldA: a} -} -``` - -produces the following in ts: -```ts -// tslint:disable-next-line:interface-over-type-literal -export type MyModule_a = { readonly fieldB: b }; - -// tslint:disable-next-line:interface-over-type-literal -export type MyModule_b = { readonly fieldA: MyModule_a }; -``` - -fieldB references type b which doesn't exist because it's defined -as MyModule_b -*/ - -open Belt - -let mockEventRegisters = Utils.WeakMap.make() - -/** -A raw js binding to allow deleting from a dict. Used in store delete operation -*/ -let deleteDictKey: (dict<'a>, string) => unit = %raw(` - function(dict, key) { - delete dict[key] - } - `) - -let config = Generated.configWithoutRegistrations -EventRegister.startRegistration( - ~ecosystem=config.ecosystem, - ~multichain=config.multichain, - ~preloadHandlers=config.preloadHandlers, -) - -/** -The mockDb type is simply an InMemoryStore internally. 
__dbInternal__ holds a reference -to an inMemoryStore and all the the accessor methods point to the reference of that inMemory -store -*/ -@genType.opaque -type inMemoryStore = InMemoryStore.t - -@genType -type rec t = { - __dbInternal__: inMemoryStore, - entities: entities, - rawEvents: storeOperations, - dynamicContractRegistry: entityStoreOperations, - processEvents: array> => promise, -} - -// Each user defined entity will be in this record with all the store or "mockdb" operators -@genType -and entities = { - @as("Account") account: entityStoreOperations, - @as("AccountBalance") accountBalance: entityStoreOperations, - @as("AccountPower") accountPower: entityStoreOperations, - @as("BalanceHistory") balanceHistory: entityStoreOperations, - @as("DaoMetricsDayBucket") daoMetricsDayBucket: entityStoreOperations, - @as("Delegation") delegation: entityStoreOperations, - @as("FeedEvent") feedEvent: entityStoreOperations, - @as("ProposalOnchain") proposalOnchain: entityStoreOperations, - @as("Token") token: entityStoreOperations, - @as("TokenPrice") tokenPrice: entityStoreOperations, - @as("Transaction") transaction: entityStoreOperations, - @as("Transfer") transfer: entityStoreOperations, - @as("VoteOnchain") voteOnchain: entityStoreOperations, - @as("VotingPowerHistory") votingPowerHistory: entityStoreOperations, - } -// User defined entities always have a string for an id which is used as the -// key for entity stores -@genType -and entityStoreOperations<'entity> = storeOperations -// all the operator functions a user can access on an entity in the mock db -// stores refer to the the module that MakeStore functor outputs in IO.res -@genType -and storeOperations<'entityKey, 'entity> = { - getAll: unit => array<'entity>, - get: 'entityKey => option<'entity>, - set: 'entity => t, - delete: 'entityKey => t, -} - -/** -a composable function to make the "storeOperations" record to represent all the mock -db operations for each entity. 
-*/ -let makeStoreOperatorEntity = ( - ~inMemoryStore: InMemoryStore.t, - ~makeMockDb, - ~getStore: InMemoryStore.t => InMemoryTable.Entity.t<'entity>, - ~getKey: 'entity => Types.id, -): storeOperations => { - let {getUnsafe, values, set} = module(InMemoryTable.Entity) - - let get = id => { - let store = inMemoryStore->getStore - if store.table->InMemoryTable.hasByHash(id) { - getUnsafe(store)(id) - } else { - None - } - } - - let getAll = () => - inMemoryStore - ->getStore - ->values - - let delete = entityId => { - let cloned = inMemoryStore->InMemoryStore.clone - let table = cloned->getStore - - table->set( - { - entityId, - entityUpdateAction: Delete, - checkpointId: 0, - }, - ~shouldSaveHistory=false, - ) - - cloned->makeMockDb - } - - let set = entity => { - let cloned = inMemoryStore->InMemoryStore.clone - let table = cloned->getStore - let entityId = entity->getKey - - table->set( - { - entityId, - entityUpdateAction: Set(entity), - checkpointId: 0, - }, - ~shouldSaveHistory=false, - ) - - cloned->makeMockDb - } - - { - getAll, - get, - set, - delete, - } -} - -let makeStoreOperatorMeta = ( - ~inMemoryStore: InMemoryStore.t, - ~makeMockDb, - ~getStore: InMemoryStore.t => InMemoryTable.t<'key, 'value>, - ~getKey: 'value => 'key, -): storeOperations<'key, 'value> => { - let {get, values, set} = module(InMemoryTable) - - let get = id => get(inMemoryStore->getStore, id) - - let getAll = () => inMemoryStore->getStore->values->Array.map(row => row) - - let set = metaData => { - let cloned = inMemoryStore->InMemoryStore.clone - cloned->getStore->set(metaData->getKey, metaData) - cloned->makeMockDb - } - - // TODO: Remove. Is delete needed for meta data? 
- let delete = key => { - let cloned = inMemoryStore->InMemoryStore.clone - let store = cloned->getStore - store.dict->deleteDictKey(key->store.hash) - cloned->makeMockDb - } - - { - getAll, - get, - set, - delete, - } -} - -/** -Accessor function for getting the internal inMemoryStore in the mockDb -*/ -let getInternalDb = (self: t) => self.__dbInternal__ - -let getEntityOperations = (mockDb: t, ~entityName: string): entityStoreOperations< - Internal.entity, -> => { - mockDb.entities - ->Utils.magic - ->Utils.Dict.dangerouslyGetNonOption(entityName) - ->Utils.Option.getExn("Mocked operations for entity " ++ entityName ++ " not found") -} - -/** -A function composer for simulating the writing of an inMemoryStore to the external db with a mockDb. -Runs all set and delete operations currently cached in an inMemory store against the mockDb -*/ -let executeRowsEntity = ( - mockDb: t, - ~inMemoryStore: InMemoryStore.t, - ~entityConfig: Internal.entityConfig, -) => { - let getInMemTable = (inMemoryStore: InMemoryStore.t) => - inMemoryStore->InMemoryStore.getInMemTable(~entityConfig) - - let inMemTable = getInMemTable(inMemoryStore) - - inMemTable.table - ->InMemoryTable.values - ->Array.forEach(row => { - let mockDbTable = mockDb->getInternalDb->getInMemTable - switch row.entityRow { - | Updated({latest: {entityUpdateAction: Set(entity)}}) - | InitialReadFromDb(AlreadySet(entity)) => - let key = (entity: Internal.entity).id - mockDbTable->InMemoryTable.Entity.initValue( - ~allowOverWriteEntity=true, - ~key, - ~entity=Some(entity), - ) - | Updated({latest: {entityUpdateAction: Delete, entityId}}) => - mockDbTable.table.dict->deleteDictKey(entityId) - | InitialReadFromDb(NotSet) => () - } - }) -} - -let executeRowsMeta = ( - mockDb: t, - ~inMemoryStore: InMemoryStore.t, - ~getInMemTable: InMemoryStore.t => InMemoryTable.t<'key, 'entity>, - ~getKey: 'entity => 'key, -) => { - let mockDbTable = mockDb->getInternalDb->getInMemTable - inMemoryStore - ->getInMemTable - 
->InMemoryTable.values - ->Array.forEach(row => { - mockDbTable->InMemoryTable.set(getKey(row), row) - }) -} - -/** -The internal make function which can be passed an in memory store and -instantiate a "MockDb". This is useful for cloning or making a MockDb -out of an existing inMemoryStore -*/ -let rec makeWithInMemoryStore: InMemoryStore.t => t = (inMemoryStore: InMemoryStore.t) => { - let rawEvents = makeStoreOperatorMeta( - ~inMemoryStore, - ~makeMockDb=makeWithInMemoryStore, - ~getStore=db => db.rawEvents, - ~getKey=({chainId, eventId}) => { - chainId, - eventId: eventId->BigInt.toString, - }, - ) - - let dynamicContractRegistry = makeStoreOperatorEntity( - ~inMemoryStore, - ~getStore=db => - db - ->InMemoryStore.getInMemTable( - ~entityConfig=InternalTable.DynamicContractRegistry.config, - ) - ->( - Utils.magic: InMemoryTable.Entity.t => InMemoryTable.Entity.t< - InternalTable.DynamicContractRegistry.t, - > - ), - ~makeMockDb=makeWithInMemoryStore, - ~getKey=({chainId, contractAddress}) => { - InternalTable.DynamicContractRegistry.makeId(~chainId, ~contractAddress) - }, - ) - - let entities = { - account: { - makeStoreOperatorEntity( - ~inMemoryStore, - ~makeMockDb=makeWithInMemoryStore, - ~getStore=db => db->InMemoryStore.getInMemTable( - ~entityConfig=module(Entities.Account)->Entities.entityModToInternal, - )->( - Utils.magic: InMemoryTable.Entity.t => InMemoryTable.Entity.t< - Entities.Account.t, - > - ), - ~getKey=({id}) => id, - ) - }, - accountBalance: { - makeStoreOperatorEntity( - ~inMemoryStore, - ~makeMockDb=makeWithInMemoryStore, - ~getStore=db => db->InMemoryStore.getInMemTable( - ~entityConfig=module(Entities.AccountBalance)->Entities.entityModToInternal, - )->( - Utils.magic: InMemoryTable.Entity.t => InMemoryTable.Entity.t< - Entities.AccountBalance.t, - > - ), - ~getKey=({id}) => id, - ) - }, - accountPower: { - makeStoreOperatorEntity( - ~inMemoryStore, - ~makeMockDb=makeWithInMemoryStore, - ~getStore=db => db->InMemoryStore.getInMemTable( - 
~entityConfig=module(Entities.AccountPower)->Entities.entityModToInternal, - )->( - Utils.magic: InMemoryTable.Entity.t => InMemoryTable.Entity.t< - Entities.AccountPower.t, - > - ), - ~getKey=({id}) => id, - ) - }, - balanceHistory: { - makeStoreOperatorEntity( - ~inMemoryStore, - ~makeMockDb=makeWithInMemoryStore, - ~getStore=db => db->InMemoryStore.getInMemTable( - ~entityConfig=module(Entities.BalanceHistory)->Entities.entityModToInternal, - )->( - Utils.magic: InMemoryTable.Entity.t => InMemoryTable.Entity.t< - Entities.BalanceHistory.t, - > - ), - ~getKey=({id}) => id, - ) - }, - daoMetricsDayBucket: { - makeStoreOperatorEntity( - ~inMemoryStore, - ~makeMockDb=makeWithInMemoryStore, - ~getStore=db => db->InMemoryStore.getInMemTable( - ~entityConfig=module(Entities.DaoMetricsDayBucket)->Entities.entityModToInternal, - )->( - Utils.magic: InMemoryTable.Entity.t => InMemoryTable.Entity.t< - Entities.DaoMetricsDayBucket.t, - > - ), - ~getKey=({id}) => id, - ) - }, - delegation: { - makeStoreOperatorEntity( - ~inMemoryStore, - ~makeMockDb=makeWithInMemoryStore, - ~getStore=db => db->InMemoryStore.getInMemTable( - ~entityConfig=module(Entities.Delegation)->Entities.entityModToInternal, - )->( - Utils.magic: InMemoryTable.Entity.t => InMemoryTable.Entity.t< - Entities.Delegation.t, - > - ), - ~getKey=({id}) => id, - ) - }, - feedEvent: { - makeStoreOperatorEntity( - ~inMemoryStore, - ~makeMockDb=makeWithInMemoryStore, - ~getStore=db => db->InMemoryStore.getInMemTable( - ~entityConfig=module(Entities.FeedEvent)->Entities.entityModToInternal, - )->( - Utils.magic: InMemoryTable.Entity.t => InMemoryTable.Entity.t< - Entities.FeedEvent.t, - > - ), - ~getKey=({id}) => id, - ) - }, - proposalOnchain: { - makeStoreOperatorEntity( - ~inMemoryStore, - ~makeMockDb=makeWithInMemoryStore, - ~getStore=db => db->InMemoryStore.getInMemTable( - ~entityConfig=module(Entities.ProposalOnchain)->Entities.entityModToInternal, - )->( - Utils.magic: InMemoryTable.Entity.t => 
InMemoryTable.Entity.t< - Entities.ProposalOnchain.t, - > - ), - ~getKey=({id}) => id, - ) - }, - token: { - makeStoreOperatorEntity( - ~inMemoryStore, - ~makeMockDb=makeWithInMemoryStore, - ~getStore=db => db->InMemoryStore.getInMemTable( - ~entityConfig=module(Entities.Token)->Entities.entityModToInternal, - )->( - Utils.magic: InMemoryTable.Entity.t => InMemoryTable.Entity.t< - Entities.Token.t, - > - ), - ~getKey=({id}) => id, - ) - }, - tokenPrice: { - makeStoreOperatorEntity( - ~inMemoryStore, - ~makeMockDb=makeWithInMemoryStore, - ~getStore=db => db->InMemoryStore.getInMemTable( - ~entityConfig=module(Entities.TokenPrice)->Entities.entityModToInternal, - )->( - Utils.magic: InMemoryTable.Entity.t => InMemoryTable.Entity.t< - Entities.TokenPrice.t, - > - ), - ~getKey=({id}) => id, - ) - }, - transaction: { - makeStoreOperatorEntity( - ~inMemoryStore, - ~makeMockDb=makeWithInMemoryStore, - ~getStore=db => db->InMemoryStore.getInMemTable( - ~entityConfig=module(Entities.Transaction)->Entities.entityModToInternal, - )->( - Utils.magic: InMemoryTable.Entity.t => InMemoryTable.Entity.t< - Entities.Transaction.t, - > - ), - ~getKey=({id}) => id, - ) - }, - transfer: { - makeStoreOperatorEntity( - ~inMemoryStore, - ~makeMockDb=makeWithInMemoryStore, - ~getStore=db => db->InMemoryStore.getInMemTable( - ~entityConfig=module(Entities.Transfer)->Entities.entityModToInternal, - )->( - Utils.magic: InMemoryTable.Entity.t => InMemoryTable.Entity.t< - Entities.Transfer.t, - > - ), - ~getKey=({id}) => id, - ) - }, - voteOnchain: { - makeStoreOperatorEntity( - ~inMemoryStore, - ~makeMockDb=makeWithInMemoryStore, - ~getStore=db => db->InMemoryStore.getInMemTable( - ~entityConfig=module(Entities.VoteOnchain)->Entities.entityModToInternal, - )->( - Utils.magic: InMemoryTable.Entity.t => InMemoryTable.Entity.t< - Entities.VoteOnchain.t, - > - ), - ~getKey=({id}) => id, - ) - }, - votingPowerHistory: { - makeStoreOperatorEntity( - ~inMemoryStore, - 
~makeMockDb=makeWithInMemoryStore, - ~getStore=db => db->InMemoryStore.getInMemTable( - ~entityConfig=module(Entities.VotingPowerHistory)->Entities.entityModToInternal, - )->( - Utils.magic: InMemoryTable.Entity.t => InMemoryTable.Entity.t< - Entities.VotingPowerHistory.t, - > - ), - ~getKey=({id}) => id, - ) - }, - } - - let mockDb = { - __dbInternal__: inMemoryStore, - entities, - rawEvents, - dynamicContractRegistry, - processEvents: %raw(`null`), - } - (mockDb->Utils.magic)["processEvents"] = makeProcessEvents(mockDb, ~chainId=?None) - mockDb -} -and makeProcessEvents = (mockDb: t, ~chainId=?) => async ( - events: array>, -) => { - if events->Utils.Array.isEmpty { - mockDb - } else { - let itemsWithContractRegister = [] - - let registrations = EventRegister.finishRegistration() - let registrations = if ( - registrations.hasEvents || !(registrations.onBlockByChainId->Utils.Dict.isEmpty) - ) { - registrations - } else { - Generated.registerAllHandlers() - } - - let config = Generated.makeGeneratedConfig() - let indexer: Indexer.t = { - registrations, - config, - persistence: Generated.codegenPersistence, - } - - let processingChainId = ref(chainId) - let latestFetchedBlockNumber = ref(0) - let newItems = events->Array.map(event => { - let event = event->Internal.fromGenericEvent - let eventConfig = switch mockEventRegisters->Utils.WeakMap.get(event) { - | Some(register) => register() - | None => - Js.Exn.raiseError( - "Events must be created using the mock API (e.g. createMockEvent) to be processed by mockDb.processEvents", - ) - } - let chainId = switch chainId { - | Some(chainId) => chainId - | None => event.chainId - } - - switch processingChainId.contents { - | Some(prevItemChainId) => - if prevItemChainId !== chainId { - Js.Exn.raiseError( - `Processing events on multiple chains is not supported yet. 
Got chainId ${chainId->Belt.Int.toString} but expected ${prevItemChainId->Belt.Int.toString}`, - ) - } - | None => processingChainId.contents = Some(chainId) - } - - let chain = config->Config.getChain(~chainId) - let item = Internal.Event({ - eventConfig, - event, - chain, - logIndex: event.logIndex, - timestamp: event.block->Types.Block.getTimestamp, - blockNumber: event.block->Types.Block.getNumber, - }) - latestFetchedBlockNumber.contents = Pervasives.max( - latestFetchedBlockNumber.contents, - event.block->Types.Block.getNumber, - ) - if eventConfig.contractRegister->Option.isSome { - itemsWithContractRegister->Js.Array2.push(item)->ignore - } - item - }) - - let processingChainId = switch processingChainId.contents { - | Some(chainId) => chainId - | None => - Js.Exn.raiseError("No events provided to processEvents. Please provide at least one event.") - } - let processingChain = config->Config.getChain(~chainId=processingChainId) - - let chainFetcher = ChainFetcher.makeFromConfig( - config.chainMap->ChainMap.get(processingChain), - ~config, - ~registrations=indexer.registrations, - ~targetBufferSize=5000, - ) - - //Deep copy the data in mockDb, mutate the clone and return the clone - //So no side effects occur here and state can be compared between process - //steps - let mockDbClone = mockDb->cloneMockDb - - //Construct a new instance of an in memory store to run for the given event - let inMemoryStore = InMemoryStore.make(~entities=Entities.allEntities) - let loadManager = LoadManager.make() - let persistence = { - ...indexer.persistence, - storage: makeMockStorage(mockDb), - storageStatus: Ready({ - cleanRun: false, - cache: Js.Dict.empty(), - chains: [], - reorgCheckpoints: [], - checkpointId: 0, - }), - } - let indexer = { - ...indexer, - persistence, - } - - let newItemsWithDcs = if itemsWithContractRegister->Utils.Array.notEmpty { - await ChainFetcher.runContractRegistersOrThrow( - ~itemsWithContractRegister, - ~chain=processingChain, - ~config, - ) - } 
else { - itemsWithContractRegister - } - - let updatedFetchState = ref(chainFetcher.fetchState) - - switch newItemsWithDcs { - | [] => () - | _ => - updatedFetchState := - updatedFetchState.contents->FetchState.registerDynamicContracts(newItemsWithDcs) - } - - updatedFetchState := - updatedFetchState.contents - ->FetchState.handleQueryResult( - ~latestFetchedBlock={ - blockNumber: latestFetchedBlockNumber.contents, - blockTimestamp: 0, - }, - ~query={ - partitionId: (updatedFetchState.contents.partitions->Array.getUnsafe(0)).id, - fromBlock: 0, - selection: {eventConfigs: [], dependsOnAddresses: false}, - addressesByContractName: Js.Dict.empty(), - target: FetchState.Head, - indexingContracts: Js.Dict.empty(), - }, - ~newItems, - ) - ->Result.getExn - - // Handle query for the rest partitions without items - // to catch up the latest fully fetched block - for idx in 1 to updatedFetchState.contents.partitions->Array.length - 1 { - let partition = updatedFetchState.contents.partitions->Array.getUnsafe(idx) - updatedFetchState := - updatedFetchState.contents - ->FetchState.handleQueryResult( - ~latestFetchedBlock={ - blockNumber: latestFetchedBlockNumber.contents, - blockTimestamp: 0, - }, - ~query={ - partitionId: partition.id, - fromBlock: 0, - selection: {eventConfigs: [], dependsOnAddresses: false}, - addressesByContractName: Js.Dict.empty(), - target: FetchState.Head, - indexingContracts: Js.Dict.empty(), - }, - ~newItems=[], - ) - ->Result.getExn - } - - let batch = Batch.prepareUnorderedBatch( - ~checkpointIdBeforeBatch=0, - ~chainsBeforeBatch=ChainMap.fromArrayUnsafe([ - ( - processingChain, - ( - { - fetchState: updatedFetchState.contents, - reorgDetection: chainFetcher.reorgDetection, - progressBlockNumber: chainFetcher.committedProgressBlockNumber, - sourceBlockNumber: chainFetcher.currentBlockHeight, - totalEventsProcessed: chainFetcher.numEventsProcessed, - }: Batch.chainBeforeBatch - ), - ), - ]), - ~batchSizeTarget=newItems->Array.length, - ) - - 
inMemoryStore->InMemoryStore.setBatchDcs(~batch, ~shouldSaveHistory=false) - - // Create a mock chains state where the processing chain is ready (simulating "Live" mode) - let chains = Js.Dict.empty() - chains->Js.Dict.set(processingChainId->Int.toString, {Internal.isReady: true}) - - try { - await batch->EventProcessing.preloadBatchOrThrow( - ~loadManager, - ~persistence, - ~inMemoryStore, - ~chains, - ) - await batch->EventProcessing.runBatchHandlersOrThrow( - ~inMemoryStore, - ~loadManager, - ~indexer, - ~shouldSaveHistory=false, - ~shouldBenchmark=false, - ~chains, - ) - } catch { - | EventProcessing.ProcessingError({message, exn, item}) => - exn - ->ErrorHandling.make(~msg=message, ~logger=item->Logging.getItemLogger) - ->ErrorHandling.logAndRaise - } - - //In mem store can still contatin raw events and dynamic contracts for the - //testing framework in cases where either contract register or loaderHandler - //is None - mockDbClone->writeFromMemoryStore(~inMemoryStore) - mockDbClone - } -} -and makeMockStorage = (mockDb: t): Persistence.storage => { - { - isInitialized: () => Js.Exn.raiseError("Not used yet"), - initialize: (~chainConfigs as _=?, ~entities as _=?, ~enums as _=?) 
=> - Js.Exn.raiseError("Not used yet"), - resumeInitialState: () => Js.Exn.raiseError("Not used yet"), - loadByIdsOrThrow: ( - type item, - ~ids, - ~table: Table.table, - ~rowsSchema as _: S.t>, - ) => { - let operations = mockDb->getEntityOperations(~entityName=table.tableName) - ids - ->Array.keepMap(id => operations.get(id)) - ->(Utils.magic: array => array) - ->Promise.resolve - }, - loadByFieldOrThrow: ( - ~fieldName, - ~fieldSchema as _, - ~fieldValue, - ~operator, - ~table, - ~rowsSchema as _, - ) => { - let mockDbTable = - mockDb.__dbInternal__.entities->InMemoryStore.EntityTables.get(~entityName=table.tableName) - let index = TableIndices.Index.makeSingle( - ~fieldName, - ~fieldValue, - ~operator=switch operator { - | #"=" => Eq - | #">" => Gt - | #"<" => Lt - }, - ) - mockDbTable - ->InMemoryTable.Entity.values - ->Array.keep(entity => { - index->TableIndices.Index.evaluate( - ~fieldName, - ~fieldValue=entity->Utils.magic->Js.Dict.unsafeGet(fieldName), - ) - }) - ->Promise.resolve - }, - setOrThrow: (~items as _, ~table as _, ~itemSchema as _) => Js.Exn.raiseError("Not used yet"), - setEffectCacheOrThrow: (~effect as _, ~items as _, ~initialize as _) => Promise.resolve(), - dumpEffectCache: () => Js.Exn.raiseError("Not used yet"), - } -} -and /** -Deep copies the in memory store data and returns a new mockDb with the same -state and no references to data from the passed in mockDb -*/ -cloneMockDb = (self: t) => { - let clonedInternalDb = self->getInternalDb->InMemoryStore.clone - clonedInternalDb->makeWithInMemoryStore -} -and /** -Simulates the writing of processed data in the inMemoryStore to a mockDb. 
This function -executes all the rows on each "store" (or pg table) in the inMemoryStore -*/ -writeFromMemoryStore = (mockDb: t, ~inMemoryStore: InMemoryStore.t) => { - //INTERNAL STORES/TABLES EXECUTION - mockDb->executeRowsMeta( - ~inMemoryStore, - ~getInMemTable=inMemStore => {inMemStore.rawEvents}, - ~getKey=(entity): InMemoryStore.rawEventsKey => { - chainId: entity.chainId, - eventId: entity.eventId->BigInt.toString, - }, - ) - - Generated.codegenPersistence.allEntities->Array.forEach(entityConfig => { - mockDb->executeRowsEntity(~inMemoryStore, ~entityConfig) - }) -} - -/** -The constructor function for a mockDb. Call it and then set up the inital state by calling -any of the set functions it provides access to. A mockDb will be passed into a processEvent -helper. Note, process event helpers will not mutate the mockDb but return a new mockDb with -new state so you can compare states before and after. -*/ -@genType -let //Note: It's called createMockDb over "make" to make it more intuitive in JS and TS - -createMockDb = () => makeWithInMemoryStore(InMemoryStore.make(~entities=Entities.allEntities)) diff --git a/apps/hypersync-indexer/generated/src/Types.gen.ts b/apps/hypersync-indexer/generated/src/Types.gen.ts deleted file mode 100644 index d923ea555..000000000 --- a/apps/hypersync-indexer/generated/src/Types.gen.ts +++ /dev/null @@ -1,557 +0,0 @@ -/* TypeScript file generated from Types.res by genType. 
*/ - -/* eslint-disable */ -/* tslint:disable */ - -import type {AccountBalance_t as Entities_AccountBalance_t} from '../src/db/Entities.gen'; - -import type {AccountPower_t as Entities_AccountPower_t} from '../src/db/Entities.gen'; - -import type {Account_t as Entities_Account_t} from '../src/db/Entities.gen'; - -import type {BalanceHistory_t as Entities_BalanceHistory_t} from '../src/db/Entities.gen'; - -import type {DaoMetricsDayBucket_t as Entities_DaoMetricsDayBucket_t} from '../src/db/Entities.gen'; - -import type {Delegation_t as Entities_Delegation_t} from '../src/db/Entities.gen'; - -import type {FeedEvent_t as Entities_FeedEvent_t} from '../src/db/Entities.gen'; - -import type {HandlerContext as $$handlerContext} from './Types.ts'; - -import type {HandlerWithOptions as $$fnWithEventConfig} from './bindings/OpaqueTypes.ts'; - -import type {LoaderContext as $$loaderContext} from './Types.ts'; - -import type {ProposalOnchain_t as Entities_ProposalOnchain_t} from '../src/db/Entities.gen'; - -import type {SingleOrMultiple as $$SingleOrMultiple_t} from './bindings/OpaqueTypes'; - -import type {TokenPrice_t as Entities_TokenPrice_t} from '../src/db/Entities.gen'; - -import type {Token_t as Entities_Token_t} from '../src/db/Entities.gen'; - -import type {Transaction_t as Entities_Transaction_t} from '../src/db/Entities.gen'; - -import type {Transfer_t as Entities_Transfer_t} from '../src/db/Entities.gen'; - -import type {VoteOnchain_t as Entities_VoteOnchain_t} from '../src/db/Entities.gen'; - -import type {VotingPowerHistory_t as Entities_VotingPowerHistory_t} from '../src/db/Entities.gen'; - -import type {entityHandlerContext as Internal_entityHandlerContext} from 'envio/src/Internal.gen'; - -import type {eventOptions as Internal_eventOptions} from 'envio/src/Internal.gen'; - -import type {genericContractRegisterArgs as Internal_genericContractRegisterArgs} from 'envio/src/Internal.gen'; - -import type {genericContractRegister as 
Internal_genericContractRegister} from 'envio/src/Internal.gen'; - -import type {genericEvent as Internal_genericEvent} from 'envio/src/Internal.gen'; - -import type {genericHandlerArgs as Internal_genericHandlerArgs} from 'envio/src/Internal.gen'; - -import type {genericHandlerWithLoader as Internal_genericHandlerWithLoader} from 'envio/src/Internal.gen'; - -import type {genericHandler as Internal_genericHandler} from 'envio/src/Internal.gen'; - -import type {genericLoaderArgs as Internal_genericLoaderArgs} from 'envio/src/Internal.gen'; - -import type {genericLoader as Internal_genericLoader} from 'envio/src/Internal.gen'; - -import type {logger as Envio_logger} from 'envio/src/Envio.gen'; - -import type {noEventFilters as Internal_noEventFilters} from 'envio/src/Internal.gen'; - -import type {t as Address_t} from 'envio/src/Address.gen'; - -export type id = string; -export type Id = id; - -export type contractRegistrations = { - readonly log: Envio_logger; - readonly addENSGovernor: (_1:Address_t) => void; - readonly addENSToken: (_1:Address_t) => void -}; - -export type entityLoaderContext = { - readonly get: (_1:id) => Promise<(undefined | entity)>; - readonly getOrThrow: (_1:id, message:(undefined | string)) => Promise; - readonly getWhere: indexedFieldOperations; - readonly getOrCreate: (_1:entity) => Promise; - readonly set: (_1:entity) => void; - readonly deleteUnsafe: (_1:id) => void -}; - -export type loaderContext = $$loaderContext; - -export type entityHandlerContext = Internal_entityHandlerContext; - -export type handlerContext = $$handlerContext; - -export type account = Entities_Account_t; -export type Account = account; - -export type accountBalance = Entities_AccountBalance_t; -export type AccountBalance = accountBalance; - -export type accountPower = Entities_AccountPower_t; -export type AccountPower = accountPower; - -export type balanceHistory = Entities_BalanceHistory_t; -export type BalanceHistory = balanceHistory; - -export type 
daoMetricsDayBucket = Entities_DaoMetricsDayBucket_t; -export type DaoMetricsDayBucket = daoMetricsDayBucket; - -export type delegation = Entities_Delegation_t; -export type Delegation = delegation; - -export type feedEvent = Entities_FeedEvent_t; -export type FeedEvent = feedEvent; - -export type proposalOnchain = Entities_ProposalOnchain_t; -export type ProposalOnchain = proposalOnchain; - -export type token = Entities_Token_t; -export type Token = token; - -export type tokenPrice = Entities_TokenPrice_t; -export type TokenPrice = tokenPrice; - -export type transaction = Entities_Transaction_t; -export type Transaction = transaction; - -export type transfer = Entities_Transfer_t; -export type Transfer = transfer; - -export type voteOnchain = Entities_VoteOnchain_t; -export type VoteOnchain = voteOnchain; - -export type votingPowerHistory = Entities_VotingPowerHistory_t; -export type VotingPowerHistory = votingPowerHistory; - -export type Transaction_t = { - readonly hash: string; - readonly to: (undefined | Address_t); - readonly from: (undefined | Address_t) -}; - -export type Block_t = { - readonly number: number; - readonly timestamp: number; - readonly hash: string -}; - -export type AggregatedBlock_t = { - readonly hash: string; - readonly number: number; - readonly timestamp: number -}; - -export type AggregatedTransaction_t = { - readonly from: (undefined | Address_t); - readonly hash: string; - readonly to: (undefined | Address_t) -}; - -export type eventLog = Internal_genericEvent; -export type EventLog = eventLog; - -export type SingleOrMultiple_t = $$SingleOrMultiple_t; - -export type HandlerTypes_args = { readonly event: eventLog; readonly context: context }; - -export type HandlerTypes_contractRegisterArgs = Internal_genericContractRegisterArgs,contractRegistrations>; - -export type HandlerTypes_contractRegister = Internal_genericContractRegister>; - -export type HandlerTypes_loaderArgs = Internal_genericLoaderArgs,loaderContext>; - -export type 
HandlerTypes_loader = Internal_genericLoader,loaderReturn>; - -export type HandlerTypes_handlerArgs = Internal_genericHandlerArgs,handlerContext,loaderReturn>; - -export type HandlerTypes_handler = Internal_genericHandler>; - -export type HandlerTypes_loaderHandler = Internal_genericHandlerWithLoader,HandlerTypes_handler,eventFilters>; - -export type HandlerTypes_eventConfig = Internal_eventOptions; - -export type fnWithEventConfig = $$fnWithEventConfig; - -export type handlerWithOptions = fnWithEventConfig,HandlerTypes_eventConfig>; - -export type contractRegisterWithOptions = fnWithEventConfig,HandlerTypes_eventConfig>; - -export type ENSGovernor_chainId = 1; - -export type ENSGovernor_ProposalCreated_eventArgs = { - readonly proposalId: bigint; - readonly proposer: Address_t; - readonly targets: Address_t[]; - readonly values: bigint[]; - readonly signatures: string[]; - readonly calldatas: string[]; - readonly startBlock: bigint; - readonly endBlock: bigint; - readonly description: string -}; - -export type ENSGovernor_ProposalCreated_block = Block_t; - -export type ENSGovernor_ProposalCreated_transaction = Transaction_t; - -export type ENSGovernor_ProposalCreated_event = { - /** The parameters or arguments associated with this event. */ - readonly params: ENSGovernor_ProposalCreated_eventArgs; - /** The unique identifier of the blockchain network where this event occurred. */ - readonly chainId: ENSGovernor_chainId; - /** The address of the contract that emitted this event. */ - readonly srcAddress: Address_t; - /** The index of this event's log within the block. */ - readonly logIndex: number; - /** The transaction that triggered this event. Configurable in `config.yaml` via the `field_selection` option. */ - readonly transaction: ENSGovernor_ProposalCreated_transaction; - /** The block in which this event was recorded. Configurable in `config.yaml` via the `field_selection` option. 
*/ - readonly block: ENSGovernor_ProposalCreated_block -}; - -export type ENSGovernor_ProposalCreated_loaderArgs = Internal_genericLoaderArgs; - -export type ENSGovernor_ProposalCreated_loader = Internal_genericLoader; - -export type ENSGovernor_ProposalCreated_handlerArgs = Internal_genericHandlerArgs; - -export type ENSGovernor_ProposalCreated_handler = Internal_genericHandler>; - -export type ENSGovernor_ProposalCreated_contractRegister = Internal_genericContractRegister>; - -export type ENSGovernor_ProposalCreated_eventFilter = {}; - -export type ENSGovernor_ProposalCreated_eventFilters = Internal_noEventFilters; - -export type ENSGovernor_VoteCast_eventArgs = { - readonly voter: Address_t; - readonly proposalId: bigint; - readonly support: bigint; - readonly weight: bigint; - readonly reason: string -}; - -export type ENSGovernor_VoteCast_block = Block_t; - -export type ENSGovernor_VoteCast_transaction = Transaction_t; - -export type ENSGovernor_VoteCast_event = { - /** The parameters or arguments associated with this event. */ - readonly params: ENSGovernor_VoteCast_eventArgs; - /** The unique identifier of the blockchain network where this event occurred. */ - readonly chainId: ENSGovernor_chainId; - /** The address of the contract that emitted this event. */ - readonly srcAddress: Address_t; - /** The index of this event's log within the block. */ - readonly logIndex: number; - /** The transaction that triggered this event. Configurable in `config.yaml` via the `field_selection` option. */ - readonly transaction: ENSGovernor_VoteCast_transaction; - /** The block in which this event was recorded. Configurable in `config.yaml` via the `field_selection` option. 
*/ - readonly block: ENSGovernor_VoteCast_block -}; - -export type ENSGovernor_VoteCast_loaderArgs = Internal_genericLoaderArgs; - -export type ENSGovernor_VoteCast_loader = Internal_genericLoader; - -export type ENSGovernor_VoteCast_handlerArgs = Internal_genericHandlerArgs; - -export type ENSGovernor_VoteCast_handler = Internal_genericHandler>; - -export type ENSGovernor_VoteCast_contractRegister = Internal_genericContractRegister>; - -export type ENSGovernor_VoteCast_eventFilter = { readonly voter?: SingleOrMultiple_t }; - -export type ENSGovernor_VoteCast_eventFiltersArgs = { -/** The unique identifier of the blockchain network where this event occurred. */ -readonly chainId: ENSGovernor_chainId; -/** Addresses of the contracts indexing the event. */ -readonly addresses: Address_t[] }; - -export type ENSGovernor_VoteCast_eventFiltersDefinition = - ENSGovernor_VoteCast_eventFilter - | ENSGovernor_VoteCast_eventFilter[]; - -export type ENSGovernor_VoteCast_eventFilters = - ENSGovernor_VoteCast_eventFilter - | ENSGovernor_VoteCast_eventFilter[] - | ((_1:ENSGovernor_VoteCast_eventFiltersArgs) => ENSGovernor_VoteCast_eventFiltersDefinition); - -export type ENSGovernor_ProposalCanceled_eventArgs = { readonly proposalId: bigint }; - -export type ENSGovernor_ProposalCanceled_block = Block_t; - -export type ENSGovernor_ProposalCanceled_transaction = Transaction_t; - -export type ENSGovernor_ProposalCanceled_event = { - /** The parameters or arguments associated with this event. */ - readonly params: ENSGovernor_ProposalCanceled_eventArgs; - /** The unique identifier of the blockchain network where this event occurred. */ - readonly chainId: ENSGovernor_chainId; - /** The address of the contract that emitted this event. */ - readonly srcAddress: Address_t; - /** The index of this event's log within the block. */ - readonly logIndex: number; - /** The transaction that triggered this event. Configurable in `config.yaml` via the `field_selection` option. 
*/ - readonly transaction: ENSGovernor_ProposalCanceled_transaction; - /** The block in which this event was recorded. Configurable in `config.yaml` via the `field_selection` option. */ - readonly block: ENSGovernor_ProposalCanceled_block -}; - -export type ENSGovernor_ProposalCanceled_loaderArgs = Internal_genericLoaderArgs; - -export type ENSGovernor_ProposalCanceled_loader = Internal_genericLoader; - -export type ENSGovernor_ProposalCanceled_handlerArgs = Internal_genericHandlerArgs; - -export type ENSGovernor_ProposalCanceled_handler = Internal_genericHandler>; - -export type ENSGovernor_ProposalCanceled_contractRegister = Internal_genericContractRegister>; - -export type ENSGovernor_ProposalCanceled_eventFilter = {}; - -export type ENSGovernor_ProposalCanceled_eventFilters = Internal_noEventFilters; - -export type ENSGovernor_ProposalExecuted_eventArgs = { readonly proposalId: bigint }; - -export type ENSGovernor_ProposalExecuted_block = Block_t; - -export type ENSGovernor_ProposalExecuted_transaction = Transaction_t; - -export type ENSGovernor_ProposalExecuted_event = { - /** The parameters or arguments associated with this event. */ - readonly params: ENSGovernor_ProposalExecuted_eventArgs; - /** The unique identifier of the blockchain network where this event occurred. */ - readonly chainId: ENSGovernor_chainId; - /** The address of the contract that emitted this event. */ - readonly srcAddress: Address_t; - /** The index of this event's log within the block. */ - readonly logIndex: number; - /** The transaction that triggered this event. Configurable in `config.yaml` via the `field_selection` option. */ - readonly transaction: ENSGovernor_ProposalExecuted_transaction; - /** The block in which this event was recorded. Configurable in `config.yaml` via the `field_selection` option. 
*/ - readonly block: ENSGovernor_ProposalExecuted_block -}; - -export type ENSGovernor_ProposalExecuted_loaderArgs = Internal_genericLoaderArgs; - -export type ENSGovernor_ProposalExecuted_loader = Internal_genericLoader; - -export type ENSGovernor_ProposalExecuted_handlerArgs = Internal_genericHandlerArgs; - -export type ENSGovernor_ProposalExecuted_handler = Internal_genericHandler>; - -export type ENSGovernor_ProposalExecuted_contractRegister = Internal_genericContractRegister>; - -export type ENSGovernor_ProposalExecuted_eventFilter = {}; - -export type ENSGovernor_ProposalExecuted_eventFilters = Internal_noEventFilters; - -export type ENSGovernor_ProposalQueued_eventArgs = { readonly proposalId: bigint; readonly eta: bigint }; - -export type ENSGovernor_ProposalQueued_block = Block_t; - -export type ENSGovernor_ProposalQueued_transaction = Transaction_t; - -export type ENSGovernor_ProposalQueued_event = { - /** The parameters or arguments associated with this event. */ - readonly params: ENSGovernor_ProposalQueued_eventArgs; - /** The unique identifier of the blockchain network where this event occurred. */ - readonly chainId: ENSGovernor_chainId; - /** The address of the contract that emitted this event. */ - readonly srcAddress: Address_t; - /** The index of this event's log within the block. */ - readonly logIndex: number; - /** The transaction that triggered this event. Configurable in `config.yaml` via the `field_selection` option. */ - readonly transaction: ENSGovernor_ProposalQueued_transaction; - /** The block in which this event was recorded. Configurable in `config.yaml` via the `field_selection` option. 
*/ - readonly block: ENSGovernor_ProposalQueued_block -}; - -export type ENSGovernor_ProposalQueued_loaderArgs = Internal_genericLoaderArgs; - -export type ENSGovernor_ProposalQueued_loader = Internal_genericLoader; - -export type ENSGovernor_ProposalQueued_handlerArgs = Internal_genericHandlerArgs; - -export type ENSGovernor_ProposalQueued_handler = Internal_genericHandler>; - -export type ENSGovernor_ProposalQueued_contractRegister = Internal_genericContractRegister>; - -export type ENSGovernor_ProposalQueued_eventFilter = {}; - -export type ENSGovernor_ProposalQueued_eventFilters = Internal_noEventFilters; - -export type ENSToken_chainId = 1; - -export type ENSToken_Transfer_eventArgs = { - readonly from: Address_t; - readonly to: Address_t; - readonly value: bigint -}; - -export type ENSToken_Transfer_block = Block_t; - -export type ENSToken_Transfer_transaction = Transaction_t; - -export type ENSToken_Transfer_event = { - /** The parameters or arguments associated with this event. */ - readonly params: ENSToken_Transfer_eventArgs; - /** The unique identifier of the blockchain network where this event occurred. */ - readonly chainId: ENSToken_chainId; - /** The address of the contract that emitted this event. */ - readonly srcAddress: Address_t; - /** The index of this event's log within the block. */ - readonly logIndex: number; - /** The transaction that triggered this event. Configurable in `config.yaml` via the `field_selection` option. */ - readonly transaction: ENSToken_Transfer_transaction; - /** The block in which this event was recorded. Configurable in `config.yaml` via the `field_selection` option. 
*/ - readonly block: ENSToken_Transfer_block -}; - -export type ENSToken_Transfer_loaderArgs = Internal_genericLoaderArgs; - -export type ENSToken_Transfer_loader = Internal_genericLoader; - -export type ENSToken_Transfer_handlerArgs = Internal_genericHandlerArgs; - -export type ENSToken_Transfer_handler = Internal_genericHandler>; - -export type ENSToken_Transfer_contractRegister = Internal_genericContractRegister>; - -export type ENSToken_Transfer_eventFilter = { readonly from?: SingleOrMultiple_t; readonly to?: SingleOrMultiple_t }; - -export type ENSToken_Transfer_eventFiltersArgs = { -/** The unique identifier of the blockchain network where this event occurred. */ -readonly chainId: ENSToken_chainId; -/** Addresses of the contracts indexing the event. */ -readonly addresses: Address_t[] }; - -export type ENSToken_Transfer_eventFiltersDefinition = - ENSToken_Transfer_eventFilter - | ENSToken_Transfer_eventFilter[]; - -export type ENSToken_Transfer_eventFilters = - ENSToken_Transfer_eventFilter - | ENSToken_Transfer_eventFilter[] - | ((_1:ENSToken_Transfer_eventFiltersArgs) => ENSToken_Transfer_eventFiltersDefinition); - -export type ENSToken_DelegateChanged_eventArgs = { - readonly delegator: Address_t; - readonly fromDelegate: Address_t; - readonly toDelegate: Address_t -}; - -export type ENSToken_DelegateChanged_block = Block_t; - -export type ENSToken_DelegateChanged_transaction = Transaction_t; - -export type ENSToken_DelegateChanged_event = { - /** The parameters or arguments associated with this event. */ - readonly params: ENSToken_DelegateChanged_eventArgs; - /** The unique identifier of the blockchain network where this event occurred. */ - readonly chainId: ENSToken_chainId; - /** The address of the contract that emitted this event. */ - readonly srcAddress: Address_t; - /** The index of this event's log within the block. */ - readonly logIndex: number; - /** The transaction that triggered this event. 
Configurable in `config.yaml` via the `field_selection` option. */ - readonly transaction: ENSToken_DelegateChanged_transaction; - /** The block in which this event was recorded. Configurable in `config.yaml` via the `field_selection` option. */ - readonly block: ENSToken_DelegateChanged_block -}; - -export type ENSToken_DelegateChanged_loaderArgs = Internal_genericLoaderArgs; - -export type ENSToken_DelegateChanged_loader = Internal_genericLoader; - -export type ENSToken_DelegateChanged_handlerArgs = Internal_genericHandlerArgs; - -export type ENSToken_DelegateChanged_handler = Internal_genericHandler>; - -export type ENSToken_DelegateChanged_contractRegister = Internal_genericContractRegister>; - -export type ENSToken_DelegateChanged_eventFilter = { - readonly delegator?: SingleOrMultiple_t; - readonly fromDelegate?: SingleOrMultiple_t; - readonly toDelegate?: SingleOrMultiple_t -}; - -export type ENSToken_DelegateChanged_eventFiltersArgs = { -/** The unique identifier of the blockchain network where this event occurred. */ -readonly chainId: ENSToken_chainId; -/** Addresses of the contracts indexing the event. */ -readonly addresses: Address_t[] }; - -export type ENSToken_DelegateChanged_eventFiltersDefinition = - ENSToken_DelegateChanged_eventFilter - | ENSToken_DelegateChanged_eventFilter[]; - -export type ENSToken_DelegateChanged_eventFilters = - ENSToken_DelegateChanged_eventFilter - | ENSToken_DelegateChanged_eventFilter[] - | ((_1:ENSToken_DelegateChanged_eventFiltersArgs) => ENSToken_DelegateChanged_eventFiltersDefinition); - -export type ENSToken_DelegateVotesChanged_eventArgs = { - readonly delegate: Address_t; - readonly previousBalance: bigint; - readonly newBalance: bigint -}; - -export type ENSToken_DelegateVotesChanged_block = Block_t; - -export type ENSToken_DelegateVotesChanged_transaction = Transaction_t; - -export type ENSToken_DelegateVotesChanged_event = { - /** The parameters or arguments associated with this event. 
*/ - readonly params: ENSToken_DelegateVotesChanged_eventArgs; - /** The unique identifier of the blockchain network where this event occurred. */ - readonly chainId: ENSToken_chainId; - /** The address of the contract that emitted this event. */ - readonly srcAddress: Address_t; - /** The index of this event's log within the block. */ - readonly logIndex: number; - /** The transaction that triggered this event. Configurable in `config.yaml` via the `field_selection` option. */ - readonly transaction: ENSToken_DelegateVotesChanged_transaction; - /** The block in which this event was recorded. Configurable in `config.yaml` via the `field_selection` option. */ - readonly block: ENSToken_DelegateVotesChanged_block -}; - -export type ENSToken_DelegateVotesChanged_loaderArgs = Internal_genericLoaderArgs; - -export type ENSToken_DelegateVotesChanged_loader = Internal_genericLoader; - -export type ENSToken_DelegateVotesChanged_handlerArgs = Internal_genericHandlerArgs; - -export type ENSToken_DelegateVotesChanged_handler = Internal_genericHandler>; - -export type ENSToken_DelegateVotesChanged_contractRegister = Internal_genericContractRegister>; - -export type ENSToken_DelegateVotesChanged_eventFilter = { readonly delegate?: SingleOrMultiple_t }; - -export type ENSToken_DelegateVotesChanged_eventFiltersArgs = { -/** The unique identifier of the blockchain network where this event occurred. */ -readonly chainId: ENSToken_chainId; -/** Addresses of the contracts indexing the event. 
*/ -readonly addresses: Address_t[] }; - -export type ENSToken_DelegateVotesChanged_eventFiltersDefinition = - ENSToken_DelegateVotesChanged_eventFilter - | ENSToken_DelegateVotesChanged_eventFilter[]; - -export type ENSToken_DelegateVotesChanged_eventFilters = - ENSToken_DelegateVotesChanged_eventFilter - | ENSToken_DelegateVotesChanged_eventFilter[] - | ((_1:ENSToken_DelegateVotesChanged_eventFiltersArgs) => ENSToken_DelegateVotesChanged_eventFiltersDefinition); - -export type chainId = number; - -export type chain = 1; diff --git a/apps/hypersync-indexer/generated/src/Types.res b/apps/hypersync-indexer/generated/src/Types.res deleted file mode 100644 index 6e575921d..000000000 --- a/apps/hypersync-indexer/generated/src/Types.res +++ /dev/null @@ -1,957 +0,0 @@ -//************* -//***ENTITIES** -//************* -@genType.as("Id") -type id = string - -@genType -type contractRegistrations = { - log: Envio.logger, - // TODO: only add contracts we've registered for the event in the config - addENSGovernor: (Address.t) => unit, - addENSToken: (Address.t) => unit, -} - -@genType -type entityLoaderContext<'entity, 'indexedFieldOperations> = { - get: id => promise>, - getOrThrow: (id, ~message: string=?) => promise<'entity>, - getWhere: 'indexedFieldOperations, - getOrCreate: ('entity) => promise<'entity>, - set: 'entity => unit, - deleteUnsafe: id => unit, -} - -@genType.import(("./Types.ts", "LoaderContext")) -type loaderContext = { - log: Envio.logger, - effect: 'input 'output. 
(Envio.effect<'input, 'output>, 'input) => promise<'output>, - isPreload: bool, - chains: Internal.chains, - @as("Account") account: entityLoaderContext, - @as("AccountBalance") accountBalance: entityLoaderContext, - @as("AccountPower") accountPower: entityLoaderContext, - @as("BalanceHistory") balanceHistory: entityLoaderContext, - @as("DaoMetricsDayBucket") daoMetricsDayBucket: entityLoaderContext, - @as("Delegation") delegation: entityLoaderContext, - @as("FeedEvent") feedEvent: entityLoaderContext, - @as("ProposalOnchain") proposalOnchain: entityLoaderContext, - @as("Token") token: entityLoaderContext, - @as("TokenPrice") tokenPrice: entityLoaderContext, - @as("Transaction") transaction: entityLoaderContext, - @as("Transfer") transfer: entityLoaderContext, - @as("VoteOnchain") voteOnchain: entityLoaderContext, - @as("VotingPowerHistory") votingPowerHistory: entityLoaderContext, -} - -@genType -type entityHandlerContext<'entity> = Internal.entityHandlerContext<'entity> - -@genType.import(("./Types.ts", "HandlerContext")) -type handlerContext = { - log: Envio.logger, - effect: 'input 'output. 
(Envio.effect<'input, 'output>, 'input) => promise<'output>, - chains: Internal.chains, - @as("Account") account: entityHandlerContext, - @as("AccountBalance") accountBalance: entityHandlerContext, - @as("AccountPower") accountPower: entityHandlerContext, - @as("BalanceHistory") balanceHistory: entityHandlerContext, - @as("DaoMetricsDayBucket") daoMetricsDayBucket: entityHandlerContext, - @as("Delegation") delegation: entityHandlerContext, - @as("FeedEvent") feedEvent: entityHandlerContext, - @as("ProposalOnchain") proposalOnchain: entityHandlerContext, - @as("Token") token: entityHandlerContext, - @as("TokenPrice") tokenPrice: entityHandlerContext, - @as("Transaction") transaction: entityHandlerContext, - @as("Transfer") transfer: entityHandlerContext, - @as("VoteOnchain") voteOnchain: entityHandlerContext, - @as("VotingPowerHistory") votingPowerHistory: entityHandlerContext, -} - -//Re-exporting types for backwards compatability -@genType.as("Account") -type account = Entities.Account.t -@genType.as("AccountBalance") -type accountBalance = Entities.AccountBalance.t -@genType.as("AccountPower") -type accountPower = Entities.AccountPower.t -@genType.as("BalanceHistory") -type balanceHistory = Entities.BalanceHistory.t -@genType.as("DaoMetricsDayBucket") -type daoMetricsDayBucket = Entities.DaoMetricsDayBucket.t -@genType.as("Delegation") -type delegation = Entities.Delegation.t -@genType.as("FeedEvent") -type feedEvent = Entities.FeedEvent.t -@genType.as("ProposalOnchain") -type proposalOnchain = Entities.ProposalOnchain.t -@genType.as("Token") -type token = Entities.Token.t -@genType.as("TokenPrice") -type tokenPrice = Entities.TokenPrice.t -@genType.as("Transaction") -type transaction = Entities.Transaction.t -@genType.as("Transfer") -type transfer = Entities.Transfer.t -@genType.as("VoteOnchain") -type voteOnchain = Entities.VoteOnchain.t -@genType.as("VotingPowerHistory") -type votingPowerHistory = Entities.VotingPowerHistory.t - -//************* 
-//**CONTRACTS** -//************* - -module Transaction = { - @genType - type t = {hash: string, to: option, from: option} - - let schema = S.object((s): t => {hash: s.field("hash", S.string), to: s.field("to", S.nullable(Address.schema)), from: s.field("from", S.nullable(Address.schema))}) -} - -module Block = { - @genType - type t = {number: int, timestamp: int, hash: string} - - let schema = S.object((s): t => {number: s.field("number", S.int), timestamp: s.field("timestamp", S.int), hash: s.field("hash", S.string)}) - - @get - external getNumber: Internal.eventBlock => int = "number" - - @get - external getTimestamp: Internal.eventBlock => int = "timestamp" - - @get - external getId: Internal.eventBlock => string = "hash" - - let cleanUpRawEventFieldsInPlace: Js.Json.t => () = %raw(`fields => { - delete fields.hash - delete fields.number - delete fields.timestamp - }`) -} - -module AggregatedBlock = { - @genType - type t = {hash: string, number: int, timestamp: int} -} -module AggregatedTransaction = { - @genType - type t = {from: option, hash: string, to: option} -} - -@genType.as("EventLog") -type eventLog<'params> = Internal.genericEvent<'params, Block.t, Transaction.t> - -module SingleOrMultiple: { - @genType.import(("./bindings/OpaqueTypes", "SingleOrMultiple")) - type t<'a> - let normalizeOrThrow: (t<'a>, ~nestedArrayDepth: int=?) 
=> array<'a> - let single: 'a => t<'a> - let multiple: array<'a> => t<'a> -} = { - type t<'a> = Js.Json.t - - external single: 'a => t<'a> = "%identity" - external multiple: array<'a> => t<'a> = "%identity" - external castMultiple: t<'a> => array<'a> = "%identity" - external castSingle: t<'a> => 'a = "%identity" - - exception AmbiguousEmptyNestedArray - - let rec isMultiple = (t: t<'a>, ~nestedArrayDepth): bool => - switch t->Js.Json.decodeArray { - | None => false - | Some(_arr) if nestedArrayDepth == 0 => true - | Some([]) if nestedArrayDepth > 0 => - AmbiguousEmptyNestedArray->ErrorHandling.mkLogAndRaise( - ~msg="The given empty array could be interperated as a flat array (value) or nested array. Since it's ambiguous, - please pass in a nested empty array if the intention is to provide an empty array as a value", - ) - | Some(arr) => arr->Js.Array2.unsafe_get(0)->isMultiple(~nestedArrayDepth=nestedArrayDepth - 1) - } - - let normalizeOrThrow = (t: t<'a>, ~nestedArrayDepth=0): array<'a> => { - if t->isMultiple(~nestedArrayDepth) { - t->castMultiple - } else { - [t->castSingle] - } - } -} - -module HandlerTypes = { - @genType - type args<'eventArgs, 'context> = { - event: eventLog<'eventArgs>, - context: 'context, - } - - @genType - type contractRegisterArgs<'eventArgs> = Internal.genericContractRegisterArgs, contractRegistrations> - @genType - type contractRegister<'eventArgs> = Internal.genericContractRegister> - - @genType - type loaderArgs<'eventArgs> = Internal.genericLoaderArgs, loaderContext> - @genType - type loader<'eventArgs, 'loaderReturn> = Internal.genericLoader, 'loaderReturn> - - @genType - type handlerArgs<'eventArgs, 'loaderReturn> = Internal.genericHandlerArgs, handlerContext, 'loaderReturn> - - @genType - type handler<'eventArgs, 'loaderReturn> = Internal.genericHandler> - - @genType - type loaderHandler<'eventArgs, 'loaderReturn, 'eventFilters> = Internal.genericHandlerWithLoader< - loader<'eventArgs, 'loaderReturn>, - handler<'eventArgs, 
'loaderReturn>, - 'eventFilters - > - - @genType - type eventConfig<'eventFilters> = Internal.eventOptions<'eventFilters> -} - -module type Event = { - type event - - let handlerRegister: EventRegister.t - - type eventFilters -} - -@genType.import(("./bindings/OpaqueTypes.ts", "HandlerWithOptions")) -type fnWithEventConfig<'fn, 'eventConfig> = ('fn, ~eventConfig: 'eventConfig=?) => unit - -@genType -type handlerWithOptions<'eventArgs, 'loaderReturn, 'eventFilters> = fnWithEventConfig< - HandlerTypes.handler<'eventArgs, 'loaderReturn>, - HandlerTypes.eventConfig<'eventFilters>, -> - -@genType -type contractRegisterWithOptions<'eventArgs, 'eventFilters> = fnWithEventConfig< - HandlerTypes.contractRegister<'eventArgs>, - HandlerTypes.eventConfig<'eventFilters>, -> - -module MakeRegister = (Event: Event) => { - let contractRegister: fnWithEventConfig< - Internal.genericContractRegister< - Internal.genericContractRegisterArgs, - >, - HandlerTypes.eventConfig, - > = (contractRegister, ~eventConfig=?) => - Event.handlerRegister->EventRegister.setContractRegister( - contractRegister, - ~eventOptions=eventConfig, - ) - - let handler: fnWithEventConfig< - Internal.genericHandler>, - HandlerTypes.eventConfig, - > = (handler, ~eventConfig=?) 
=> { - Event.handlerRegister->EventRegister.setHandler(args => { - if args.context.isPreload { - Promise.resolve() - } else { - handler( - args->( - Utils.magic: Internal.genericHandlerArgs< - Event.event, - Internal.handlerContext, - 'loaderReturn, - > => Internal.genericHandlerArgs - ), - ) - } - }, ~eventOptions=eventConfig) - } - - let handlerWithLoader = ( - eventConfig: Internal.genericHandlerWithLoader< - Internal.genericLoader, 'loaderReturn>, - Internal.genericHandler< - Internal.genericHandlerArgs, - >, - Event.eventFilters, - >, - ) => { - Event.handlerRegister->EventRegister.setHandler( - args => { - let promise = eventConfig.loader( - args->( - Utils.magic: Internal.genericHandlerArgs< - Event.event, - Internal.handlerContext, - 'loaderReturn, - > => Internal.genericLoaderArgs - ), - ) - if args.context.isPreload { - promise->Promise.ignoreValue - } else { - promise->Promise.then(loaderReturn => { - (args->Obj.magic)["loaderReturn"] = loaderReturn - eventConfig.handler( - args->( - Utils.magic: Internal.genericHandlerArgs< - Event.event, - Internal.handlerContext, - 'loaderReturn, - > => Internal.genericHandlerArgs - ), - ) - }) - } - }, - ~eventOptions=switch eventConfig { - | {wildcard: ?None, eventFilters: ?None} => None - | _ => - Some({ - wildcard: ?eventConfig.wildcard, - eventFilters: ?eventConfig.eventFilters, - preRegisterDynamicContracts: ?eventConfig.preRegisterDynamicContracts, - }) - }, - ) - } -} - -module ENSGovernor = { -let abi = 
Ethers.makeAbi((%raw(`[{"type":"event","name":"ProposalCanceled","inputs":[{"name":"proposalId","type":"uint256","indexed":false}],"anonymous":false},{"type":"event","name":"ProposalCreated","inputs":[{"name":"proposalId","type":"uint256","indexed":false},{"name":"proposer","type":"address","indexed":false},{"name":"targets","type":"address[]","indexed":false},{"name":"values","type":"uint256[]","indexed":false},{"name":"signatures","type":"string[]","indexed":false},{"name":"calldatas","type":"bytes[]","indexed":false},{"name":"startBlock","type":"uint256","indexed":false},{"name":"endBlock","type":"uint256","indexed":false},{"name":"description","type":"string","indexed":false}],"anonymous":false},{"type":"event","name":"ProposalExecuted","inputs":[{"name":"proposalId","type":"uint256","indexed":false}],"anonymous":false},{"type":"event","name":"ProposalQueued","inputs":[{"name":"proposalId","type":"uint256","indexed":false},{"name":"eta","type":"uint256","indexed":false}],"anonymous":false},{"type":"event","name":"VoteCast","inputs":[{"name":"voter","type":"address","indexed":true},{"name":"proposalId","type":"uint256","indexed":false},{"name":"support","type":"uint8","indexed":false},{"name":"weight","type":"uint256","indexed":false},{"name":"reason","type":"string","indexed":false}],"anonymous":false}]`): Js.Json.t)) -let eventSignatures = ["ProposalCanceled(uint256 proposalId)", "ProposalCreated(uint256 proposalId, address proposer, address[] targets, uint256[] values, string[] signatures, bytes[] calldatas, uint256 startBlock, uint256 endBlock, string description)", "ProposalExecuted(uint256 proposalId)", "ProposalQueued(uint256 proposalId, uint256 eta)", "VoteCast(address indexed voter, uint256 proposalId, uint8 support, uint256 weight, string reason)"] -@genType type chainId = [#1] -let contractName = "ENSGovernor" - -module ProposalCreated = { - -let id = "0x7d84a6263ae0d98d3329bd7b46bb4e8d6f98cd35a7adb45c274c8b7fd5ebd5e0_1" -let sighash = 
"0x7d84a6263ae0d98d3329bd7b46bb4e8d6f98cd35a7adb45c274c8b7fd5ebd5e0" -let name = "ProposalCreated" -let contractName = contractName - -@genType -type eventArgs = {proposalId: bigint, proposer: Address.t, targets: array, values: array, signatures: array, calldatas: array, startBlock: bigint, endBlock: bigint, description: string} -@genType -type block = Block.t -@genType -type transaction = Transaction.t - -@genType -type event = { - /** The parameters or arguments associated with this event. */ - params: eventArgs, - /** The unique identifier of the blockchain network where this event occurred. */ - chainId: chainId, - /** The address of the contract that emitted this event. */ - srcAddress: Address.t, - /** The index of this event's log within the block. */ - logIndex: int, - /** The transaction that triggered this event. Configurable in `config.yaml` via the `field_selection` option. */ - transaction: transaction, - /** The block in which this event was recorded. Configurable in `config.yaml` via the `field_selection` option. 
*/ - block: block, -} - -@genType -type loaderArgs = Internal.genericLoaderArgs -@genType -type loader<'loaderReturn> = Internal.genericLoader -@genType -type handlerArgs<'loaderReturn> = Internal.genericHandlerArgs -@genType -type handler<'loaderReturn> = Internal.genericHandler> -@genType -type contractRegister = Internal.genericContractRegister> - -let paramsRawEventSchema = S.object((s): eventArgs => {proposalId: s.field("proposalId", BigInt.schema), proposer: s.field("proposer", Address.schema), targets: s.field("targets", S.array(Address.schema)), values: s.field("values", S.array(BigInt.schema)), signatures: s.field("signatures", S.array(S.string)), calldatas: s.field("calldatas", S.array(S.string)), startBlock: s.field("startBlock", BigInt.schema), endBlock: s.field("endBlock", BigInt.schema), description: s.field("description", S.string)}) -let blockSchema = Block.schema -let transactionSchema = Transaction.schema - -let handlerRegister: EventRegister.t = EventRegister.make( - ~contractName, - ~eventName=name, -) - -@genType -type eventFilter = {} - -@genType type eventFilters = Internal.noEventFilters - -let register = (): Internal.evmEventConfig => { - let {getEventFiltersOrThrow, filterByAddresses} = LogSelection.parseEventFiltersOrThrow(~eventFilters=handlerRegister->EventRegister.getEventFilters, ~sighash, ~params=[]) - { - getEventFiltersOrThrow, - filterByAddresses, - dependsOnAddresses: !(handlerRegister->EventRegister.isWildcard) || filterByAddresses, - blockSchema: blockSchema->(Utils.magic: S.t => S.t), - transactionSchema: transactionSchema->(Utils.magic: S.t => S.t), - convertHyperSyncEventArgs: (decodedEvent: HyperSyncClient.Decoder.decodedEvent) => {proposalId: decodedEvent.body->Js.Array2.unsafe_get(0)->HyperSyncClient.Decoder.toUnderlying->Utils.magic, proposer: decodedEvent.body->Js.Array2.unsafe_get(1)->HyperSyncClient.Decoder.toUnderlying->Utils.magic, targets: 
decodedEvent.body->Js.Array2.unsafe_get(2)->HyperSyncClient.Decoder.toUnderlying->Utils.magic, values: decodedEvent.body->Js.Array2.unsafe_get(3)->HyperSyncClient.Decoder.toUnderlying->Utils.magic, signatures: decodedEvent.body->Js.Array2.unsafe_get(4)->HyperSyncClient.Decoder.toUnderlying->Utils.magic, calldatas: decodedEvent.body->Js.Array2.unsafe_get(5)->HyperSyncClient.Decoder.toUnderlying->Utils.magic, startBlock: decodedEvent.body->Js.Array2.unsafe_get(6)->HyperSyncClient.Decoder.toUnderlying->Utils.magic, endBlock: decodedEvent.body->Js.Array2.unsafe_get(7)->HyperSyncClient.Decoder.toUnderlying->Utils.magic, description: decodedEvent.body->Js.Array2.unsafe_get(8)->HyperSyncClient.Decoder.toUnderlying->Utils.magic, }->(Utils.magic: eventArgs => Internal.eventParams), - id, - name, - contractName, - isWildcard: (handlerRegister->EventRegister.isWildcard), - handler: handlerRegister->EventRegister.getHandler, - contractRegister: handlerRegister->EventRegister.getContractRegister, - paramsRawEventSchema: paramsRawEventSchema->(Utils.magic: S.t => S.t), - } -} -} - -module VoteCast = { - -let id = "0xb8e138887d0aa13bab447e82de9d5c1777041ecd21ca36ba824ff1e6c07ddda4_2" -let sighash = "0xb8e138887d0aa13bab447e82de9d5c1777041ecd21ca36ba824ff1e6c07ddda4" -let name = "VoteCast" -let contractName = contractName - -@genType -type eventArgs = {voter: Address.t, proposalId: bigint, support: bigint, weight: bigint, reason: string} -@genType -type block = Block.t -@genType -type transaction = Transaction.t - -@genType -type event = { - /** The parameters or arguments associated with this event. */ - params: eventArgs, - /** The unique identifier of the blockchain network where this event occurred. */ - chainId: chainId, - /** The address of the contract that emitted this event. */ - srcAddress: Address.t, - /** The index of this event's log within the block. */ - logIndex: int, - /** The transaction that triggered this event. 
Configurable in `config.yaml` via the `field_selection` option. */ - transaction: transaction, - /** The block in which this event was recorded. Configurable in `config.yaml` via the `field_selection` option. */ - block: block, -} - -@genType -type loaderArgs = Internal.genericLoaderArgs -@genType -type loader<'loaderReturn> = Internal.genericLoader -@genType -type handlerArgs<'loaderReturn> = Internal.genericHandlerArgs -@genType -type handler<'loaderReturn> = Internal.genericHandler> -@genType -type contractRegister = Internal.genericContractRegister> - -let paramsRawEventSchema = S.object((s): eventArgs => {voter: s.field("voter", Address.schema), proposalId: s.field("proposalId", BigInt.schema), support: s.field("support", BigInt.schema), weight: s.field("weight", BigInt.schema), reason: s.field("reason", S.string)}) -let blockSchema = Block.schema -let transactionSchema = Transaction.schema - -let handlerRegister: EventRegister.t = EventRegister.make( - ~contractName, - ~eventName=name, -) - -@genType -type eventFilter = {@as("voter") voter?: SingleOrMultiple.t} - -@genType type eventFiltersArgs = {/** The unique identifier of the blockchain network where this event occurred. */ chainId: chainId, /** Addresses of the contracts indexing the event. 
*/ addresses: array} - -@genType @unboxed type eventFiltersDefinition = Single(eventFilter) | Multiple(array) - -@genType @unboxed type eventFilters = | ...eventFiltersDefinition | Dynamic(eventFiltersArgs => eventFiltersDefinition) - -let register = (): Internal.evmEventConfig => { - let {getEventFiltersOrThrow, filterByAddresses} = LogSelection.parseEventFiltersOrThrow(~eventFilters=handlerRegister->EventRegister.getEventFilters, ~sighash, ~params=["voter",], ~topic1=(_eventFilter) => _eventFilter->Utils.Dict.dangerouslyGetNonOption("voter")->Belt.Option.mapWithDefault([], topicFilters => topicFilters->Obj.magic->SingleOrMultiple.normalizeOrThrow->Belt.Array.map(TopicFilter.fromAddress))) - { - getEventFiltersOrThrow, - filterByAddresses, - dependsOnAddresses: !(handlerRegister->EventRegister.isWildcard) || filterByAddresses, - blockSchema: blockSchema->(Utils.magic: S.t => S.t), - transactionSchema: transactionSchema->(Utils.magic: S.t => S.t), - convertHyperSyncEventArgs: (decodedEvent: HyperSyncClient.Decoder.decodedEvent) => {voter: decodedEvent.indexed->Js.Array2.unsafe_get(0)->HyperSyncClient.Decoder.toUnderlying->Utils.magic, proposalId: decodedEvent.body->Js.Array2.unsafe_get(0)->HyperSyncClient.Decoder.toUnderlying->Utils.magic, support: decodedEvent.body->Js.Array2.unsafe_get(1)->HyperSyncClient.Decoder.toUnderlying->Utils.magic, weight: decodedEvent.body->Js.Array2.unsafe_get(2)->HyperSyncClient.Decoder.toUnderlying->Utils.magic, reason: decodedEvent.body->Js.Array2.unsafe_get(3)->HyperSyncClient.Decoder.toUnderlying->Utils.magic, }->(Utils.magic: eventArgs => Internal.eventParams), - id, - name, - contractName, - isWildcard: (handlerRegister->EventRegister.isWildcard), - handler: handlerRegister->EventRegister.getHandler, - contractRegister: handlerRegister->EventRegister.getContractRegister, - paramsRawEventSchema: paramsRawEventSchema->(Utils.magic: S.t => S.t), - } -} -} - -module ProposalCanceled = { - -let id = 
"0x789cf55be980739dad1d0699b93b58e806b51c9d96619bfa8fe0a28abaa7b30c_1" -let sighash = "0x789cf55be980739dad1d0699b93b58e806b51c9d96619bfa8fe0a28abaa7b30c" -let name = "ProposalCanceled" -let contractName = contractName - -@genType -type eventArgs = {proposalId: bigint} -@genType -type block = Block.t -@genType -type transaction = Transaction.t - -@genType -type event = { - /** The parameters or arguments associated with this event. */ - params: eventArgs, - /** The unique identifier of the blockchain network where this event occurred. */ - chainId: chainId, - /** The address of the contract that emitted this event. */ - srcAddress: Address.t, - /** The index of this event's log within the block. */ - logIndex: int, - /** The transaction that triggered this event. Configurable in `config.yaml` via the `field_selection` option. */ - transaction: transaction, - /** The block in which this event was recorded. Configurable in `config.yaml` via the `field_selection` option. */ - block: block, -} - -@genType -type loaderArgs = Internal.genericLoaderArgs -@genType -type loader<'loaderReturn> = Internal.genericLoader -@genType -type handlerArgs<'loaderReturn> = Internal.genericHandlerArgs -@genType -type handler<'loaderReturn> = Internal.genericHandler> -@genType -type contractRegister = Internal.genericContractRegister> - -let paramsRawEventSchema = S.object((s): eventArgs => {proposalId: s.field("proposalId", BigInt.schema)}) -let blockSchema = Block.schema -let transactionSchema = Transaction.schema - -let handlerRegister: EventRegister.t = EventRegister.make( - ~contractName, - ~eventName=name, -) - -@genType -type eventFilter = {} - -@genType type eventFilters = Internal.noEventFilters - -let register = (): Internal.evmEventConfig => { - let {getEventFiltersOrThrow, filterByAddresses} = LogSelection.parseEventFiltersOrThrow(~eventFilters=handlerRegister->EventRegister.getEventFilters, ~sighash, ~params=[]) - { - getEventFiltersOrThrow, - filterByAddresses, - 
dependsOnAddresses: !(handlerRegister->EventRegister.isWildcard) || filterByAddresses, - blockSchema: blockSchema->(Utils.magic: S.t => S.t), - transactionSchema: transactionSchema->(Utils.magic: S.t => S.t), - convertHyperSyncEventArgs: (decodedEvent: HyperSyncClient.Decoder.decodedEvent) => {proposalId: decodedEvent.body->Js.Array2.unsafe_get(0)->HyperSyncClient.Decoder.toUnderlying->Utils.magic, }->(Utils.magic: eventArgs => Internal.eventParams), - id, - name, - contractName, - isWildcard: (handlerRegister->EventRegister.isWildcard), - handler: handlerRegister->EventRegister.getHandler, - contractRegister: handlerRegister->EventRegister.getContractRegister, - paramsRawEventSchema: paramsRawEventSchema->(Utils.magic: S.t => S.t), - } -} -} - -module ProposalExecuted = { - -let id = "0x712ae1383f79ac853f8d882153778e0260ef8f03b504e2866e0593e04d2b291f_1" -let sighash = "0x712ae1383f79ac853f8d882153778e0260ef8f03b504e2866e0593e04d2b291f" -let name = "ProposalExecuted" -let contractName = contractName - -@genType -type eventArgs = {proposalId: bigint} -@genType -type block = Block.t -@genType -type transaction = Transaction.t - -@genType -type event = { - /** The parameters or arguments associated with this event. */ - params: eventArgs, - /** The unique identifier of the blockchain network where this event occurred. */ - chainId: chainId, - /** The address of the contract that emitted this event. */ - srcAddress: Address.t, - /** The index of this event's log within the block. */ - logIndex: int, - /** The transaction that triggered this event. Configurable in `config.yaml` via the `field_selection` option. */ - transaction: transaction, - /** The block in which this event was recorded. Configurable in `config.yaml` via the `field_selection` option. 
*/ - block: block, -} - -@genType -type loaderArgs = Internal.genericLoaderArgs -@genType -type loader<'loaderReturn> = Internal.genericLoader -@genType -type handlerArgs<'loaderReturn> = Internal.genericHandlerArgs -@genType -type handler<'loaderReturn> = Internal.genericHandler> -@genType -type contractRegister = Internal.genericContractRegister> - -let paramsRawEventSchema = S.object((s): eventArgs => {proposalId: s.field("proposalId", BigInt.schema)}) -let blockSchema = Block.schema -let transactionSchema = Transaction.schema - -let handlerRegister: EventRegister.t = EventRegister.make( - ~contractName, - ~eventName=name, -) - -@genType -type eventFilter = {} - -@genType type eventFilters = Internal.noEventFilters - -let register = (): Internal.evmEventConfig => { - let {getEventFiltersOrThrow, filterByAddresses} = LogSelection.parseEventFiltersOrThrow(~eventFilters=handlerRegister->EventRegister.getEventFilters, ~sighash, ~params=[]) - { - getEventFiltersOrThrow, - filterByAddresses, - dependsOnAddresses: !(handlerRegister->EventRegister.isWildcard) || filterByAddresses, - blockSchema: blockSchema->(Utils.magic: S.t => S.t), - transactionSchema: transactionSchema->(Utils.magic: S.t => S.t), - convertHyperSyncEventArgs: (decodedEvent: HyperSyncClient.Decoder.decodedEvent) => {proposalId: decodedEvent.body->Js.Array2.unsafe_get(0)->HyperSyncClient.Decoder.toUnderlying->Utils.magic, }->(Utils.magic: eventArgs => Internal.eventParams), - id, - name, - contractName, - isWildcard: (handlerRegister->EventRegister.isWildcard), - handler: handlerRegister->EventRegister.getHandler, - contractRegister: handlerRegister->EventRegister.getContractRegister, - paramsRawEventSchema: paramsRawEventSchema->(Utils.magic: S.t => S.t), - } -} -} - -module ProposalQueued = { - -let id = "0x9a2e42fd6722813d69113e7d0079d3d940171428df7373df9c7f7617cfda2892_1" -let sighash = "0x9a2e42fd6722813d69113e7d0079d3d940171428df7373df9c7f7617cfda2892" -let name = "ProposalQueued" -let 
contractName = contractName - -@genType -type eventArgs = {proposalId: bigint, eta: bigint} -@genType -type block = Block.t -@genType -type transaction = Transaction.t - -@genType -type event = { - /** The parameters or arguments associated with this event. */ - params: eventArgs, - /** The unique identifier of the blockchain network where this event occurred. */ - chainId: chainId, - /** The address of the contract that emitted this event. */ - srcAddress: Address.t, - /** The index of this event's log within the block. */ - logIndex: int, - /** The transaction that triggered this event. Configurable in `config.yaml` via the `field_selection` option. */ - transaction: transaction, - /** The block in which this event was recorded. Configurable in `config.yaml` via the `field_selection` option. */ - block: block, -} - -@genType -type loaderArgs = Internal.genericLoaderArgs -@genType -type loader<'loaderReturn> = Internal.genericLoader -@genType -type handlerArgs<'loaderReturn> = Internal.genericHandlerArgs -@genType -type handler<'loaderReturn> = Internal.genericHandler> -@genType -type contractRegister = Internal.genericContractRegister> - -let paramsRawEventSchema = S.object((s): eventArgs => {proposalId: s.field("proposalId", BigInt.schema), eta: s.field("eta", BigInt.schema)}) -let blockSchema = Block.schema -let transactionSchema = Transaction.schema - -let handlerRegister: EventRegister.t = EventRegister.make( - ~contractName, - ~eventName=name, -) - -@genType -type eventFilter = {} - -@genType type eventFilters = Internal.noEventFilters - -let register = (): Internal.evmEventConfig => { - let {getEventFiltersOrThrow, filterByAddresses} = LogSelection.parseEventFiltersOrThrow(~eventFilters=handlerRegister->EventRegister.getEventFilters, ~sighash, ~params=[]) - { - getEventFiltersOrThrow, - filterByAddresses, - dependsOnAddresses: !(handlerRegister->EventRegister.isWildcard) || filterByAddresses, - blockSchema: blockSchema->(Utils.magic: S.t => S.t), - 
transactionSchema: transactionSchema->(Utils.magic: S.t => S.t), - convertHyperSyncEventArgs: (decodedEvent: HyperSyncClient.Decoder.decodedEvent) => {proposalId: decodedEvent.body->Js.Array2.unsafe_get(0)->HyperSyncClient.Decoder.toUnderlying->Utils.magic, eta: decodedEvent.body->Js.Array2.unsafe_get(1)->HyperSyncClient.Decoder.toUnderlying->Utils.magic, }->(Utils.magic: eventArgs => Internal.eventParams), - id, - name, - contractName, - isWildcard: (handlerRegister->EventRegister.isWildcard), - handler: handlerRegister->EventRegister.getHandler, - contractRegister: handlerRegister->EventRegister.getContractRegister, - paramsRawEventSchema: paramsRawEventSchema->(Utils.magic: S.t => S.t), - } -} -} -} - -module ENSToken = { -let abi = Ethers.makeAbi((%raw(`[{"type":"event","name":"DelegateChanged","inputs":[{"name":"delegator","type":"address","indexed":true},{"name":"fromDelegate","type":"address","indexed":true},{"name":"toDelegate","type":"address","indexed":true}],"anonymous":false},{"type":"event","name":"DelegateVotesChanged","inputs":[{"name":"delegate","type":"address","indexed":true},{"name":"previousBalance","type":"uint256","indexed":false},{"name":"newBalance","type":"uint256","indexed":false}],"anonymous":false},{"type":"event","name":"Transfer","inputs":[{"name":"from","type":"address","indexed":true},{"name":"to","type":"address","indexed":true},{"name":"value","type":"uint256","indexed":false}],"anonymous":false}]`): Js.Json.t)) -let eventSignatures = ["DelegateChanged(address indexed delegator, address indexed fromDelegate, address indexed toDelegate)", "DelegateVotesChanged(address indexed delegate, uint256 previousBalance, uint256 newBalance)", "Transfer(address indexed from, address indexed to, uint256 value)"] -@genType type chainId = [#1] -let contractName = "ENSToken" - -module Transfer = { - -let id = "0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef_3" -let sighash = 
"0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef" -let name = "Transfer" -let contractName = contractName - -@genType -type eventArgs = {from: Address.t, to: Address.t, value: bigint} -@genType -type block = Block.t -@genType -type transaction = Transaction.t - -@genType -type event = { - /** The parameters or arguments associated with this event. */ - params: eventArgs, - /** The unique identifier of the blockchain network where this event occurred. */ - chainId: chainId, - /** The address of the contract that emitted this event. */ - srcAddress: Address.t, - /** The index of this event's log within the block. */ - logIndex: int, - /** The transaction that triggered this event. Configurable in `config.yaml` via the `field_selection` option. */ - transaction: transaction, - /** The block in which this event was recorded. Configurable in `config.yaml` via the `field_selection` option. */ - block: block, -} - -@genType -type loaderArgs = Internal.genericLoaderArgs -@genType -type loader<'loaderReturn> = Internal.genericLoader -@genType -type handlerArgs<'loaderReturn> = Internal.genericHandlerArgs -@genType -type handler<'loaderReturn> = Internal.genericHandler> -@genType -type contractRegister = Internal.genericContractRegister> - -let paramsRawEventSchema = S.object((s): eventArgs => {from: s.field("from", Address.schema), to: s.field("to", Address.schema), value: s.field("value", BigInt.schema)}) -let blockSchema = Block.schema -let transactionSchema = Transaction.schema - -let handlerRegister: EventRegister.t = EventRegister.make( - ~contractName, - ~eventName=name, -) - -@genType -type eventFilter = {@as("from") from?: SingleOrMultiple.t, @as("to") to?: SingleOrMultiple.t} - -@genType type eventFiltersArgs = {/** The unique identifier of the blockchain network where this event occurred. */ chainId: chainId, /** Addresses of the contracts indexing the event. 
*/ addresses: array} - -@genType @unboxed type eventFiltersDefinition = Single(eventFilter) | Multiple(array) - -@genType @unboxed type eventFilters = | ...eventFiltersDefinition | Dynamic(eventFiltersArgs => eventFiltersDefinition) - -let register = (): Internal.evmEventConfig => { - let {getEventFiltersOrThrow, filterByAddresses} = LogSelection.parseEventFiltersOrThrow(~eventFilters=handlerRegister->EventRegister.getEventFilters, ~sighash, ~params=["from","to",], ~topic1=(_eventFilter) => _eventFilter->Utils.Dict.dangerouslyGetNonOption("from")->Belt.Option.mapWithDefault([], topicFilters => topicFilters->Obj.magic->SingleOrMultiple.normalizeOrThrow->Belt.Array.map(TopicFilter.fromAddress)), ~topic2=(_eventFilter) => _eventFilter->Utils.Dict.dangerouslyGetNonOption("to")->Belt.Option.mapWithDefault([], topicFilters => topicFilters->Obj.magic->SingleOrMultiple.normalizeOrThrow->Belt.Array.map(TopicFilter.fromAddress))) - { - getEventFiltersOrThrow, - filterByAddresses, - dependsOnAddresses: !(handlerRegister->EventRegister.isWildcard) || filterByAddresses, - blockSchema: blockSchema->(Utils.magic: S.t => S.t), - transactionSchema: transactionSchema->(Utils.magic: S.t => S.t), - convertHyperSyncEventArgs: (decodedEvent: HyperSyncClient.Decoder.decodedEvent) => {from: decodedEvent.indexed->Js.Array2.unsafe_get(0)->HyperSyncClient.Decoder.toUnderlying->Utils.magic, to: decodedEvent.indexed->Js.Array2.unsafe_get(1)->HyperSyncClient.Decoder.toUnderlying->Utils.magic, value: decodedEvent.body->Js.Array2.unsafe_get(0)->HyperSyncClient.Decoder.toUnderlying->Utils.magic, }->(Utils.magic: eventArgs => Internal.eventParams), - id, - name, - contractName, - isWildcard: (handlerRegister->EventRegister.isWildcard), - handler: handlerRegister->EventRegister.getHandler, - contractRegister: handlerRegister->EventRegister.getContractRegister, - paramsRawEventSchema: paramsRawEventSchema->(Utils.magic: S.t => S.t), - } -} -} - -module DelegateChanged = { - -let id = 
"0x3134e8a2e6d97e929a7e54011ea5485d7d196dd5f0ba4d4ef95803e8e3fc257f_4" -let sighash = "0x3134e8a2e6d97e929a7e54011ea5485d7d196dd5f0ba4d4ef95803e8e3fc257f" -let name = "DelegateChanged" -let contractName = contractName - -@genType -type eventArgs = {delegator: Address.t, fromDelegate: Address.t, toDelegate: Address.t} -@genType -type block = Block.t -@genType -type transaction = Transaction.t - -@genType -type event = { - /** The parameters or arguments associated with this event. */ - params: eventArgs, - /** The unique identifier of the blockchain network where this event occurred. */ - chainId: chainId, - /** The address of the contract that emitted this event. */ - srcAddress: Address.t, - /** The index of this event's log within the block. */ - logIndex: int, - /** The transaction that triggered this event. Configurable in `config.yaml` via the `field_selection` option. */ - transaction: transaction, - /** The block in which this event was recorded. Configurable in `config.yaml` via the `field_selection` option. 
*/ - block: block, -} - -@genType -type loaderArgs = Internal.genericLoaderArgs -@genType -type loader<'loaderReturn> = Internal.genericLoader -@genType -type handlerArgs<'loaderReturn> = Internal.genericHandlerArgs -@genType -type handler<'loaderReturn> = Internal.genericHandler> -@genType -type contractRegister = Internal.genericContractRegister> - -let paramsRawEventSchema = S.object((s): eventArgs => {delegator: s.field("delegator", Address.schema), fromDelegate: s.field("fromDelegate", Address.schema), toDelegate: s.field("toDelegate", Address.schema)}) -let blockSchema = Block.schema -let transactionSchema = Transaction.schema - -let handlerRegister: EventRegister.t = EventRegister.make( - ~contractName, - ~eventName=name, -) - -@genType -type eventFilter = {@as("delegator") delegator?: SingleOrMultiple.t, @as("fromDelegate") fromDelegate?: SingleOrMultiple.t, @as("toDelegate") toDelegate?: SingleOrMultiple.t} - -@genType type eventFiltersArgs = {/** The unique identifier of the blockchain network where this event occurred. */ chainId: chainId, /** Addresses of the contracts indexing the event. 
*/ addresses: array} - -@genType @unboxed type eventFiltersDefinition = Single(eventFilter) | Multiple(array) - -@genType @unboxed type eventFilters = | ...eventFiltersDefinition | Dynamic(eventFiltersArgs => eventFiltersDefinition) - -let register = (): Internal.evmEventConfig => { - let {getEventFiltersOrThrow, filterByAddresses} = LogSelection.parseEventFiltersOrThrow(~eventFilters=handlerRegister->EventRegister.getEventFilters, ~sighash, ~params=["delegator","fromDelegate","toDelegate",], ~topic1=(_eventFilter) => _eventFilter->Utils.Dict.dangerouslyGetNonOption("delegator")->Belt.Option.mapWithDefault([], topicFilters => topicFilters->Obj.magic->SingleOrMultiple.normalizeOrThrow->Belt.Array.map(TopicFilter.fromAddress)), ~topic2=(_eventFilter) => _eventFilter->Utils.Dict.dangerouslyGetNonOption("fromDelegate")->Belt.Option.mapWithDefault([], topicFilters => topicFilters->Obj.magic->SingleOrMultiple.normalizeOrThrow->Belt.Array.map(TopicFilter.fromAddress)), ~topic3=(_eventFilter) => _eventFilter->Utils.Dict.dangerouslyGetNonOption("toDelegate")->Belt.Option.mapWithDefault([], topicFilters => topicFilters->Obj.magic->SingleOrMultiple.normalizeOrThrow->Belt.Array.map(TopicFilter.fromAddress))) - { - getEventFiltersOrThrow, - filterByAddresses, - dependsOnAddresses: !(handlerRegister->EventRegister.isWildcard) || filterByAddresses, - blockSchema: blockSchema->(Utils.magic: S.t => S.t), - transactionSchema: transactionSchema->(Utils.magic: S.t => S.t), - convertHyperSyncEventArgs: (decodedEvent: HyperSyncClient.Decoder.decodedEvent) => {delegator: decodedEvent.indexed->Js.Array2.unsafe_get(0)->HyperSyncClient.Decoder.toUnderlying->Utils.magic, fromDelegate: decodedEvent.indexed->Js.Array2.unsafe_get(1)->HyperSyncClient.Decoder.toUnderlying->Utils.magic, toDelegate: decodedEvent.indexed->Js.Array2.unsafe_get(2)->HyperSyncClient.Decoder.toUnderlying->Utils.magic, }->(Utils.magic: eventArgs => Internal.eventParams), - id, - name, - contractName, - isWildcard: 
(handlerRegister->EventRegister.isWildcard), - handler: handlerRegister->EventRegister.getHandler, - contractRegister: handlerRegister->EventRegister.getContractRegister, - paramsRawEventSchema: paramsRawEventSchema->(Utils.magic: S.t => S.t), - } -} -} - -module DelegateVotesChanged = { - -let id = "0xdec2bacdd2f05b59de34da9b523dff8be42e5e38e818c82fdb0bae774387a724_2" -let sighash = "0xdec2bacdd2f05b59de34da9b523dff8be42e5e38e818c82fdb0bae774387a724" -let name = "DelegateVotesChanged" -let contractName = contractName - -@genType -type eventArgs = {delegate: Address.t, previousBalance: bigint, newBalance: bigint} -@genType -type block = Block.t -@genType -type transaction = Transaction.t - -@genType -type event = { - /** The parameters or arguments associated with this event. */ - params: eventArgs, - /** The unique identifier of the blockchain network where this event occurred. */ - chainId: chainId, - /** The address of the contract that emitted this event. */ - srcAddress: Address.t, - /** The index of this event's log within the block. */ - logIndex: int, - /** The transaction that triggered this event. Configurable in `config.yaml` via the `field_selection` option. */ - transaction: transaction, - /** The block in which this event was recorded. Configurable in `config.yaml` via the `field_selection` option. 
*/ - block: block, -} - -@genType -type loaderArgs = Internal.genericLoaderArgs -@genType -type loader<'loaderReturn> = Internal.genericLoader -@genType -type handlerArgs<'loaderReturn> = Internal.genericHandlerArgs -@genType -type handler<'loaderReturn> = Internal.genericHandler> -@genType -type contractRegister = Internal.genericContractRegister> - -let paramsRawEventSchema = S.object((s): eventArgs => {delegate: s.field("delegate", Address.schema), previousBalance: s.field("previousBalance", BigInt.schema), newBalance: s.field("newBalance", BigInt.schema)}) -let blockSchema = Block.schema -let transactionSchema = Transaction.schema - -let handlerRegister: EventRegister.t = EventRegister.make( - ~contractName, - ~eventName=name, -) - -@genType -type eventFilter = {@as("delegate") delegate?: SingleOrMultiple.t} - -@genType type eventFiltersArgs = {/** The unique identifier of the blockchain network where this event occurred. */ chainId: chainId, /** Addresses of the contracts indexing the event. 
*/ addresses: array} - -@genType @unboxed type eventFiltersDefinition = Single(eventFilter) | Multiple(array) - -@genType @unboxed type eventFilters = | ...eventFiltersDefinition | Dynamic(eventFiltersArgs => eventFiltersDefinition) - -let register = (): Internal.evmEventConfig => { - let {getEventFiltersOrThrow, filterByAddresses} = LogSelection.parseEventFiltersOrThrow(~eventFilters=handlerRegister->EventRegister.getEventFilters, ~sighash, ~params=["delegate",], ~topic1=(_eventFilter) => _eventFilter->Utils.Dict.dangerouslyGetNonOption("delegate")->Belt.Option.mapWithDefault([], topicFilters => topicFilters->Obj.magic->SingleOrMultiple.normalizeOrThrow->Belt.Array.map(TopicFilter.fromAddress))) - { - getEventFiltersOrThrow, - filterByAddresses, - dependsOnAddresses: !(handlerRegister->EventRegister.isWildcard) || filterByAddresses, - blockSchema: blockSchema->(Utils.magic: S.t => S.t), - transactionSchema: transactionSchema->(Utils.magic: S.t => S.t), - convertHyperSyncEventArgs: (decodedEvent: HyperSyncClient.Decoder.decodedEvent) => {delegate: decodedEvent.indexed->Js.Array2.unsafe_get(0)->HyperSyncClient.Decoder.toUnderlying->Utils.magic, previousBalance: decodedEvent.body->Js.Array2.unsafe_get(0)->HyperSyncClient.Decoder.toUnderlying->Utils.magic, newBalance: decodedEvent.body->Js.Array2.unsafe_get(1)->HyperSyncClient.Decoder.toUnderlying->Utils.magic, }->(Utils.magic: eventArgs => Internal.eventParams), - id, - name, - contractName, - isWildcard: (handlerRegister->EventRegister.isWildcard), - handler: handlerRegister->EventRegister.getHandler, - contractRegister: handlerRegister->EventRegister.getContractRegister, - paramsRawEventSchema: paramsRawEventSchema->(Utils.magic: S.t => S.t), - } -} -} -} - -@genType -type chainId = int - -@genType -type chain = [#1] diff --git a/apps/hypersync-indexer/generated/src/Types.ts b/apps/hypersync-indexer/generated/src/Types.ts deleted file mode 100644 index 232ca6123..000000000 --- 
a/apps/hypersync-indexer/generated/src/Types.ts +++ /dev/null @@ -1,832 +0,0 @@ -// This file is to dynamically generate TS types -// which we can't get using GenType -// Use @genType.import to link the types back to ReScript code - -import type { Logger, EffectCaller } from "envio"; -import type * as Entities from "./db/Entities.gen.ts"; - -export type LoaderContext = { - /** - * Access the logger instance with event as a context. The logs will be displayed in the console and Envio Hosted Service. - */ - readonly log: Logger; - /** - * Call the provided Effect with the given input. - * Effects are the best for external calls with automatic deduplication, error handling and caching. - * Define a new Effect using createEffect outside of the handler. - */ - readonly effect: EffectCaller; - /** - * True when the handlers run in preload mode - in parallel for the whole batch. - * Handlers run twice per batch of events, and the first time is the "preload" run - * During preload entities aren't set, logs are ignored and exceptions are silently swallowed. - * Preload mode is the best time to populate data to in-memory cache. - * After preload the handler will run for the second time in sequential order of events. - */ - readonly isPreload: boolean; - /** - * Per-chain state information accessible in event handlers and block handlers. - * Each chain ID maps to an object containing chain-specific state: - * - isReady: true when the chain has completed initial sync and is processing live events, - * false during historical synchronization - */ - readonly chains: { - [chainId: string]: { - readonly isReady: boolean; - }; - }; - readonly Account: { - /** - * Load the entity Account from the storage by ID. - * If the entity is not found, returns undefined. - */ - readonly get: (id: string) => Promise, - /** - * Load the entity Account from the storage by ID. - * If the entity is not found, throws an error. 
- */ - readonly getOrThrow: (id: string, message?: string) => Promise, - readonly getWhere: Entities.Account_indexedFieldOperations, - /** - * Returns the entity Account from the storage by ID. - * If the entity is not found, creates it using provided parameters and returns it. - */ - readonly getOrCreate: (entity: Entities.Account_t) => Promise, - /** - * Set the entity Account in the storage. - */ - readonly set: (entity: Entities.Account_t) => void, - /** - * Delete the entity Account from the storage. - * - * The 'deleteUnsafe' method is experimental and unsafe. You should manually handle all entity references after deletion to maintain database consistency. - */ - readonly deleteUnsafe: (id: string) => void, - } - readonly AccountBalance: { - /** - * Load the entity AccountBalance from the storage by ID. - * If the entity is not found, returns undefined. - */ - readonly get: (id: string) => Promise, - /** - * Load the entity AccountBalance from the storage by ID. - * If the entity is not found, throws an error. - */ - readonly getOrThrow: (id: string, message?: string) => Promise, - readonly getWhere: Entities.AccountBalance_indexedFieldOperations, - /** - * Returns the entity AccountBalance from the storage by ID. - * If the entity is not found, creates it using provided parameters and returns it. - */ - readonly getOrCreate: (entity: Entities.AccountBalance_t) => Promise, - /** - * Set the entity AccountBalance in the storage. - */ - readonly set: (entity: Entities.AccountBalance_t) => void, - /** - * Delete the entity AccountBalance from the storage. - * - * The 'deleteUnsafe' method is experimental and unsafe. You should manually handle all entity references after deletion to maintain database consistency. - */ - readonly deleteUnsafe: (id: string) => void, - } - readonly AccountPower: { - /** - * Load the entity AccountPower from the storage by ID. - * If the entity is not found, returns undefined. 
- */ - readonly get: (id: string) => Promise, - /** - * Load the entity AccountPower from the storage by ID. - * If the entity is not found, throws an error. - */ - readonly getOrThrow: (id: string, message?: string) => Promise, - readonly getWhere: Entities.AccountPower_indexedFieldOperations, - /** - * Returns the entity AccountPower from the storage by ID. - * If the entity is not found, creates it using provided parameters and returns it. - */ - readonly getOrCreate: (entity: Entities.AccountPower_t) => Promise, - /** - * Set the entity AccountPower in the storage. - */ - readonly set: (entity: Entities.AccountPower_t) => void, - /** - * Delete the entity AccountPower from the storage. - * - * The 'deleteUnsafe' method is experimental and unsafe. You should manually handle all entity references after deletion to maintain database consistency. - */ - readonly deleteUnsafe: (id: string) => void, - } - readonly BalanceHistory: { - /** - * Load the entity BalanceHistory from the storage by ID. - * If the entity is not found, returns undefined. - */ - readonly get: (id: string) => Promise, - /** - * Load the entity BalanceHistory from the storage by ID. - * If the entity is not found, throws an error. - */ - readonly getOrThrow: (id: string, message?: string) => Promise, - readonly getWhere: Entities.BalanceHistory_indexedFieldOperations, - /** - * Returns the entity BalanceHistory from the storage by ID. - * If the entity is not found, creates it using provided parameters and returns it. - */ - readonly getOrCreate: (entity: Entities.BalanceHistory_t) => Promise, - /** - * Set the entity BalanceHistory in the storage. - */ - readonly set: (entity: Entities.BalanceHistory_t) => void, - /** - * Delete the entity BalanceHistory from the storage. - * - * The 'deleteUnsafe' method is experimental and unsafe. You should manually handle all entity references after deletion to maintain database consistency. 
- */ - readonly deleteUnsafe: (id: string) => void, - } - readonly DaoMetricsDayBucket: { - /** - * Load the entity DaoMetricsDayBucket from the storage by ID. - * If the entity is not found, returns undefined. - */ - readonly get: (id: string) => Promise, - /** - * Load the entity DaoMetricsDayBucket from the storage by ID. - * If the entity is not found, throws an error. - */ - readonly getOrThrow: (id: string, message?: string) => Promise, - readonly getWhere: Entities.DaoMetricsDayBucket_indexedFieldOperations, - /** - * Returns the entity DaoMetricsDayBucket from the storage by ID. - * If the entity is not found, creates it using provided parameters and returns it. - */ - readonly getOrCreate: (entity: Entities.DaoMetricsDayBucket_t) => Promise, - /** - * Set the entity DaoMetricsDayBucket in the storage. - */ - readonly set: (entity: Entities.DaoMetricsDayBucket_t) => void, - /** - * Delete the entity DaoMetricsDayBucket from the storage. - * - * The 'deleteUnsafe' method is experimental and unsafe. You should manually handle all entity references after deletion to maintain database consistency. - */ - readonly deleteUnsafe: (id: string) => void, - } - readonly Delegation: { - /** - * Load the entity Delegation from the storage by ID. - * If the entity is not found, returns undefined. - */ - readonly get: (id: string) => Promise, - /** - * Load the entity Delegation from the storage by ID. - * If the entity is not found, throws an error. - */ - readonly getOrThrow: (id: string, message?: string) => Promise, - readonly getWhere: Entities.Delegation_indexedFieldOperations, - /** - * Returns the entity Delegation from the storage by ID. - * If the entity is not found, creates it using provided parameters and returns it. - */ - readonly getOrCreate: (entity: Entities.Delegation_t) => Promise, - /** - * Set the entity Delegation in the storage. - */ - readonly set: (entity: Entities.Delegation_t) => void, - /** - * Delete the entity Delegation from the storage. 
- * - * The 'deleteUnsafe' method is experimental and unsafe. You should manually handle all entity references after deletion to maintain database consistency. - */ - readonly deleteUnsafe: (id: string) => void, - } - readonly FeedEvent: { - /** - * Load the entity FeedEvent from the storage by ID. - * If the entity is not found, returns undefined. - */ - readonly get: (id: string) => Promise, - /** - * Load the entity FeedEvent from the storage by ID. - * If the entity is not found, throws an error. - */ - readonly getOrThrow: (id: string, message?: string) => Promise, - readonly getWhere: Entities.FeedEvent_indexedFieldOperations, - /** - * Returns the entity FeedEvent from the storage by ID. - * If the entity is not found, creates it using provided parameters and returns it. - */ - readonly getOrCreate: (entity: Entities.FeedEvent_t) => Promise, - /** - * Set the entity FeedEvent in the storage. - */ - readonly set: (entity: Entities.FeedEvent_t) => void, - /** - * Delete the entity FeedEvent from the storage. - * - * The 'deleteUnsafe' method is experimental and unsafe. You should manually handle all entity references after deletion to maintain database consistency. - */ - readonly deleteUnsafe: (id: string) => void, - } - readonly ProposalOnchain: { - /** - * Load the entity ProposalOnchain from the storage by ID. - * If the entity is not found, returns undefined. - */ - readonly get: (id: string) => Promise, - /** - * Load the entity ProposalOnchain from the storage by ID. - * If the entity is not found, throws an error. - */ - readonly getOrThrow: (id: string, message?: string) => Promise, - readonly getWhere: Entities.ProposalOnchain_indexedFieldOperations, - /** - * Returns the entity ProposalOnchain from the storage by ID. - * If the entity is not found, creates it using provided parameters and returns it. - */ - readonly getOrCreate: (entity: Entities.ProposalOnchain_t) => Promise, - /** - * Set the entity ProposalOnchain in the storage. 
- */ - readonly set: (entity: Entities.ProposalOnchain_t) => void, - /** - * Delete the entity ProposalOnchain from the storage. - * - * The 'deleteUnsafe' method is experimental and unsafe. You should manually handle all entity references after deletion to maintain database consistency. - */ - readonly deleteUnsafe: (id: string) => void, - } - readonly Token: { - /** - * Load the entity Token from the storage by ID. - * If the entity is not found, returns undefined. - */ - readonly get: (id: string) => Promise, - /** - * Load the entity Token from the storage by ID. - * If the entity is not found, throws an error. - */ - readonly getOrThrow: (id: string, message?: string) => Promise, - readonly getWhere: Entities.Token_indexedFieldOperations, - /** - * Returns the entity Token from the storage by ID. - * If the entity is not found, creates it using provided parameters and returns it. - */ - readonly getOrCreate: (entity: Entities.Token_t) => Promise, - /** - * Set the entity Token in the storage. - */ - readonly set: (entity: Entities.Token_t) => void, - /** - * Delete the entity Token from the storage. - * - * The 'deleteUnsafe' method is experimental and unsafe. You should manually handle all entity references after deletion to maintain database consistency. - */ - readonly deleteUnsafe: (id: string) => void, - } - readonly TokenPrice: { - /** - * Load the entity TokenPrice from the storage by ID. - * If the entity is not found, returns undefined. - */ - readonly get: (id: string) => Promise, - /** - * Load the entity TokenPrice from the storage by ID. - * If the entity is not found, throws an error. - */ - readonly getOrThrow: (id: string, message?: string) => Promise, - readonly getWhere: Entities.TokenPrice_indexedFieldOperations, - /** - * Returns the entity TokenPrice from the storage by ID. - * If the entity is not found, creates it using provided parameters and returns it. 
- */ - readonly getOrCreate: (entity: Entities.TokenPrice_t) => Promise, - /** - * Set the entity TokenPrice in the storage. - */ - readonly set: (entity: Entities.TokenPrice_t) => void, - /** - * Delete the entity TokenPrice from the storage. - * - * The 'deleteUnsafe' method is experimental and unsafe. You should manually handle all entity references after deletion to maintain database consistency. - */ - readonly deleteUnsafe: (id: string) => void, - } - readonly Transaction: { - /** - * Load the entity Transaction from the storage by ID. - * If the entity is not found, returns undefined. - */ - readonly get: (id: string) => Promise, - /** - * Load the entity Transaction from the storage by ID. - * If the entity is not found, throws an error. - */ - readonly getOrThrow: (id: string, message?: string) => Promise, - readonly getWhere: Entities.Transaction_indexedFieldOperations, - /** - * Returns the entity Transaction from the storage by ID. - * If the entity is not found, creates it using provided parameters and returns it. - */ - readonly getOrCreate: (entity: Entities.Transaction_t) => Promise, - /** - * Set the entity Transaction in the storage. - */ - readonly set: (entity: Entities.Transaction_t) => void, - /** - * Delete the entity Transaction from the storage. - * - * The 'deleteUnsafe' method is experimental and unsafe. You should manually handle all entity references after deletion to maintain database consistency. - */ - readonly deleteUnsafe: (id: string) => void, - } - readonly Transfer: { - /** - * Load the entity Transfer from the storage by ID. - * If the entity is not found, returns undefined. - */ - readonly get: (id: string) => Promise, - /** - * Load the entity Transfer from the storage by ID. - * If the entity is not found, throws an error. - */ - readonly getOrThrow: (id: string, message?: string) => Promise, - readonly getWhere: Entities.Transfer_indexedFieldOperations, - /** - * Returns the entity Transfer from the storage by ID. 
- * If the entity is not found, creates it using provided parameters and returns it. - */ - readonly getOrCreate: (entity: Entities.Transfer_t) => Promise, - /** - * Set the entity Transfer in the storage. - */ - readonly set: (entity: Entities.Transfer_t) => void, - /** - * Delete the entity Transfer from the storage. - * - * The 'deleteUnsafe' method is experimental and unsafe. You should manually handle all entity references after deletion to maintain database consistency. - */ - readonly deleteUnsafe: (id: string) => void, - } - readonly VoteOnchain: { - /** - * Load the entity VoteOnchain from the storage by ID. - * If the entity is not found, returns undefined. - */ - readonly get: (id: string) => Promise, - /** - * Load the entity VoteOnchain from the storage by ID. - * If the entity is not found, throws an error. - */ - readonly getOrThrow: (id: string, message?: string) => Promise, - readonly getWhere: Entities.VoteOnchain_indexedFieldOperations, - /** - * Returns the entity VoteOnchain from the storage by ID. - * If the entity is not found, creates it using provided parameters and returns it. - */ - readonly getOrCreate: (entity: Entities.VoteOnchain_t) => Promise, - /** - * Set the entity VoteOnchain in the storage. - */ - readonly set: (entity: Entities.VoteOnchain_t) => void, - /** - * Delete the entity VoteOnchain from the storage. - * - * The 'deleteUnsafe' method is experimental and unsafe. You should manually handle all entity references after deletion to maintain database consistency. - */ - readonly deleteUnsafe: (id: string) => void, - } - readonly VotingPowerHistory: { - /** - * Load the entity VotingPowerHistory from the storage by ID. - * If the entity is not found, returns undefined. - */ - readonly get: (id: string) => Promise, - /** - * Load the entity VotingPowerHistory from the storage by ID. - * If the entity is not found, throws an error. 
- */ - readonly getOrThrow: (id: string, message?: string) => Promise, - readonly getWhere: Entities.VotingPowerHistory_indexedFieldOperations, - /** - * Returns the entity VotingPowerHistory from the storage by ID. - * If the entity is not found, creates it using provided parameters and returns it. - */ - readonly getOrCreate: (entity: Entities.VotingPowerHistory_t) => Promise, - /** - * Set the entity VotingPowerHistory in the storage. - */ - readonly set: (entity: Entities.VotingPowerHistory_t) => void, - /** - * Delete the entity VotingPowerHistory from the storage. - * - * The 'deleteUnsafe' method is experimental and unsafe. You should manually handle all entity references after deletion to maintain database consistency. - */ - readonly deleteUnsafe: (id: string) => void, - } -}; - -export type HandlerContext = { - /** - * Access the logger instance with event as a context. The logs will be displayed in the console and Envio Hosted Service. - */ - readonly log: Logger; - /** - * Call the provided Effect with the given input. - * Effects are the best for external calls with automatic deduplication, error handling and caching. - * Define a new Effect using createEffect outside of the handler. - */ - readonly effect: EffectCaller; - /** - * Per-chain state information accessible in event handlers and block handlers. - * Each chain ID maps to an object containing chain-specific state: - * - isReady: true when the chain has completed initial sync and is processing live events, - * false during historical synchronization - */ - readonly chains: { - [chainId: string]: { - readonly isReady: boolean; - }; - }; - readonly Account: { - /** - * Load the entity Account from the storage by ID. - * If the entity is not found, returns undefined. - */ - readonly get: (id: string) => Promise, - /** - * Load the entity Account from the storage by ID. - * If the entity is not found, throws an error. 
- */ - readonly getOrThrow: (id: string, message?: string) => Promise, - /** - * Returns the entity Account from the storage by ID. - * If the entity is not found, creates it using provided parameters and returns it. - */ - readonly getOrCreate: (entity: Entities.Account_t) => Promise, - /** - * Set the entity Account in the storage. - */ - readonly set: (entity: Entities.Account_t) => void, - /** - * Delete the entity Account from the storage. - * - * The 'deleteUnsafe' method is experimental and unsafe. You should manually handle all entity references after deletion to maintain database consistency. - */ - readonly deleteUnsafe: (id: string) => void, - } - readonly AccountBalance: { - /** - * Load the entity AccountBalance from the storage by ID. - * If the entity is not found, returns undefined. - */ - readonly get: (id: string) => Promise, - /** - * Load the entity AccountBalance from the storage by ID. - * If the entity is not found, throws an error. - */ - readonly getOrThrow: (id: string, message?: string) => Promise, - /** - * Returns the entity AccountBalance from the storage by ID. - * If the entity is not found, creates it using provided parameters and returns it. - */ - readonly getOrCreate: (entity: Entities.AccountBalance_t) => Promise, - /** - * Set the entity AccountBalance in the storage. - */ - readonly set: (entity: Entities.AccountBalance_t) => void, - /** - * Delete the entity AccountBalance from the storage. - * - * The 'deleteUnsafe' method is experimental and unsafe. You should manually handle all entity references after deletion to maintain database consistency. - */ - readonly deleteUnsafe: (id: string) => void, - } - readonly AccountPower: { - /** - * Load the entity AccountPower from the storage by ID. - * If the entity is not found, returns undefined. - */ - readonly get: (id: string) => Promise, - /** - * Load the entity AccountPower from the storage by ID. - * If the entity is not found, throws an error. 
- */ - readonly getOrThrow: (id: string, message?: string) => Promise, - /** - * Returns the entity AccountPower from the storage by ID. - * If the entity is not found, creates it using provided parameters and returns it. - */ - readonly getOrCreate: (entity: Entities.AccountPower_t) => Promise, - /** - * Set the entity AccountPower in the storage. - */ - readonly set: (entity: Entities.AccountPower_t) => void, - /** - * Delete the entity AccountPower from the storage. - * - * The 'deleteUnsafe' method is experimental and unsafe. You should manually handle all entity references after deletion to maintain database consistency. - */ - readonly deleteUnsafe: (id: string) => void, - } - readonly BalanceHistory: { - /** - * Load the entity BalanceHistory from the storage by ID. - * If the entity is not found, returns undefined. - */ - readonly get: (id: string) => Promise, - /** - * Load the entity BalanceHistory from the storage by ID. - * If the entity is not found, throws an error. - */ - readonly getOrThrow: (id: string, message?: string) => Promise, - /** - * Returns the entity BalanceHistory from the storage by ID. - * If the entity is not found, creates it using provided parameters and returns it. - */ - readonly getOrCreate: (entity: Entities.BalanceHistory_t) => Promise, - /** - * Set the entity BalanceHistory in the storage. - */ - readonly set: (entity: Entities.BalanceHistory_t) => void, - /** - * Delete the entity BalanceHistory from the storage. - * - * The 'deleteUnsafe' method is experimental and unsafe. You should manually handle all entity references after deletion to maintain database consistency. - */ - readonly deleteUnsafe: (id: string) => void, - } - readonly DaoMetricsDayBucket: { - /** - * Load the entity DaoMetricsDayBucket from the storage by ID. - * If the entity is not found, returns undefined. - */ - readonly get: (id: string) => Promise, - /** - * Load the entity DaoMetricsDayBucket from the storage by ID. 
- * If the entity is not found, throws an error. - */ - readonly getOrThrow: (id: string, message?: string) => Promise, - /** - * Returns the entity DaoMetricsDayBucket from the storage by ID. - * If the entity is not found, creates it using provided parameters and returns it. - */ - readonly getOrCreate: (entity: Entities.DaoMetricsDayBucket_t) => Promise, - /** - * Set the entity DaoMetricsDayBucket in the storage. - */ - readonly set: (entity: Entities.DaoMetricsDayBucket_t) => void, - /** - * Delete the entity DaoMetricsDayBucket from the storage. - * - * The 'deleteUnsafe' method is experimental and unsafe. You should manually handle all entity references after deletion to maintain database consistency. - */ - readonly deleteUnsafe: (id: string) => void, - } - readonly Delegation: { - /** - * Load the entity Delegation from the storage by ID. - * If the entity is not found, returns undefined. - */ - readonly get: (id: string) => Promise, - /** - * Load the entity Delegation from the storage by ID. - * If the entity is not found, throws an error. - */ - readonly getOrThrow: (id: string, message?: string) => Promise, - /** - * Returns the entity Delegation from the storage by ID. - * If the entity is not found, creates it using provided parameters and returns it. - */ - readonly getOrCreate: (entity: Entities.Delegation_t) => Promise, - /** - * Set the entity Delegation in the storage. - */ - readonly set: (entity: Entities.Delegation_t) => void, - /** - * Delete the entity Delegation from the storage. - * - * The 'deleteUnsafe' method is experimental and unsafe. You should manually handle all entity references after deletion to maintain database consistency. - */ - readonly deleteUnsafe: (id: string) => void, - } - readonly FeedEvent: { - /** - * Load the entity FeedEvent from the storage by ID. - * If the entity is not found, returns undefined. - */ - readonly get: (id: string) => Promise, - /** - * Load the entity FeedEvent from the storage by ID. 
- * If the entity is not found, throws an error. - */ - readonly getOrThrow: (id: string, message?: string) => Promise, - /** - * Returns the entity FeedEvent from the storage by ID. - * If the entity is not found, creates it using provided parameters and returns it. - */ - readonly getOrCreate: (entity: Entities.FeedEvent_t) => Promise, - /** - * Set the entity FeedEvent in the storage. - */ - readonly set: (entity: Entities.FeedEvent_t) => void, - /** - * Delete the entity FeedEvent from the storage. - * - * The 'deleteUnsafe' method is experimental and unsafe. You should manually handle all entity references after deletion to maintain database consistency. - */ - readonly deleteUnsafe: (id: string) => void, - } - readonly ProposalOnchain: { - /** - * Load the entity ProposalOnchain from the storage by ID. - * If the entity is not found, returns undefined. - */ - readonly get: (id: string) => Promise, - /** - * Load the entity ProposalOnchain from the storage by ID. - * If the entity is not found, throws an error. - */ - readonly getOrThrow: (id: string, message?: string) => Promise, - /** - * Returns the entity ProposalOnchain from the storage by ID. - * If the entity is not found, creates it using provided parameters and returns it. - */ - readonly getOrCreate: (entity: Entities.ProposalOnchain_t) => Promise, - /** - * Set the entity ProposalOnchain in the storage. - */ - readonly set: (entity: Entities.ProposalOnchain_t) => void, - /** - * Delete the entity ProposalOnchain from the storage. - * - * The 'deleteUnsafe' method is experimental and unsafe. You should manually handle all entity references after deletion to maintain database consistency. - */ - readonly deleteUnsafe: (id: string) => void, - } - readonly Token: { - /** - * Load the entity Token from the storage by ID. - * If the entity is not found, returns undefined. - */ - readonly get: (id: string) => Promise, - /** - * Load the entity Token from the storage by ID. 
- * If the entity is not found, throws an error. - */ - readonly getOrThrow: (id: string, message?: string) => Promise, - /** - * Returns the entity Token from the storage by ID. - * If the entity is not found, creates it using provided parameters and returns it. - */ - readonly getOrCreate: (entity: Entities.Token_t) => Promise, - /** - * Set the entity Token in the storage. - */ - readonly set: (entity: Entities.Token_t) => void, - /** - * Delete the entity Token from the storage. - * - * The 'deleteUnsafe' method is experimental and unsafe. You should manually handle all entity references after deletion to maintain database consistency. - */ - readonly deleteUnsafe: (id: string) => void, - } - readonly TokenPrice: { - /** - * Load the entity TokenPrice from the storage by ID. - * If the entity is not found, returns undefined. - */ - readonly get: (id: string) => Promise, - /** - * Load the entity TokenPrice from the storage by ID. - * If the entity is not found, throws an error. - */ - readonly getOrThrow: (id: string, message?: string) => Promise, - /** - * Returns the entity TokenPrice from the storage by ID. - * If the entity is not found, creates it using provided parameters and returns it. - */ - readonly getOrCreate: (entity: Entities.TokenPrice_t) => Promise, - /** - * Set the entity TokenPrice in the storage. - */ - readonly set: (entity: Entities.TokenPrice_t) => void, - /** - * Delete the entity TokenPrice from the storage. - * - * The 'deleteUnsafe' method is experimental and unsafe. You should manually handle all entity references after deletion to maintain database consistency. - */ - readonly deleteUnsafe: (id: string) => void, - } - readonly Transaction: { - /** - * Load the entity Transaction from the storage by ID. - * If the entity is not found, returns undefined. - */ - readonly get: (id: string) => Promise, - /** - * Load the entity Transaction from the storage by ID. - * If the entity is not found, throws an error. 
- */ - readonly getOrThrow: (id: string, message?: string) => Promise, - /** - * Returns the entity Transaction from the storage by ID. - * If the entity is not found, creates it using provided parameters and returns it. - */ - readonly getOrCreate: (entity: Entities.Transaction_t) => Promise, - /** - * Set the entity Transaction in the storage. - */ - readonly set: (entity: Entities.Transaction_t) => void, - /** - * Delete the entity Transaction from the storage. - * - * The 'deleteUnsafe' method is experimental and unsafe. You should manually handle all entity references after deletion to maintain database consistency. - */ - readonly deleteUnsafe: (id: string) => void, - } - readonly Transfer: { - /** - * Load the entity Transfer from the storage by ID. - * If the entity is not found, returns undefined. - */ - readonly get: (id: string) => Promise, - /** - * Load the entity Transfer from the storage by ID. - * If the entity is not found, throws an error. - */ - readonly getOrThrow: (id: string, message?: string) => Promise, - /** - * Returns the entity Transfer from the storage by ID. - * If the entity is not found, creates it using provided parameters and returns it. - */ - readonly getOrCreate: (entity: Entities.Transfer_t) => Promise, - /** - * Set the entity Transfer in the storage. - */ - readonly set: (entity: Entities.Transfer_t) => void, - /** - * Delete the entity Transfer from the storage. - * - * The 'deleteUnsafe' method is experimental and unsafe. You should manually handle all entity references after deletion to maintain database consistency. - */ - readonly deleteUnsafe: (id: string) => void, - } - readonly VoteOnchain: { - /** - * Load the entity VoteOnchain from the storage by ID. - * If the entity is not found, returns undefined. - */ - readonly get: (id: string) => Promise, - /** - * Load the entity VoteOnchain from the storage by ID. - * If the entity is not found, throws an error. 
- */ - readonly getOrThrow: (id: string, message?: string) => Promise, - /** - * Returns the entity VoteOnchain from the storage by ID. - * If the entity is not found, creates it using provided parameters and returns it. - */ - readonly getOrCreate: (entity: Entities.VoteOnchain_t) => Promise, - /** - * Set the entity VoteOnchain in the storage. - */ - readonly set: (entity: Entities.VoteOnchain_t) => void, - /** - * Delete the entity VoteOnchain from the storage. - * - * The 'deleteUnsafe' method is experimental and unsafe. You should manually handle all entity references after deletion to maintain database consistency. - */ - readonly deleteUnsafe: (id: string) => void, - } - readonly VotingPowerHistory: { - /** - * Load the entity VotingPowerHistory from the storage by ID. - * If the entity is not found, returns undefined. - */ - readonly get: (id: string) => Promise, - /** - * Load the entity VotingPowerHistory from the storage by ID. - * If the entity is not found, throws an error. - */ - readonly getOrThrow: (id: string, message?: string) => Promise, - /** - * Returns the entity VotingPowerHistory from the storage by ID. - * If the entity is not found, creates it using provided parameters and returns it. - */ - readonly getOrCreate: (entity: Entities.VotingPowerHistory_t) => Promise, - /** - * Set the entity VotingPowerHistory in the storage. - */ - readonly set: (entity: Entities.VotingPowerHistory_t) => void, - /** - * Delete the entity VotingPowerHistory from the storage. - * - * The 'deleteUnsafe' method is experimental and unsafe. You should manually handle all entity references after deletion to maintain database consistency. 
- */ - readonly deleteUnsafe: (id: string) => void, - } -}; diff --git a/apps/hypersync-indexer/generated/src/UserContext.res b/apps/hypersync-indexer/generated/src/UserContext.res deleted file mode 100644 index 3101e4e18..000000000 --- a/apps/hypersync-indexer/generated/src/UserContext.res +++ /dev/null @@ -1,360 +0,0 @@ -let codegenHelpMessage = `Rerun 'pnpm dev' to update generated code after schema.graphql changes.` - -type contextParams = { - item: Internal.item, - checkpointId: int, - inMemoryStore: InMemoryStore.t, - loadManager: LoadManager.t, - persistence: Persistence.t, - isPreload: bool, - shouldSaveHistory: bool, - chains: Internal.chains, - mutable isResolved: bool, -} - -// We don't want to expose the params to the user -// so instead of storing _params on the context object, -// we use an external WeakMap -let paramsByThis: Utils.WeakMap.t = Utils.WeakMap.make() - -let effectContextPrototype = %raw(`Object.create(null)`) -Utils.Object.defineProperty( - effectContextPrototype, - "log", - { - get: () => { - (paramsByThis->Utils.WeakMap.unsafeGet(%raw(`this`))).item->Logging.getUserLogger - }, - }, -) -%%raw(` -var EffectContext = function(params, defaultShouldCache, callEffect) { - paramsByThis.set(this, params); - this.effect = callEffect; - this.cache = defaultShouldCache; -}; -EffectContext.prototype = effectContextPrototype; -`) - -@new -external makeEffectContext: ( - contextParams, - ~defaultShouldCache: bool, - ~callEffect: (Internal.effect, Internal.effectInput) => promise, -) => Internal.effectContext = "EffectContext" - -let initEffect = (params: contextParams) => { - let rec callEffect = (effect: Internal.effect, input: Internal.effectInput) => { - let effectContext = makeEffectContext( - params, - ~defaultShouldCache=effect.defaultShouldCache, - ~callEffect, - ) - let effectArgs: Internal.effectArgs = { - input, - context: effectContext, - cacheKey: input->S.reverseConvertOrThrow(effect.input)->Utils.Hash.makeOrThrow, - } - 
LoadLayer.loadEffect( - ~loadManager=params.loadManager, - ~persistence=params.persistence, - ~effect, - ~effectArgs, - ~inMemoryStore=params.inMemoryStore, - ~shouldGroup=params.isPreload, - ~item=params.item, - ) - } - callEffect -} - -type entityContextParams = { - ...contextParams, - entityConfig: Internal.entityConfig, -} - -let getWhereTraps: Utils.Proxy.traps = { - get: (~target as params, ~prop: unknown) => { - let entityConfig = params.entityConfig - if prop->Js.typeof !== "string" { - Js.Exn.raiseError( - `Invalid context.${entityConfig.name}.getWhere access by a non-string property.`, - ) - } else { - let dbFieldName = prop->(Utils.magic: unknown => string) - switch entityConfig.table->Table.getFieldByDbName(dbFieldName) { - | None => - Js.Exn.raiseError( - `Invalid context.${entityConfig.name}.getWhere.${dbFieldName} - the field doesn't exist. ${codegenHelpMessage}`, - ) - | Some(field) => - let fieldValueSchema = switch field { - | Field({fieldSchema}) => fieldSchema - | DerivedFrom(_) => S.string->S.toUnknown - } - { - Entities.eq: fieldValue => - LoadLayer.loadByField( - ~loadManager=params.loadManager, - ~persistence=params.persistence, - ~operator=Eq, - ~entityConfig, - ~fieldName=dbFieldName, - ~fieldValueSchema, - ~inMemoryStore=params.inMemoryStore, - ~shouldGroup=params.isPreload, - ~item=params.item, - ~fieldValue, - ), - gt: fieldValue => - LoadLayer.loadByField( - ~loadManager=params.loadManager, - ~persistence=params.persistence, - ~operator=Gt, - ~entityConfig, - ~fieldName=dbFieldName, - ~fieldValueSchema, - ~inMemoryStore=params.inMemoryStore, - ~shouldGroup=params.isPreload, - ~item=params.item, - ~fieldValue, - ), - lt: fieldValue => - LoadLayer.loadByField( - ~loadManager=params.loadManager, - ~persistence=params.persistence, - ~operator=Lt, - ~entityConfig, - ~fieldName=dbFieldName, - ~fieldValueSchema, - ~inMemoryStore=params.inMemoryStore, - ~shouldGroup=params.isPreload, - ~item=params.item, - ~fieldValue, - ), - }->Utils.magic - 
} - } - }, -} - -let noopSet = (_entity: Internal.entity) => () -let noopDeleteUnsafe = (_entityId: string) => () - -let entityTraps: Utils.Proxy.traps = { - get: (~target as params, ~prop: unknown) => { - let prop = prop->(Utils.magic: unknown => string) - - let set = params.isPreload - ? noopSet - : (entity: Internal.entity) => { - params.inMemoryStore - ->InMemoryStore.getInMemTable(~entityConfig=params.entityConfig) - ->InMemoryTable.Entity.set( - { - entityId: entity.id, - checkpointId: params.checkpointId, - entityUpdateAction: Set(entity), - }, - ~shouldSaveHistory=params.shouldSaveHistory, - ) - } - - switch prop { - | "get" => - ( - entityId => - LoadLayer.loadById( - ~loadManager=params.loadManager, - ~persistence=params.persistence, - ~entityConfig=params.entityConfig, - ~inMemoryStore=params.inMemoryStore, - ~shouldGroup=params.isPreload, - ~item=params.item, - ~entityId, - ) - )->Utils.magic - | "getWhere" => params->Utils.Proxy.make(getWhereTraps)->Utils.magic - | "getOrThrow" => - ( - (entityId, ~message=?) 
=> - LoadLayer.loadById( - ~loadManager=params.loadManager, - ~persistence=params.persistence, - ~entityConfig=params.entityConfig, - ~inMemoryStore=params.inMemoryStore, - ~shouldGroup=params.isPreload, - ~item=params.item, - ~entityId, - )->Promise.thenResolve(entity => { - switch entity { - | Some(entity) => entity - | None => - Js.Exn.raiseError( - message->Belt.Option.getWithDefault( - `Entity '${params.entityConfig.name}' with ID '${entityId}' is expected to exist.`, - ), - ) - } - }) - )->Utils.magic - | "getOrCreate" => - ( - (entity: Internal.entity) => - LoadLayer.loadById( - ~loadManager=params.loadManager, - ~persistence=params.persistence, - ~entityConfig=params.entityConfig, - ~inMemoryStore=params.inMemoryStore, - ~shouldGroup=params.isPreload, - ~item=params.item, - ~entityId=entity.id, - )->Promise.thenResolve(storageEntity => { - switch storageEntity { - | Some(entity) => entity - | None => { - set(entity) - entity - } - } - }) - )->Utils.magic - | "set" => set->Utils.magic - | "deleteUnsafe" => - if params.isPreload { - noopDeleteUnsafe - } else { - entityId => { - params.inMemoryStore - ->InMemoryStore.getInMemTable(~entityConfig=params.entityConfig) - ->InMemoryTable.Entity.set( - { - entityId, - checkpointId: params.checkpointId, - entityUpdateAction: Delete, - }, - ~shouldSaveHistory=params.shouldSaveHistory, - ) - } - }->Utils.magic - | _ => Js.Exn.raiseError(`Invalid context.${params.entityConfig.name}.${prop} operation.`) - } - }, -} - -let handlerTraps: Utils.Proxy.traps = { - get: (~target as params, ~prop: unknown) => { - let prop = prop->(Utils.magic: unknown => string) - if params.isResolved { - Utils.Error.make( - `Impossible to access context.${prop} after the handler is resolved. Make sure you didn't miss an await in the handler.`, - )->ErrorHandling.mkLogAndRaise(~logger=params.item->Logging.getItemLogger) - } - switch prop { - | "log" => - (params.isPreload ? 
Logging.noopLogger : params.item->Logging.getUserLogger)->Utils.magic - | "effect" => - initEffect((params :> contextParams))->( - Utils.magic: ( - (Internal.effect, Internal.effectInput) => promise - ) => unknown - ) - - | "isPreload" => params.isPreload->Utils.magic - | "chains" => params.chains->Utils.magic - | _ => - switch Entities.byName->Utils.Dict.dangerouslyGetNonOption(prop) { - | Some(entityConfig) => - { - item: params.item, - isPreload: params.isPreload, - inMemoryStore: params.inMemoryStore, - loadManager: params.loadManager, - persistence: params.persistence, - shouldSaveHistory: params.shouldSaveHistory, - checkpointId: params.checkpointId, - chains: params.chains, - isResolved: params.isResolved, - entityConfig, - } - ->Utils.Proxy.make(entityTraps) - ->Utils.magic - | None => - Js.Exn.raiseError(`Invalid context access by '${prop}' property. ${codegenHelpMessage}`) - } - } - }, -} - -let getHandlerContext = (params: contextParams): Internal.handlerContext => { - params->Utils.Proxy.make(handlerTraps)->Utils.magic -} - -// Contract register context creation -type contractRegisterParams = { - item: Internal.item, - onRegister: ( - ~item: Internal.item, - ~contractAddress: Address.t, - ~contractName: Enums.ContractType.t, - ) => unit, - config: Config.t, - mutable isResolved: bool, -} - -let contractRegisterTraps: Utils.Proxy.traps = { - get: (~target as params, ~prop: unknown) => { - let prop = prop->(Utils.magic: unknown => string) - if params.isResolved { - Utils.Error.make( - `Impossible to access context.${prop} after the contract register is resolved. 
Make sure you didn't miss an await in the handler.`, - )->ErrorHandling.mkLogAndRaise(~logger=params.item->Logging.getItemLogger) - } - switch prop { - | "log" => params.item->Logging.getUserLogger->Utils.magic - | _ => - // Use the pre-built mapping for efficient lookup - switch params.config.addContractNameToContractNameMapping->Utils.Dict.dangerouslyGetNonOption( - prop, - ) { - | Some(contractName) => { - let addFunction = (contractAddress: Address.t) => { - let validatedAddress = if params.config.ecosystem === Evm { - // The value is passed from the user-land, - // so we need to validate and checksum/lowercase the address. - if params.config.lowercaseAddresses { - contractAddress->Address.Evm.fromAddressLowercaseOrThrow - } else { - contractAddress->Address.Evm.fromAddressOrThrow - } - } else { - // TODO: Ideally we should do the same for other ecosystems - contractAddress - } - - params.onRegister( - ~item=params.item, - ~contractAddress=validatedAddress, - ~contractName=contractName->(Utils.magic: string => Enums.ContractType.t), - ) - } - - addFunction->Utils.magic - } - | None => - Js.Exn.raiseError(`Invalid context access by '${prop}' property. 
${codegenHelpMessage}`) - } - } - }, -} - -let getContractRegisterContext = (params: contractRegisterParams) => { - params - ->Utils.Proxy.make(contractRegisterTraps) - ->Utils.magic -} - -let getContractRegisterArgs = (params: contractRegisterParams): Internal.contractRegisterArgs => { - event: (params.item->Internal.castUnsafeEventItem).event, - context: getContractRegisterContext(params), -} diff --git a/apps/hypersync-indexer/generated/src/bindings/Dotenv.res b/apps/hypersync-indexer/generated/src/bindings/Dotenv.res deleted file mode 100644 index dffee86dc..000000000 --- a/apps/hypersync-indexer/generated/src/bindings/Dotenv.res +++ /dev/null @@ -1,17 +0,0 @@ -type config = {path?: string} -type envRes - -@module("dotenv") external config: config => envRes = "config" - -module Utils = { - type require = {resolve: string => string} - external require: require = "require" - - let getEnvFilePath = () => - switch require.resolve(`../../${Path.relativePathToRootFromGenerated}/.env`) { - | path => Some(path) - | exception _exn => None - } -} - -let initialize = () => config({path: ?Utils.getEnvFilePath()})->ignore diff --git a/apps/hypersync-indexer/generated/src/bindings/Ethers.gen.ts b/apps/hypersync-indexer/generated/src/bindings/Ethers.gen.ts deleted file mode 100644 index abe963235..000000000 --- a/apps/hypersync-indexer/generated/src/bindings/Ethers.gen.ts +++ /dev/null @@ -1,15 +0,0 @@ -/* -Reexport the types to keep backward compatibility -*/ - -/* eslint-disable */ -/* tslint:disable */ - -import type { t as Address_t } from "envio/src/Address.gen"; -export type { - Addresses_mockAddresses, - Addresses_defaultAddress, - Addresses, -} from "envio/src/bindings/Ethers.gen"; - -export type ethAddress = Address_t; diff --git a/apps/hypersync-indexer/generated/src/bindings/OpaqueTypes.ts b/apps/hypersync-indexer/generated/src/bindings/OpaqueTypes.ts deleted file mode 100644 index 285ec704a..000000000 --- 
a/apps/hypersync-indexer/generated/src/bindings/OpaqueTypes.ts +++ /dev/null @@ -1,5 +0,0 @@ -export type EthersAddress = string; -export type Address = string; -export type Nullable = null | T; -export type SingleOrMultiple = T | T[]; -export type HandlerWithOptions = (fn: Fn, opt?: Opts) => void; diff --git a/apps/hypersync-indexer/generated/src/bindings/RescriptMocha.res b/apps/hypersync-indexer/generated/src/bindings/RescriptMocha.res deleted file mode 100644 index 52f857d5e..000000000 --- a/apps/hypersync-indexer/generated/src/bindings/RescriptMocha.res +++ /dev/null @@ -1,123 +0,0 @@ -module Assert = { - type assertion<'a> = ('a, 'a, ~message: string=?) => unit - - @module("assert") external equal: assertion<'a> = "equal" - @module("assert") external notEqual: assertion<'a> = "notEqual" - - @module("assert") external deepEqual: assertion<'a> = "deepEqual" - @module("assert") - external notDeepEqual: assertion<'a> = "notDeepEqual" - - @module("assert") external strictEqual: assertion<'a> = "strictEqual" - @module("assert") - external notStrictEqual: assertion<'a> = "notStrictEqual" - - @module("assert") - external deepStrictEqual: assertion<'a> = "deepStrictEqual" - @module("assert") - external notDeepStrictEqual: assertion<'a> = "notDeepStrictEqual" - - @module("assert") external ifError: 'a => unit = "ifError" - - @module("assert") - external throws: (unit => 'a, ~error: 'error=?, ~message: string=?) => unit = "throws" - @module("assert") - external doesNotThrow: (unit => 'a, ~error: 'error=?, ~message: string=?) => unit = "doesNotThrow" - - @module("assert") - external rejects: (unit => promise<'a>, ~error: 'error=?, ~message: string=?) => promise = - "rejects" - - @module("assert") external ok: (bool, ~message: string=?) 
=> unit = "ok" - @module("assert") external fail: string => 'a = "fail" -} - -/* Mocha bindings on `this` for `describe` and `it` functions */ -module This = { - @val external timeout: int => unit = "this.timeout" - @val external retries: int => unit = "this.retries" - @val external slow: int => unit = "this.slow" - @val external skip: unit => unit = "this.skip" -} - -@val -external describe: (string, unit => unit) => unit = "describe" -@val -external describe_only: (string, unit => unit) => unit = "describe.only" -@val -external describe_skip: (string, unit => unit) => unit = "describe.skip" - -@val -external it: (string, unit => unit) => unit = "it" -@val -external it_only: (string, unit => unit) => unit = "it.only" -@val -external it_skip: (string, unit => unit) => unit = "it.skip" -@val -external before: (unit => unit) => unit = "before" -@val -external after: (unit => unit) => unit = "after" -@val -external beforeEach: (unit => unit) => unit = "beforeEach" -@val -external afterEach: (unit => unit) => unit = "afterEach" -@val -external beforeWithTitle: (string, unit => unit) => unit = "before" -@val -external afterWithTitle: (string, unit => unit) => unit = "after" -@val -external beforeEachWithTitle: (string, unit => unit) => unit = "beforeEach" -@val -external afterEachWithTitle: (string, unit => unit) => unit = "afterEach" - -module Async = { - @val - external it: (string, unit => promise) => unit = "it" - @val - external it_only: (string, unit => promise) => unit = "it.only" - @val - external it_skip: (string, unit => promise) => unit = "it.skip" - @val - external before: (unit => promise) => unit = "before" - @val - external after: (unit => promise) => unit = "after" - @val - external beforeEach: (unit => promise) => unit = "beforeEach" - @val - external afterEach: (unit => promise) => unit = "afterEach" - @val - external beforeWithTitle: (string, unit => promise) => unit = "before" - @val - external afterWithTitle: (string, unit => promise) => unit = 
"after" - @val - external beforeEachWithTitle: (string, unit => promise) => unit = "beforeEach" - @val - external afterEachWithTitle: (string, unit => promise) => unit = "afterEach" -} - -module DoneCallback = { - type doneCallback = Js.Nullable.t => unit - - @val - external it: (string, doneCallback => unit) => unit = "it" - @val - external it_only: (string, doneCallback => unit) => unit = "it.only" - @val - external it_skip: (string, doneCallback => unit) => unit = "it.skip" - @val - external before: (doneCallback => unit) => unit = "before" - @val - external after: (doneCallback => unit) => unit = "after" - @val - external beforeEach: (doneCallback => unit) => unit = "beforeEach" - @val - external afterEach: (doneCallback => unit) => unit = "afterEach" - @val - external beforeWithTitle: (string, doneCallback => unit) => unit = "before" - @val - external afterWithTitle: (string, doneCallback => unit) => unit = "after" - @val - external beforeEachWithTitle: (string, doneCallback => unit) => unit = "beforeEach" - @val - external afterEachWithTitle: (string, doneCallback => unit) => unit = "afterEach" -} diff --git a/apps/hypersync-indexer/generated/src/bindings/Yargs.res b/apps/hypersync-indexer/generated/src/bindings/Yargs.res deleted file mode 100644 index 2df5064ac..000000000 --- a/apps/hypersync-indexer/generated/src/bindings/Yargs.res +++ /dev/null @@ -1,8 +0,0 @@ -type arg = string - -type parsedArgs<'a> = 'a - -@module external yargs: array => parsedArgs<'a> = "yargs/yargs" -@module("yargs/helpers") external hideBin: array => array = "hideBin" - -@get external argv: parsedArgs<'a> => 'a = "argv" diff --git a/apps/hypersync-indexer/generated/src/db/Db.res b/apps/hypersync-indexer/generated/src/db/Db.res deleted file mode 100644 index 571ee1a05..000000000 --- a/apps/hypersync-indexer/generated/src/db/Db.res +++ /dev/null @@ -1,30 +0,0 @@ -// This is a module with all the global configuration of the DB -// Ideally it should be moved to the config and passed 
with it - -let makeClient = () => { - Postgres.makeSql( - ~config={ - host: Env.Db.host, - port: Env.Db.port, - username: Env.Db.user, - password: Env.Db.password, - database: Env.Db.database, - ssl: Env.Db.ssl, - // TODO: think how we want to pipe these logs to pino. - onnotice: ?( - Env.userLogLevel == #warn || Env.userLogLevel == #error ? None : Some(_str => ()) - ), - transform: {undefined: Null}, - max: 2, - // debug: (~connection, ~query, ~params as _, ~types as _) => Js.log2(connection, query), - }, - ) -} - -let publicSchema = Env.Db.publicSchema - -let allEntityTables: array = Entities.allEntities->Belt.Array.map(entityConfig => { - entityConfig.table -}) - -let schema = Schema.make(allEntityTables) diff --git a/apps/hypersync-indexer/generated/src/db/DbFunctions.res b/apps/hypersync-indexer/generated/src/db/DbFunctions.res deleted file mode 100644 index f4d6ab967..000000000 --- a/apps/hypersync-indexer/generated/src/db/DbFunctions.res +++ /dev/null @@ -1,33 +0,0 @@ -module General = { - type existsRes = {exists: bool} - - let hasRows = async (sql, ~table: Table.table) => { - let query = `SELECT EXISTS(SELECT 1 FROM "${Env.Db.publicSchema}"."${table.tableName}");` - switch await sql->Postgres.unsafe(query) { - | [{exists}] => exists - | _ => Js.Exn.raiseError("Unexpected result from hasRows query: " ++ query) - } - } -} - -module EntityHistory = { - let hasRows = async sql => { - let all = - await Entities.allEntities - ->Belt.Array.map(async entityConfig => { - try await General.hasRows(sql, ~table=entityConfig.entityHistory.table) catch { - | exn => - exn->ErrorHandling.mkLogAndRaise( - ~msg=`Failed to check if entity history table has rows`, - ~logger=Logging.createChild( - ~params={ - "entityName": entityConfig.name, - }, - ), - ) - } - }) - ->Promise.all - all->Belt.Array.some(v => v) - } -} diff --git a/apps/hypersync-indexer/generated/src/db/DbFunctionsEntities.res b/apps/hypersync-indexer/generated/src/db/DbFunctionsEntities.res deleted file mode 
100644 index f4b1564e2..000000000 --- a/apps/hypersync-indexer/generated/src/db/DbFunctionsEntities.res +++ /dev/null @@ -1,22 +0,0 @@ -type id = string - -@module("./DbFunctionsImplementation.js") -external batchDeleteItemsInTable: ( - ~table: Table.table, - ~sql: Postgres.sql, - ~ids: array, -) => promise = "batchDeleteItemsInTable" - -let makeBatchDelete = (~table) => async (~logger=?, sql, ids) => - switch await batchDeleteItemsInTable(~table, ~sql, ~ids) { - | exception exn => - exn->ErrorHandling.mkLogAndRaise( - ~logger?, - ~msg=`Failed during batch delete of entity ${table.tableName}`, - ) - | res => res - } - -let batchDelete = (~entityConfig: Internal.entityConfig) => { - makeBatchDelete(~table=entityConfig.table) -} \ No newline at end of file diff --git a/apps/hypersync-indexer/generated/src/db/DbFunctionsImplementation.js b/apps/hypersync-indexer/generated/src/db/DbFunctionsImplementation.js deleted file mode 100644 index d36a66914..000000000 --- a/apps/hypersync-indexer/generated/src/db/DbFunctionsImplementation.js +++ /dev/null @@ -1,17 +0,0 @@ -const TableModule = require("envio/src/db/Table.res.js"); -const { publicSchema } = require("./Db.res.js"); - -module.exports.batchDeleteItemsInTable = (table, sql, pkArray) => { - const primaryKeyFieldNames = TableModule.getPrimaryKeyFieldNames(table); - - if (primaryKeyFieldNames.length === 1) { - return sql` - DELETE - FROM ${sql(publicSchema)}.${sql(table.tableName)} - WHERE ${sql(primaryKeyFieldNames[0])} IN ${sql(pkArray)}; - `; - } else { - //TODO, if needed create a delete query for multiple field matches - //May be best to make pkArray an array of objects with fieldName -> value - } -}; diff --git a/apps/hypersync-indexer/generated/src/db/Entities.gen.ts b/apps/hypersync-indexer/generated/src/db/Entities.gen.ts deleted file mode 100644 index 9625ebee0..000000000 --- a/apps/hypersync-indexer/generated/src/db/Entities.gen.ts +++ /dev/null @@ -1,233 +0,0 @@ -/* TypeScript file generated from 
Entities.res by genType. */ - -/* eslint-disable */ -/* tslint:disable */ - -import type {EventType_t as Enums_EventType_t} from './Enums.gen'; - -import type {Json_t as Js_Json_t} from '../../src/Js.shim'; - -import type {MetricType_t as Enums_MetricType_t} from './Enums.gen'; - -export type id = string; - -export type whereOperations = { - readonly eq: (_1:fieldType) => Promise; - readonly gt: (_1:fieldType) => Promise; - readonly lt: (_1:fieldType) => Promise -}; - -export type Account_t = { readonly id: id }; - -export type Account_indexedFieldOperations = {}; - -export type AccountBalance_t = { - readonly accountId: string; - readonly balance: bigint; - readonly delegate: string; - readonly id: id; - readonly tokenId: string -}; - -export type AccountBalance_indexedFieldOperations = { readonly accountId: whereOperations; readonly tokenId: whereOperations }; - -export type AccountPower_t = { - readonly accountId: string; - readonly daoId: string; - readonly delegationsCount: number; - readonly id: id; - readonly lastVoteTimestamp: bigint; - readonly proposalsCount: number; - readonly votesCount: number; - readonly votingPower: bigint -}; - -export type AccountPower_indexedFieldOperations = { readonly accountId: whereOperations }; - -export type BalanceHistory_t = { - readonly accountId: string; - readonly balance: bigint; - readonly daoId: string; - readonly delta: bigint; - readonly deltaMod: bigint; - readonly id: id; - readonly logIndex: number; - readonly timestamp: bigint; - readonly transactionHash: string -}; - -export type BalanceHistory_indexedFieldOperations = { readonly accountId: whereOperations; readonly transactionHash: whereOperations }; - -export type DaoMetricsDayBucket_t = { - readonly average: bigint; - readonly closeValue: bigint; - readonly count: number; - readonly daoId: string; - readonly date: bigint; - readonly high: bigint; - readonly id: id; - readonly lastUpdate: bigint; - readonly low: bigint; - readonly metricType: 
Enums_MetricType_t; - readonly openValue: bigint; - readonly tokenId: string; - readonly volume: bigint -}; - -export type DaoMetricsDayBucket_indexedFieldOperations = { readonly tokenId: whereOperations }; - -export type Delegation_t = { - readonly daoId: string; - readonly delegateAccountId: string; - readonly delegatedValue: bigint; - readonly delegationType: (undefined | number); - readonly delegatorAccountId: string; - readonly id: id; - readonly isCex: boolean; - readonly isDex: boolean; - readonly isLending: boolean; - readonly isTotal: boolean; - readonly logIndex: number; - readonly previousDelegate: (undefined | string); - readonly timestamp: bigint; - readonly transactionHash: string -}; - -export type Delegation_indexedFieldOperations = { - readonly delegateAccountId: whereOperations; - readonly delegatorAccountId: whereOperations; - readonly timestamp: whereOperations; - readonly transactionHash: whereOperations -}; - -export type FeedEvent_t = { - readonly eventType: Enums_EventType_t; - readonly id: id; - readonly logIndex: number; - readonly metadata: (undefined | Js_Json_t); - readonly timestamp: bigint; - readonly txHash: string; - readonly value: bigint -}; - -export type FeedEvent_indexedFieldOperations = { - readonly timestamp: whereOperations; - readonly txHash: whereOperations; - readonly value: whereOperations -}; - -export type ProposalOnchain_t = { - readonly abstainVotes: bigint; - readonly againstVotes: bigint; - readonly calldatas: Js_Json_t; - readonly daoId: string; - readonly description: string; - readonly endBlock: number; - readonly endTimestamp: bigint; - readonly forVotes: bigint; - readonly id: id; - readonly logIndex: number; - readonly proposalType: (undefined | number); - readonly proposerAccountId: string; - readonly signatures: Js_Json_t; - readonly startBlock: number; - readonly status: string; - readonly targets: Js_Json_t; - readonly timestamp: bigint; - readonly title: string; - readonly txHash: string; - readonly 
values: Js_Json_t -}; - -export type ProposalOnchain_indexedFieldOperations = { readonly proposerAccountId: whereOperations }; - -export type Token_t = { - readonly cexSupply: bigint; - readonly circulatingSupply: bigint; - readonly decimals: number; - readonly delegatedSupply: bigint; - readonly dexSupply: bigint; - readonly id: id; - readonly lendingSupply: bigint; - readonly name: (undefined | string); - readonly nonCirculatingSupply: bigint; - readonly totalSupply: bigint; - readonly treasury: bigint -}; - -export type Token_indexedFieldOperations = {}; - -export type TokenPrice_t = { - readonly id: id; - readonly price: bigint; - readonly timestamp: bigint -}; - -export type TokenPrice_indexedFieldOperations = {}; - -export type Transaction_t = { - readonly fromAddress: (undefined | string); - readonly id: id; - readonly isCex: boolean; - readonly isDex: boolean; - readonly isLending: boolean; - readonly isTotal: boolean; - readonly timestamp: bigint; - readonly toAddress: (undefined | string); - readonly transactionHash: string -}; - -export type Transaction_indexedFieldOperations = {}; - -export type Transfer_t = { - readonly amount: bigint; - readonly daoId: string; - readonly fromAccountId: string; - readonly id: id; - readonly isCex: boolean; - readonly isDex: boolean; - readonly isLending: boolean; - readonly isTotal: boolean; - readonly logIndex: number; - readonly timestamp: bigint; - readonly toAccountId: string; - readonly tokenId: string; - readonly transactionHash: string -}; - -export type Transfer_indexedFieldOperations = { - readonly amount: whereOperations; - readonly fromAccountId: whereOperations; - readonly timestamp: whereOperations; - readonly toAccountId: whereOperations; - readonly tokenId: whereOperations; - readonly transactionHash: whereOperations -}; - -export type VoteOnchain_t = { - readonly daoId: string; - readonly id: id; - readonly proposalId: string; - readonly reason: (undefined | string); - readonly support: string; - 
readonly timestamp: bigint; - readonly txHash: string; - readonly voterAccountId: string; - readonly votingPower: bigint -}; - -export type VoteOnchain_indexedFieldOperations = { readonly proposalId: whereOperations; readonly voterAccountId: whereOperations }; - -export type VotingPowerHistory_t = { - readonly accountId: string; - readonly daoId: string; - readonly delta: bigint; - readonly deltaMod: bigint; - readonly id: id; - readonly logIndex: number; - readonly timestamp: bigint; - readonly transactionHash: string; - readonly votingPower: bigint -}; - -export type VotingPowerHistory_indexedFieldOperations = { readonly accountId: whereOperations; readonly transactionHash: whereOperations }; diff --git a/apps/hypersync-indexer/generated/src/db/Entities.res b/apps/hypersync-indexer/generated/src/db/Entities.res deleted file mode 100644 index 65fb1048d..000000000 --- a/apps/hypersync-indexer/generated/src/db/Entities.res +++ /dev/null @@ -1,2077 +0,0 @@ -open Table -open Enums.EntityType -type id = string - -type internalEntity = Internal.entity -module type Entity = { - type t - let index: int - let name: string - let schema: S.t - let rowsSchema: S.t> - let table: Table.table - let entityHistory: EntityHistory.t -} -external entityModToInternal: module(Entity with type t = 'a) => Internal.entityConfig = "%identity" -external entityModsToInternal: array => array = "%identity" -external entitiesToInternal: array<'a> => array = "%identity" - -@get -external getEntityId: internalEntity => string = "id" - -// Use InMemoryTable.Entity.getEntityIdUnsafe instead of duplicating the logic -let getEntityIdUnsafe = InMemoryTable.Entity.getEntityIdUnsafe - -//shorthand for punning -let isPrimaryKey = true -let isNullable = true -let isArray = true -let isIndex = true - -@genType -type whereOperations<'entity, 'fieldType> = { - eq: 'fieldType => promise>, - gt: 'fieldType => promise>, - lt: 'fieldType => promise> -} - -module Account = { - let name = (Account :> string) - let 
index = 0 - @genType - type t = { - id: id, - } - - let schema = S.object((s): t => { - id: s.field("id", S.string), - }) - - let rowsSchema = S.array(schema) - - @genType - type indexedFieldOperations = { - - } - - let table = mkTable( - (name :> string), - ~fields=[ - mkField( - "id", - Text, - ~fieldSchema=S.string, - ~isPrimaryKey, - - - - - ), - ], - ) - - let entityHistory = table->EntityHistory.fromTable(~schema, ~entityIndex=index) - - external castToInternal: t => Internal.entity = "%identity" -} - -module AccountBalance = { - let name = (AccountBalance :> string) - let index = 1 - @genType - type t = { - accountId: string, - balance: bigint, - delegate: string, - id: id, - tokenId: string, - } - - let schema = S.object((s): t => { - accountId: s.field("accountId", S.string), - balance: s.field("balance", BigInt.schema), - delegate: s.field("delegate", S.string), - id: s.field("id", S.string), - tokenId: s.field("tokenId", S.string), - }) - - let rowsSchema = S.array(schema) - - @genType - type indexedFieldOperations = { - - @as("accountId") accountId: whereOperations, - - @as("tokenId") tokenId: whereOperations, - - } - - let table = mkTable( - (name :> string), - ~fields=[ - mkField( - "accountId", - Text, - ~fieldSchema=S.string, - - - - ~isIndex, - - ), - mkField( - "balance", - Numeric, - ~fieldSchema=BigInt.schema, - - - - - - ), - mkField( - "delegate", - Text, - ~fieldSchema=S.string, - - - - - - ), - mkField( - "id", - Text, - ~fieldSchema=S.string, - ~isPrimaryKey, - - - - - ), - mkField( - "tokenId", - Text, - ~fieldSchema=S.string, - - - - ~isIndex, - - ), - ], - ) - - let entityHistory = table->EntityHistory.fromTable(~schema, ~entityIndex=index) - - external castToInternal: t => Internal.entity = "%identity" -} - -module AccountPower = { - let name = (AccountPower :> string) - let index = 2 - @genType - type t = { - accountId: string, - daoId: string, - delegationsCount: int, - id: id, - lastVoteTimestamp: bigint, - proposalsCount: int, - 
votesCount: int, - votingPower: bigint, - } - - let schema = S.object((s): t => { - accountId: s.field("accountId", S.string), - daoId: s.field("daoId", S.string), - delegationsCount: s.field("delegationsCount", S.int), - id: s.field("id", S.string), - lastVoteTimestamp: s.field("lastVoteTimestamp", BigInt.schema), - proposalsCount: s.field("proposalsCount", S.int), - votesCount: s.field("votesCount", S.int), - votingPower: s.field("votingPower", BigInt.schema), - }) - - let rowsSchema = S.array(schema) - - @genType - type indexedFieldOperations = { - - @as("accountId") accountId: whereOperations, - - } - - let table = mkTable( - (name :> string), - ~fields=[ - mkField( - "accountId", - Text, - ~fieldSchema=S.string, - - - - ~isIndex, - - ), - mkField( - "daoId", - Text, - ~fieldSchema=S.string, - - - - - - ), - mkField( - "delegationsCount", - Integer, - ~fieldSchema=S.int, - - - - - - ), - mkField( - "id", - Text, - ~fieldSchema=S.string, - ~isPrimaryKey, - - - - - ), - mkField( - "lastVoteTimestamp", - Numeric, - ~fieldSchema=BigInt.schema, - - - - - - ), - mkField( - "proposalsCount", - Integer, - ~fieldSchema=S.int, - - - - - - ), - mkField( - "votesCount", - Integer, - ~fieldSchema=S.int, - - - - - - ), - mkField( - "votingPower", - Numeric, - ~fieldSchema=BigInt.schema, - - - - - - ), - ], - ) - - let entityHistory = table->EntityHistory.fromTable(~schema, ~entityIndex=index) - - external castToInternal: t => Internal.entity = "%identity" -} - -module BalanceHistory = { - let name = (BalanceHistory :> string) - let index = 3 - @genType - type t = { - accountId: string, - balance: bigint, - daoId: string, - delta: bigint, - deltaMod: bigint, - id: id, - logIndex: int, - timestamp: bigint, - transactionHash: string, - } - - let schema = S.object((s): t => { - accountId: s.field("accountId", S.string), - balance: s.field("balance", BigInt.schema), - daoId: s.field("daoId", S.string), - delta: s.field("delta", BigInt.schema), - deltaMod: s.field("deltaMod", 
BigInt.schema), - id: s.field("id", S.string), - logIndex: s.field("logIndex", S.int), - timestamp: s.field("timestamp", BigInt.schema), - transactionHash: s.field("transactionHash", S.string), - }) - - let rowsSchema = S.array(schema) - - @genType - type indexedFieldOperations = { - - @as("accountId") accountId: whereOperations, - - @as("transactionHash") transactionHash: whereOperations, - - } - - let table = mkTable( - (name :> string), - ~fields=[ - mkField( - "accountId", - Text, - ~fieldSchema=S.string, - - - - ~isIndex, - - ), - mkField( - "balance", - Numeric, - ~fieldSchema=BigInt.schema, - - - - - - ), - mkField( - "daoId", - Text, - ~fieldSchema=S.string, - - - - - - ), - mkField( - "delta", - Numeric, - ~fieldSchema=BigInt.schema, - - - - - - ), - mkField( - "deltaMod", - Numeric, - ~fieldSchema=BigInt.schema, - - - - - - ), - mkField( - "id", - Text, - ~fieldSchema=S.string, - ~isPrimaryKey, - - - - - ), - mkField( - "logIndex", - Integer, - ~fieldSchema=S.int, - - - - - - ), - mkField( - "timestamp", - Numeric, - ~fieldSchema=BigInt.schema, - - - - - - ), - mkField( - "transactionHash", - Text, - ~fieldSchema=S.string, - - - - ~isIndex, - - ), - ], - ) - - let entityHistory = table->EntityHistory.fromTable(~schema, ~entityIndex=index) - - external castToInternal: t => Internal.entity = "%identity" -} - -module DaoMetricsDayBucket = { - let name = (DaoMetricsDayBucket :> string) - let index = 4 - @genType - type t = { - average: bigint, - closeValue: bigint, - count: int, - daoId: string, - date: bigint, - high: bigint, - id: id, - lastUpdate: bigint, - low: bigint, - metricType: Enums.MetricType.t, - openValue: bigint, - tokenId: string, - volume: bigint, - } - - let schema = S.object((s): t => { - average: s.field("average", BigInt.schema), - closeValue: s.field("closeValue", BigInt.schema), - count: s.field("count", S.int), - daoId: s.field("daoId", S.string), - date: s.field("date", BigInt.schema), - high: s.field("high", BigInt.schema), - id: 
s.field("id", S.string), - lastUpdate: s.field("lastUpdate", BigInt.schema), - low: s.field("low", BigInt.schema), - metricType: s.field("metricType", Enums.MetricType.config.schema), - openValue: s.field("openValue", BigInt.schema), - tokenId: s.field("tokenId", S.string), - volume: s.field("volume", BigInt.schema), - }) - - let rowsSchema = S.array(schema) - - @genType - type indexedFieldOperations = { - - @as("tokenId") tokenId: whereOperations, - - } - - let table = mkTable( - (name :> string), - ~fields=[ - mkField( - "average", - Numeric, - ~fieldSchema=BigInt.schema, - - - - - - ), - mkField( - "closeValue", - Numeric, - ~fieldSchema=BigInt.schema, - - - - - - ), - mkField( - "count", - Integer, - ~fieldSchema=S.int, - - - - - - ), - mkField( - "daoId", - Text, - ~fieldSchema=S.string, - - - - - - ), - mkField( - "date", - Numeric, - ~fieldSchema=BigInt.schema, - - - - - - ), - mkField( - "high", - Numeric, - ~fieldSchema=BigInt.schema, - - - - - - ), - mkField( - "id", - Text, - ~fieldSchema=S.string, - ~isPrimaryKey, - - - - - ), - mkField( - "lastUpdate", - Numeric, - ~fieldSchema=BigInt.schema, - - - - - - ), - mkField( - "low", - Numeric, - ~fieldSchema=BigInt.schema, - - - - - - ), - mkField( - "metricType", - Custom(Enums.MetricType.config.name), - ~fieldSchema=Enums.MetricType.config.schema, - - - - - - ), - mkField( - "openValue", - Numeric, - ~fieldSchema=BigInt.schema, - - - - - - ), - mkField( - "tokenId", - Text, - ~fieldSchema=S.string, - - - - ~isIndex, - - ), - mkField( - "volume", - Numeric, - ~fieldSchema=BigInt.schema, - - - - - - ), - ], - ) - - let entityHistory = table->EntityHistory.fromTable(~schema, ~entityIndex=index) - - external castToInternal: t => Internal.entity = "%identity" -} - -module Delegation = { - let name = (Delegation :> string) - let index = 5 - @genType - type t = { - daoId: string, - delegateAccountId: string, - delegatedValue: bigint, - delegationType: option, - delegatorAccountId: string, - id: id, - isCex: bool, 
- isDex: bool, - isLending: bool, - isTotal: bool, - logIndex: int, - previousDelegate: option, - timestamp: bigint, - transactionHash: string, - } - - let schema = S.object((s): t => { - daoId: s.field("daoId", S.string), - delegateAccountId: s.field("delegateAccountId", S.string), - delegatedValue: s.field("delegatedValue", BigInt.schema), - delegationType: s.field("delegationType", S.null(S.int)), - delegatorAccountId: s.field("delegatorAccountId", S.string), - id: s.field("id", S.string), - isCex: s.field("isCex", S.bool), - isDex: s.field("isDex", S.bool), - isLending: s.field("isLending", S.bool), - isTotal: s.field("isTotal", S.bool), - logIndex: s.field("logIndex", S.int), - previousDelegate: s.field("previousDelegate", S.null(S.string)), - timestamp: s.field("timestamp", BigInt.schema), - transactionHash: s.field("transactionHash", S.string), - }) - - let rowsSchema = S.array(schema) - - @genType - type indexedFieldOperations = { - - @as("delegateAccountId") delegateAccountId: whereOperations, - - @as("delegatorAccountId") delegatorAccountId: whereOperations, - - @as("timestamp") timestamp: whereOperations, - - @as("transactionHash") transactionHash: whereOperations, - - } - - let table = mkTable( - (name :> string), - ~fields=[ - mkField( - "daoId", - Text, - ~fieldSchema=S.string, - - - - - - ), - mkField( - "delegateAccountId", - Text, - ~fieldSchema=S.string, - - - - ~isIndex, - - ), - mkField( - "delegatedValue", - Numeric, - ~fieldSchema=BigInt.schema, - - - - - - ), - mkField( - "delegationType", - Integer, - ~fieldSchema=S.null(S.int), - - ~isNullable, - - - - ), - mkField( - "delegatorAccountId", - Text, - ~fieldSchema=S.string, - - - - ~isIndex, - - ), - mkField( - "id", - Text, - ~fieldSchema=S.string, - ~isPrimaryKey, - - - - - ), - mkField( - "isCex", - Boolean, - ~fieldSchema=S.bool, - - - - - - ), - mkField( - "isDex", - Boolean, - ~fieldSchema=S.bool, - - - - - - ), - mkField( - "isLending", - Boolean, - ~fieldSchema=S.bool, - - - - - - ), 
- mkField( - "isTotal", - Boolean, - ~fieldSchema=S.bool, - - - - - - ), - mkField( - "logIndex", - Integer, - ~fieldSchema=S.int, - - - - - - ), - mkField( - "previousDelegate", - Text, - ~fieldSchema=S.null(S.string), - - ~isNullable, - - - - ), - mkField( - "timestamp", - Numeric, - ~fieldSchema=BigInt.schema, - - - - ~isIndex, - - ), - mkField( - "transactionHash", - Text, - ~fieldSchema=S.string, - - - - ~isIndex, - - ), - ], - ) - - let entityHistory = table->EntityHistory.fromTable(~schema, ~entityIndex=index) - - external castToInternal: t => Internal.entity = "%identity" -} - -module FeedEvent = { - let name = (FeedEvent :> string) - let index = 6 - @genType - type t = { - eventType: Enums.EventType.t, - id: id, - logIndex: int, - metadata: option, - timestamp: bigint, - txHash: string, - value: bigint, - } - - let schema = S.object((s): t => { - eventType: s.field("eventType", Enums.EventType.config.schema), - id: s.field("id", S.string), - logIndex: s.field("logIndex", S.int), - metadata: s.field("metadata", S.null(S.json(~validate=false))), - timestamp: s.field("timestamp", BigInt.schema), - txHash: s.field("txHash", S.string), - value: s.field("value", BigInt.schema), - }) - - let rowsSchema = S.array(schema) - - @genType - type indexedFieldOperations = { - - @as("timestamp") timestamp: whereOperations, - - @as("txHash") txHash: whereOperations, - - @as("value") value: whereOperations, - - } - - let table = mkTable( - (name :> string), - ~fields=[ - mkField( - "eventType", - Custom(Enums.EventType.config.name), - ~fieldSchema=Enums.EventType.config.schema, - - - - - - ), - mkField( - "id", - Text, - ~fieldSchema=S.string, - ~isPrimaryKey, - - - - - ), - mkField( - "logIndex", - Integer, - ~fieldSchema=S.int, - - - - - - ), - mkField( - "metadata", - JsonB, - ~fieldSchema=S.null(S.json(~validate=false)), - - ~isNullable, - - - - ), - mkField( - "timestamp", - Numeric, - ~fieldSchema=BigInt.schema, - - - - ~isIndex, - - ), - mkField( - "txHash", - Text, 
- ~fieldSchema=S.string, - - - - ~isIndex, - - ), - mkField( - "value", - Numeric, - ~fieldSchema=BigInt.schema, - - - - ~isIndex, - - ), - ], - ) - - let entityHistory = table->EntityHistory.fromTable(~schema, ~entityIndex=index) - - external castToInternal: t => Internal.entity = "%identity" -} - -module ProposalOnchain = { - let name = (ProposalOnchain :> string) - let index = 7 - @genType - type t = { - abstainVotes: bigint, - againstVotes: bigint, - calldatas: Js.Json.t, - daoId: string, - description: string, - endBlock: int, - endTimestamp: bigint, - forVotes: bigint, - id: id, - logIndex: int, - proposalType: option, - proposerAccountId: string, - signatures: Js.Json.t, - startBlock: int, - status: string, - targets: Js.Json.t, - timestamp: bigint, - title: string, - txHash: string, - values: Js.Json.t, - } - - let schema = S.object((s): t => { - abstainVotes: s.field("abstainVotes", BigInt.schema), - againstVotes: s.field("againstVotes", BigInt.schema), - calldatas: s.field("calldatas", S.json(~validate=false)), - daoId: s.field("daoId", S.string), - description: s.field("description", S.string), - endBlock: s.field("endBlock", S.int), - endTimestamp: s.field("endTimestamp", BigInt.schema), - forVotes: s.field("forVotes", BigInt.schema), - id: s.field("id", S.string), - logIndex: s.field("logIndex", S.int), - proposalType: s.field("proposalType", S.null(S.int)), - proposerAccountId: s.field("proposerAccountId", S.string), - signatures: s.field("signatures", S.json(~validate=false)), - startBlock: s.field("startBlock", S.int), - status: s.field("status", S.string), - targets: s.field("targets", S.json(~validate=false)), - timestamp: s.field("timestamp", BigInt.schema), - title: s.field("title", S.string), - txHash: s.field("txHash", S.string), - values: s.field("values", S.json(~validate=false)), - }) - - let rowsSchema = S.array(schema) - - @genType - type indexedFieldOperations = { - - @as("proposerAccountId") proposerAccountId: whereOperations, - - } - - 
let table = mkTable( - (name :> string), - ~fields=[ - mkField( - "abstainVotes", - Numeric, - ~fieldSchema=BigInt.schema, - - - - - - ), - mkField( - "againstVotes", - Numeric, - ~fieldSchema=BigInt.schema, - - - - - - ), - mkField( - "calldatas", - JsonB, - ~fieldSchema=S.json(~validate=false), - - - - - - ), - mkField( - "daoId", - Text, - ~fieldSchema=S.string, - - - - - - ), - mkField( - "description", - Text, - ~fieldSchema=S.string, - - - - - - ), - mkField( - "endBlock", - Integer, - ~fieldSchema=S.int, - - - - - - ), - mkField( - "endTimestamp", - Numeric, - ~fieldSchema=BigInt.schema, - - - - - - ), - mkField( - "forVotes", - Numeric, - ~fieldSchema=BigInt.schema, - - - - - - ), - mkField( - "id", - Text, - ~fieldSchema=S.string, - ~isPrimaryKey, - - - - - ), - mkField( - "logIndex", - Integer, - ~fieldSchema=S.int, - - - - - - ), - mkField( - "proposalType", - Integer, - ~fieldSchema=S.null(S.int), - - ~isNullable, - - - - ), - mkField( - "proposerAccountId", - Text, - ~fieldSchema=S.string, - - - - ~isIndex, - - ), - mkField( - "signatures", - JsonB, - ~fieldSchema=S.json(~validate=false), - - - - - - ), - mkField( - "startBlock", - Integer, - ~fieldSchema=S.int, - - - - - - ), - mkField( - "status", - Text, - ~fieldSchema=S.string, - - - - - - ), - mkField( - "targets", - JsonB, - ~fieldSchema=S.json(~validate=false), - - - - - - ), - mkField( - "timestamp", - Numeric, - ~fieldSchema=BigInt.schema, - - - - - - ), - mkField( - "title", - Text, - ~fieldSchema=S.string, - - - - - - ), - mkField( - "txHash", - Text, - ~fieldSchema=S.string, - - - - - - ), - mkField( - "values", - JsonB, - ~fieldSchema=S.json(~validate=false), - - - - - - ), - ], - ) - - let entityHistory = table->EntityHistory.fromTable(~schema, ~entityIndex=index) - - external castToInternal: t => Internal.entity = "%identity" -} - -module Token = { - let name = (Token :> string) - let index = 8 - @genType - type t = { - cexSupply: bigint, - circulatingSupply: bigint, - decimals: int, - 
delegatedSupply: bigint, - dexSupply: bigint, - id: id, - lendingSupply: bigint, - name: option, - nonCirculatingSupply: bigint, - totalSupply: bigint, - treasury: bigint, - } - - let schema = S.object((s): t => { - cexSupply: s.field("cexSupply", BigInt.schema), - circulatingSupply: s.field("circulatingSupply", BigInt.schema), - decimals: s.field("decimals", S.int), - delegatedSupply: s.field("delegatedSupply", BigInt.schema), - dexSupply: s.field("dexSupply", BigInt.schema), - id: s.field("id", S.string), - lendingSupply: s.field("lendingSupply", BigInt.schema), - name: s.field("name", S.null(S.string)), - nonCirculatingSupply: s.field("nonCirculatingSupply", BigInt.schema), - totalSupply: s.field("totalSupply", BigInt.schema), - treasury: s.field("treasury", BigInt.schema), - }) - - let rowsSchema = S.array(schema) - - @genType - type indexedFieldOperations = { - - } - - let table = mkTable( - (name :> string), - ~fields=[ - mkField( - "cexSupply", - Numeric, - ~fieldSchema=BigInt.schema, - - - - - - ), - mkField( - "circulatingSupply", - Numeric, - ~fieldSchema=BigInt.schema, - - - - - - ), - mkField( - "decimals", - Integer, - ~fieldSchema=S.int, - - - - - - ), - mkField( - "delegatedSupply", - Numeric, - ~fieldSchema=BigInt.schema, - - - - - - ), - mkField( - "dexSupply", - Numeric, - ~fieldSchema=BigInt.schema, - - - - - - ), - mkField( - "id", - Text, - ~fieldSchema=S.string, - ~isPrimaryKey, - - - - - ), - mkField( - "lendingSupply", - Numeric, - ~fieldSchema=BigInt.schema, - - - - - - ), - mkField( - "name", - Text, - ~fieldSchema=S.null(S.string), - - ~isNullable, - - - - ), - mkField( - "nonCirculatingSupply", - Numeric, - ~fieldSchema=BigInt.schema, - - - - - - ), - mkField( - "totalSupply", - Numeric, - ~fieldSchema=BigInt.schema, - - - - - - ), - mkField( - "treasury", - Numeric, - ~fieldSchema=BigInt.schema, - - - - - - ), - ], - ) - - let entityHistory = table->EntityHistory.fromTable(~schema, ~entityIndex=index) - - external castToInternal: t => 
Internal.entity = "%identity" -} - -module TokenPrice = { - let name = (TokenPrice :> string) - let index = 9 - @genType - type t = { - id: id, - price: bigint, - timestamp: bigint, - } - - let schema = S.object((s): t => { - id: s.field("id", S.string), - price: s.field("price", BigInt.schema), - timestamp: s.field("timestamp", BigInt.schema), - }) - - let rowsSchema = S.array(schema) - - @genType - type indexedFieldOperations = { - - } - - let table = mkTable( - (name :> string), - ~fields=[ - mkField( - "id", - Text, - ~fieldSchema=S.string, - ~isPrimaryKey, - - - - - ), - mkField( - "price", - Numeric, - ~fieldSchema=BigInt.schema, - - - - - - ), - mkField( - "timestamp", - Numeric, - ~fieldSchema=BigInt.schema, - - - - - - ), - ], - ) - - let entityHistory = table->EntityHistory.fromTable(~schema, ~entityIndex=index) - - external castToInternal: t => Internal.entity = "%identity" -} - -module Transaction = { - let name = (Transaction :> string) - let index = 10 - @genType - type t = { - fromAddress: option, - id: id, - isCex: bool, - isDex: bool, - isLending: bool, - isTotal: bool, - timestamp: bigint, - toAddress: option, - transactionHash: string, - } - - let schema = S.object((s): t => { - fromAddress: s.field("fromAddress", S.null(S.string)), - id: s.field("id", S.string), - isCex: s.field("isCex", S.bool), - isDex: s.field("isDex", S.bool), - isLending: s.field("isLending", S.bool), - isTotal: s.field("isTotal", S.bool), - timestamp: s.field("timestamp", BigInt.schema), - toAddress: s.field("toAddress", S.null(S.string)), - transactionHash: s.field("transactionHash", S.string), - }) - - let rowsSchema = S.array(schema) - - @genType - type indexedFieldOperations = { - - } - - let table = mkTable( - (name :> string), - ~fields=[ - mkField( - "fromAddress", - Text, - ~fieldSchema=S.null(S.string), - - ~isNullable, - - - - ), - mkField( - "id", - Text, - ~fieldSchema=S.string, - ~isPrimaryKey, - - - - - ), - mkField( - "isCex", - Boolean, - 
~fieldSchema=S.bool, - - - - - - ), - mkField( - "isDex", - Boolean, - ~fieldSchema=S.bool, - - - - - - ), - mkField( - "isLending", - Boolean, - ~fieldSchema=S.bool, - - - - - - ), - mkField( - "isTotal", - Boolean, - ~fieldSchema=S.bool, - - - - - - ), - mkField( - "timestamp", - Numeric, - ~fieldSchema=BigInt.schema, - - - - - - ), - mkField( - "toAddress", - Text, - ~fieldSchema=S.null(S.string), - - ~isNullable, - - - - ), - mkField( - "transactionHash", - Text, - ~fieldSchema=S.string, - - - - - - ), - ], - ) - - let entityHistory = table->EntityHistory.fromTable(~schema, ~entityIndex=index) - - external castToInternal: t => Internal.entity = "%identity" -} - -module Transfer = { - let name = (Transfer :> string) - let index = 11 - @genType - type t = { - amount: bigint, - daoId: string, - fromAccountId: string, - id: id, - isCex: bool, - isDex: bool, - isLending: bool, - isTotal: bool, - logIndex: int, - timestamp: bigint, - toAccountId: string, - tokenId: string, - transactionHash: string, - } - - let schema = S.object((s): t => { - amount: s.field("amount", BigInt.schema), - daoId: s.field("daoId", S.string), - fromAccountId: s.field("fromAccountId", S.string), - id: s.field("id", S.string), - isCex: s.field("isCex", S.bool), - isDex: s.field("isDex", S.bool), - isLending: s.field("isLending", S.bool), - isTotal: s.field("isTotal", S.bool), - logIndex: s.field("logIndex", S.int), - timestamp: s.field("timestamp", BigInt.schema), - toAccountId: s.field("toAccountId", S.string), - tokenId: s.field("tokenId", S.string), - transactionHash: s.field("transactionHash", S.string), - }) - - let rowsSchema = S.array(schema) - - @genType - type indexedFieldOperations = { - - @as("amount") amount: whereOperations, - - @as("fromAccountId") fromAccountId: whereOperations, - - @as("timestamp") timestamp: whereOperations, - - @as("toAccountId") toAccountId: whereOperations, - - @as("tokenId") tokenId: whereOperations, - - @as("transactionHash") transactionHash: 
whereOperations, - - } - - let table = mkTable( - (name :> string), - ~fields=[ - mkField( - "amount", - Numeric, - ~fieldSchema=BigInt.schema, - - - - ~isIndex, - - ), - mkField( - "daoId", - Text, - ~fieldSchema=S.string, - - - - - - ), - mkField( - "fromAccountId", - Text, - ~fieldSchema=S.string, - - - - ~isIndex, - - ), - mkField( - "id", - Text, - ~fieldSchema=S.string, - ~isPrimaryKey, - - - - - ), - mkField( - "isCex", - Boolean, - ~fieldSchema=S.bool, - - - - - - ), - mkField( - "isDex", - Boolean, - ~fieldSchema=S.bool, - - - - - - ), - mkField( - "isLending", - Boolean, - ~fieldSchema=S.bool, - - - - - - ), - mkField( - "isTotal", - Boolean, - ~fieldSchema=S.bool, - - - - - - ), - mkField( - "logIndex", - Integer, - ~fieldSchema=S.int, - - - - - - ), - mkField( - "timestamp", - Numeric, - ~fieldSchema=BigInt.schema, - - - - ~isIndex, - - ), - mkField( - "toAccountId", - Text, - ~fieldSchema=S.string, - - - - ~isIndex, - - ), - mkField( - "tokenId", - Text, - ~fieldSchema=S.string, - - - - ~isIndex, - - ), - mkField( - "transactionHash", - Text, - ~fieldSchema=S.string, - - - - ~isIndex, - - ), - ], - ) - - let entityHistory = table->EntityHistory.fromTable(~schema, ~entityIndex=index) - - external castToInternal: t => Internal.entity = "%identity" -} - -module VoteOnchain = { - let name = (VoteOnchain :> string) - let index = 12 - @genType - type t = { - daoId: string, - id: id, - proposalId: string, - reason: option, - support: string, - timestamp: bigint, - txHash: string, - voterAccountId: string, - votingPower: bigint, - } - - let schema = S.object((s): t => { - daoId: s.field("daoId", S.string), - id: s.field("id", S.string), - proposalId: s.field("proposalId", S.string), - reason: s.field("reason", S.null(S.string)), - support: s.field("support", S.string), - timestamp: s.field("timestamp", BigInt.schema), - txHash: s.field("txHash", S.string), - voterAccountId: s.field("voterAccountId", S.string), - votingPower: s.field("votingPower", 
BigInt.schema), - }) - - let rowsSchema = S.array(schema) - - @genType - type indexedFieldOperations = { - - @as("proposalId") proposalId: whereOperations, - - @as("voterAccountId") voterAccountId: whereOperations, - - } - - let table = mkTable( - (name :> string), - ~fields=[ - mkField( - "daoId", - Text, - ~fieldSchema=S.string, - - - - - - ), - mkField( - "id", - Text, - ~fieldSchema=S.string, - ~isPrimaryKey, - - - - - ), - mkField( - "proposalId", - Text, - ~fieldSchema=S.string, - - - - ~isIndex, - - ), - mkField( - "reason", - Text, - ~fieldSchema=S.null(S.string), - - ~isNullable, - - - - ), - mkField( - "support", - Text, - ~fieldSchema=S.string, - - - - - - ), - mkField( - "timestamp", - Numeric, - ~fieldSchema=BigInt.schema, - - - - - - ), - mkField( - "txHash", - Text, - ~fieldSchema=S.string, - - - - - - ), - mkField( - "voterAccountId", - Text, - ~fieldSchema=S.string, - - - - ~isIndex, - - ), - mkField( - "votingPower", - Numeric, - ~fieldSchema=BigInt.schema, - - - - - - ), - ], - ) - - let entityHistory = table->EntityHistory.fromTable(~schema, ~entityIndex=index) - - external castToInternal: t => Internal.entity = "%identity" -} - -module VotingPowerHistory = { - let name = (VotingPowerHistory :> string) - let index = 13 - @genType - type t = { - accountId: string, - daoId: string, - delta: bigint, - deltaMod: bigint, - id: id, - logIndex: int, - timestamp: bigint, - transactionHash: string, - votingPower: bigint, - } - - let schema = S.object((s): t => { - accountId: s.field("accountId", S.string), - daoId: s.field("daoId", S.string), - delta: s.field("delta", BigInt.schema), - deltaMod: s.field("deltaMod", BigInt.schema), - id: s.field("id", S.string), - logIndex: s.field("logIndex", S.int), - timestamp: s.field("timestamp", BigInt.schema), - transactionHash: s.field("transactionHash", S.string), - votingPower: s.field("votingPower", BigInt.schema), - }) - - let rowsSchema = S.array(schema) - - @genType - type indexedFieldOperations = { - - 
@as("accountId") accountId: whereOperations, - - @as("transactionHash") transactionHash: whereOperations, - - } - - let table = mkTable( - (name :> string), - ~fields=[ - mkField( - "accountId", - Text, - ~fieldSchema=S.string, - - - - ~isIndex, - - ), - mkField( - "daoId", - Text, - ~fieldSchema=S.string, - - - - - - ), - mkField( - "delta", - Numeric, - ~fieldSchema=BigInt.schema, - - - - - - ), - mkField( - "deltaMod", - Numeric, - ~fieldSchema=BigInt.schema, - - - - - - ), - mkField( - "id", - Text, - ~fieldSchema=S.string, - ~isPrimaryKey, - - - - - ), - mkField( - "logIndex", - Integer, - ~fieldSchema=S.int, - - - - - - ), - mkField( - "timestamp", - Numeric, - ~fieldSchema=BigInt.schema, - - - - - - ), - mkField( - "transactionHash", - Text, - ~fieldSchema=S.string, - - - - ~isIndex, - - ), - mkField( - "votingPower", - Numeric, - ~fieldSchema=BigInt.schema, - - - - - - ), - ], - ) - - let entityHistory = table->EntityHistory.fromTable(~schema, ~entityIndex=index) - - external castToInternal: t => Internal.entity = "%identity" -} - -let userEntities = [ - module(Account), - module(AccountBalance), - module(AccountPower), - module(BalanceHistory), - module(DaoMetricsDayBucket), - module(Delegation), - module(FeedEvent), - module(ProposalOnchain), - module(Token), - module(TokenPrice), - module(Transaction), - module(Transfer), - module(VoteOnchain), - module(VotingPowerHistory), -]->entityModsToInternal - -let allEntities = - userEntities->Js.Array2.concat( - [module(InternalTable.DynamicContractRegistry)]->entityModsToInternal, - ) - -let byName = - allEntities - ->Js.Array2.map(entityConfig => { - (entityConfig.name, entityConfig) - }) - ->Js.Dict.fromArray diff --git a/apps/hypersync-indexer/generated/src/db/Enums.gen.ts b/apps/hypersync-indexer/generated/src/db/Enums.gen.ts deleted file mode 100644 index 37c23647f..000000000 --- a/apps/hypersync-indexer/generated/src/db/Enums.gen.ts +++ /dev/null @@ -1,41 +0,0 @@ -/* TypeScript file generated from 
Enums.res by genType. */ - -/* eslint-disable */ -/* tslint:disable */ - -export type ContractType_t = "ENSGovernor" | "ENSToken"; - -export type EntityType_t = - "Account" - | "AccountBalance" - | "AccountPower" - | "BalanceHistory" - | "DaoMetricsDayBucket" - | "Delegation" - | "FeedEvent" - | "ProposalOnchain" - | "Token" - | "TokenPrice" - | "Transaction" - | "Transfer" - | "VoteOnchain" - | "VotingPowerHistory" - | "dynamic_contract_registry"; - -export type EventType_t = - "VOTE" - | "PROPOSAL" - | "PROPOSAL_EXTENDED" - | "DELEGATION" - | "DELEGATION_VOTES_CHANGED" - | "TRANSFER"; - -export type MetricType_t = - "total" - | "delegated" - | "cex" - | "dex" - | "lending" - | "circulating" - | "treasury" - | "non_circulating"; diff --git a/apps/hypersync-indexer/generated/src/db/Enums.res b/apps/hypersync-indexer/generated/src/db/Enums.res deleted file mode 100644 index 79f5532c2..000000000 --- a/apps/hypersync-indexer/generated/src/db/Enums.res +++ /dev/null @@ -1,108 +0,0 @@ -module ContractType = { - @genType - type t = - | @as("ENSGovernor") ENSGovernor - | @as("ENSToken") ENSToken - - let name = "CONTRACT_TYPE" - let variants = [ - ENSGovernor, - ENSToken, - ] - let config = Internal.makeEnumConfig(~name, ~variants) -} - -module EntityType = { - @genType - type t = - | @as("Account") Account - | @as("AccountBalance") AccountBalance - | @as("AccountPower") AccountPower - | @as("BalanceHistory") BalanceHistory - | @as("DaoMetricsDayBucket") DaoMetricsDayBucket - | @as("Delegation") Delegation - | @as("FeedEvent") FeedEvent - | @as("ProposalOnchain") ProposalOnchain - | @as("Token") Token - | @as("TokenPrice") TokenPrice - | @as("Transaction") Transaction - | @as("Transfer") Transfer - | @as("VoteOnchain") VoteOnchain - | @as("VotingPowerHistory") VotingPowerHistory - | @as("dynamic_contract_registry") DynamicContractRegistry - - let name = "ENTITY_TYPE" - let variants = [ - Account, - AccountBalance, - AccountPower, - BalanceHistory, - DaoMetricsDayBucket, - 
Delegation, - FeedEvent, - ProposalOnchain, - Token, - TokenPrice, - Transaction, - Transfer, - VoteOnchain, - VotingPowerHistory, - DynamicContractRegistry, - ] - let config = Internal.makeEnumConfig(~name, ~variants) -} - -module EventType = { - @genType - type t = - | @as("VOTE") VOTE - | @as("PROPOSAL") PROPOSAL - | @as("PROPOSAL_EXTENDED") PROPOSAL_EXTENDED - | @as("DELEGATION") DELEGATION - | @as("DELEGATION_VOTES_CHANGED") DELEGATION_VOTES_CHANGED - | @as("TRANSFER") TRANSFER - - let name = "EventType" - let variants = [ - VOTE, - PROPOSAL, - PROPOSAL_EXTENDED, - DELEGATION, - DELEGATION_VOTES_CHANGED, - TRANSFER, - ] - let config = Internal.makeEnumConfig(~name, ~variants) -} - -module MetricType = { - @genType - type t = - | @as("total") Total - | @as("delegated") Delegated - | @as("cex") Cex - | @as("dex") Dex - | @as("lending") Lending - | @as("circulating") Circulating - | @as("treasury") Treasury - | @as("non_circulating") Non_circulating - - let name = "MetricType" - let variants = [ - Total, - Delegated, - Cex, - Dex, - Lending, - Circulating, - Treasury, - Non_circulating, - ] - let config = Internal.makeEnumConfig(~name, ~variants) -} - -let allEnums = ([ - ContractType.config->Internal.fromGenericEnumConfig, - EntityType.config->Internal.fromGenericEnumConfig, - EventType.config->Internal.fromGenericEnumConfig, - MetricType.config->Internal.fromGenericEnumConfig, -]) diff --git a/apps/hypersync-indexer/generated/src/db/Migrations.res b/apps/hypersync-indexer/generated/src/db/Migrations.res deleted file mode 100644 index a293165c7..000000000 --- a/apps/hypersync-indexer/generated/src/db/Migrations.res +++ /dev/null @@ -1,57 +0,0 @@ -let unsafe = Postgres.unsafe - -let deleteAllTables: unit => promise = async () => { - Logging.trace("Dropping all tables") - let query = ` - DO $$ - BEGIN - DROP SCHEMA IF EXISTS ${Env.Db.publicSchema} CASCADE; - CREATE SCHEMA ${Env.Db.publicSchema}; - GRANT ALL ON SCHEMA ${Env.Db.publicSchema} TO "${Env.Db.user}"; - 
GRANT ALL ON SCHEMA ${Env.Db.publicSchema} TO public; - END $$;` - - await Generated.codegenPersistence.sql->unsafe(query) -} - -type t -@module external process: t = "process" - -type exitCode = | @as(0) Success | @as(1) Failure -@send external exit: (t, exitCode) => unit = "exit" - -let runUpMigrations = async ( - ~shouldExit, - // Reset is used for db-setup - ~reset=false, -) => { - let config = Generated.configWithoutRegistrations - let exitCode = try { - await Generated.codegenPersistence->Persistence.init( - ~reset, - ~chainConfigs=config.chainMap->ChainMap.values, - ) - Success - } catch { - | _ => Failure - } - if shouldExit { - process->exit(exitCode) - } - exitCode -} - -let runDownMigrations = async (~shouldExit) => { - let exitCode = ref(Success) - await deleteAllTables()->Promise.catch(err => { - exitCode := Failure - err - ->ErrorHandling.make(~msg="EE804: Error dropping entity tables") - ->ErrorHandling.log - Promise.resolve() - }) - if shouldExit { - process->exit(exitCode.contents) - } - exitCode.contents -} diff --git a/apps/hypersync-indexer/generated/src/eventFetching/ChainFetcher.res b/apps/hypersync-indexer/generated/src/eventFetching/ChainFetcher.res deleted file mode 100644 index 312cb2a37..000000000 --- a/apps/hypersync-indexer/generated/src/eventFetching/ChainFetcher.res +++ /dev/null @@ -1,464 +0,0 @@ -open Belt - -//A filter should return true if the event should be kept and isValid should return -//false when the filter should be removed/cleaned up -type processingFilter = { - filter: Internal.item => bool, - isValid: (~fetchState: FetchState.t) => bool, -} - -type t = { - logger: Pino.t, - fetchState: FetchState.t, - sourceManager: SourceManager.t, - chainConfig: Config.chain, - //The latest known block of the chain - currentBlockHeight: int, - isProgressAtHead: bool, - timestampCaughtUpToHeadOrEndblock: option, - committedProgressBlockNumber: int, - firstEventBlockNumber: option, - numEventsProcessed: int, - numBatchesFetched: int, - 
reorgDetection: ReorgDetection.t, - safeCheckpointTracking: option, -} - -//CONSTRUCTION -let make = ( - ~chainConfig: Config.chain, - ~dynamicContracts: array, - ~startBlock, - ~endBlock, - ~firstEventBlockNumber, - ~progressBlockNumber, - ~config: Config.t, - ~registrations: EventRegister.registrations, - ~targetBufferSize, - ~logger, - ~timestampCaughtUpToHeadOrEndblock, - ~numEventsProcessed, - ~numBatchesFetched, - ~isInReorgThreshold, - ~reorgCheckpoints: array, - ~maxReorgDepth, -): t => { - // We don't need the router itself, but only validation logic, - // since now event router is created for selection of events - // and validation doesn't work correctly in routers. - // Ideally to split it into two different parts. - let eventRouter = EventRouter.empty() - - // Aggregate events we want to fetch - let contracts = [] - let eventConfigs: array = [] - - let notRegisteredEvents = [] - - chainConfig.contracts->Array.forEach(contract => { - let contractName = contract.name - - contract.events->Array.forEach(eventConfig => { - let {isWildcard} = eventConfig - let hasContractRegister = eventConfig.contractRegister->Option.isSome - - // Should validate the events - eventRouter->EventRouter.addOrThrow( - eventConfig.id, - (), - ~contractName, - ~chain=ChainMap.Chain.makeUnsafe(~chainId=chainConfig.id), - ~eventName=eventConfig.name, - ~isWildcard, - ) - - // Filter out non-preRegistration events on preRegistration phase - // so we don't care about it in fetch state and workers anymore - let shouldBeIncluded = if config.enableRawEvents { - true - } else { - let isRegistered = hasContractRegister || eventConfig.handler->Option.isSome - if !isRegistered { - notRegisteredEvents->Array.push(eventConfig) - } - isRegistered - } - - if shouldBeIncluded { - eventConfigs->Array.push(eventConfig) - } - }) - - switch contract.startBlock { - | Some(startBlock) if startBlock < chainConfig.startBlock => - Js.Exn.raiseError( - `The start block for contract "${contractName}" is 
less than the chain start block. This is not supported yet.`, - ) - | _ => () - } - - contract.addresses->Array.forEach(address => { - contracts->Array.push({ - Internal.address, - contractName: contract.name, - startBlock: switch contract.startBlock { - | Some(startBlock) => startBlock - | None => chainConfig.startBlock - }, - registrationBlock: None, - }) - }) - }) - - dynamicContracts->Array.forEach(dc => contracts->Array.push(dc)) - - if notRegisteredEvents->Utils.Array.notEmpty { - logger->Logging.childInfo( - `The event${if notRegisteredEvents->Array.length > 1 { - "s" - } else { - "" - }} ${notRegisteredEvents - ->Array.map(eventConfig => `${eventConfig.contractName}.${eventConfig.name}`) - ->Js.Array2.joinWith(", ")} don't have an event handler and skipped for indexing.`, - ) - } - - let onBlockConfigs = - registrations.onBlockByChainId->Utils.Dict.dangerouslyGetNonOption(chainConfig.id->Int.toString) - switch onBlockConfigs { - | Some(onBlockConfigs) => - // TODO: Move it to the EventRegister module - // so the error is thrown with better stack trace - onBlockConfigs->Array.forEach(onBlockConfig => { - if onBlockConfig.startBlock->Option.getWithDefault(startBlock) < startBlock { - Js.Exn.raiseError( - `The start block for onBlock handler "${onBlockConfig.name}" is less than the chain start block (${startBlock->Belt.Int.toString}). This is not supported yet.`, - ) - } - switch endBlock { - | Some(chainEndBlock) => - if onBlockConfig.endBlock->Option.getWithDefault(chainEndBlock) > chainEndBlock { - Js.Exn.raiseError( - `The end block for onBlock handler "${onBlockConfig.name}" is greater than the chain end block (${chainEndBlock->Belt.Int.toString}). 
This is not supported yet.`, - ) - } - | None => () - } - }) - | None => () - } - - let fetchState = FetchState.make( - ~maxAddrInPartition=config.maxAddrInPartition, - ~contracts, - ~progressBlockNumber, - ~startBlock, - ~endBlock, - ~eventConfigs, - ~targetBufferSize, - ~chainId=chainConfig.id, - // FIXME: Shouldn't set with full history - ~blockLag=Pervasives.max( - !config.shouldRollbackOnReorg || isInReorgThreshold ? 0 : chainConfig.maxReorgDepth, - Env.indexingBlockLag->Option.getWithDefault(0), - ), - ~onBlockConfigs?, - ) - - let chainReorgCheckpoints = reorgCheckpoints->Array.keepMapU(reorgCheckpoint => { - if reorgCheckpoint.chainId === chainConfig.id { - Some(reorgCheckpoint) - } else { - None - } - }) - - { - logger, - chainConfig, - sourceManager: SourceManager.make( - ~sources=chainConfig.sources, - ~maxPartitionConcurrency=Env.maxPartitionConcurrency, - ), - reorgDetection: ReorgDetection.make( - ~chainReorgCheckpoints, - ~maxReorgDepth, - ~shouldRollbackOnReorg=config.shouldRollbackOnReorg, - ), - safeCheckpointTracking: SafeCheckpointTracking.make( - ~maxReorgDepth, - ~shouldRollbackOnReorg=config.shouldRollbackOnReorg, - ~chainReorgCheckpoints, - ), - currentBlockHeight: 0, - isProgressAtHead: false, - fetchState, - firstEventBlockNumber, - committedProgressBlockNumber: progressBlockNumber, - timestampCaughtUpToHeadOrEndblock, - numEventsProcessed, - numBatchesFetched, - } -} - -let makeFromConfig = (chainConfig: Config.chain, ~config, ~registrations, ~targetBufferSize) => { - let logger = Logging.createChild(~params={"chainId": chainConfig.id}) - - make( - ~chainConfig, - ~config, - ~registrations, - ~startBlock=chainConfig.startBlock, - ~endBlock=chainConfig.endBlock, - ~reorgCheckpoints=[], - ~maxReorgDepth=chainConfig.maxReorgDepth, - ~firstEventBlockNumber=None, - ~progressBlockNumber=-1, - ~timestampCaughtUpToHeadOrEndblock=None, - ~numEventsProcessed=0, - ~numBatchesFetched=0, - ~targetBufferSize, - ~logger, - ~dynamicContracts=[], - 
~isInReorgThreshold=false, - ) -} - -/** - * This function allows a chain fetcher to be created from metadata, in particular this is useful for restarting an indexer and making sure it fetches blocks from the same place. - */ -let makeFromDbState = async ( - chainConfig: Config.chain, - ~resumedChainState: Persistence.initialChainState, - ~reorgCheckpoints, - ~isInReorgThreshold, - ~config, - ~registrations, - ~targetBufferSize, -) => { - let chainId = chainConfig.id - let logger = Logging.createChild(~params={"chainId": chainId}) - - Prometheus.ProgressEventsCount.set(~processedCount=resumedChainState.numEventsProcessed, ~chainId) - - let progressBlockNumber = - // Can be -1 when not set - resumedChainState.progressBlockNumber >= 0 - ? resumedChainState.progressBlockNumber - : resumedChainState.startBlock - 1 - - make( - ~dynamicContracts=resumedChainState.dynamicContracts, - ~chainConfig, - ~startBlock=resumedChainState.startBlock, - ~endBlock=resumedChainState.endBlock, - ~config, - ~registrations, - ~reorgCheckpoints, - ~maxReorgDepth=resumedChainState.maxReorgDepth, - ~firstEventBlockNumber=resumedChainState.firstEventBlockNumber, - ~progressBlockNumber, - ~timestampCaughtUpToHeadOrEndblock=Env.updateSyncTimeOnRestart - ? 
None - : resumedChainState.timestampCaughtUpToHeadOrEndblock, - ~numEventsProcessed=resumedChainState.numEventsProcessed, - ~numBatchesFetched=0, - ~logger, - ~targetBufferSize, - ~isInReorgThreshold, - ) -} - -/** - * Helper function to get the configured start block for a contract from config - */ -let getContractStartBlock = ( - config: Config.t, - ~chain: ChainMap.Chain.t, - ~contractName: string, -): option => { - let chainConfig = config.chainMap->ChainMap.get(chain) - chainConfig.contracts - ->Js.Array2.find(contract => contract.name === contractName) - ->Option.flatMap(contract => contract.startBlock) -} - -let runContractRegistersOrThrow = async ( - ~itemsWithContractRegister: array, - ~chain: ChainMap.Chain.t, - ~config: Config.t, -) => { - let itemsWithDcs = [] - - let onRegister = (~item: Internal.item, ~contractAddress, ~contractName) => { - let eventItem = item->Internal.castUnsafeEventItem - let {blockNumber} = eventItem - - // Use contract-specific start block if configured, otherwise fall back to registration block - let contractStartBlock = switch getContractStartBlock( - config, - ~chain, - ~contractName=(contractName: Enums.ContractType.t :> string), - ) { - | Some(configuredStartBlock) => configuredStartBlock - | None => blockNumber - } - - let dc: Internal.indexingContract = { - address: contractAddress, - contractName: (contractName: Enums.ContractType.t :> string), - startBlock: contractStartBlock, - registrationBlock: Some(blockNumber), - } - - switch item->Internal.getItemDcs { - | None => { - item->Internal.setItemDcs([dc]) - itemsWithDcs->Array.push(item) - } - | Some(dcs) => dcs->Array.push(dc) - } - } - - let promises = [] - for idx in 0 to itemsWithContractRegister->Array.length - 1 { - let item = itemsWithContractRegister->Array.getUnsafe(idx) - let eventItem = item->Internal.castUnsafeEventItem - let contractRegister = switch eventItem { - | {eventConfig: {contractRegister: Some(contractRegister)}} => contractRegister - | 
{eventConfig: {contractRegister: None, name: eventName}} => - // Unexpected case, since we should pass only events with contract register to this function - Js.Exn.raiseError("Contract register is not set for event " ++ eventName) - } - - let errorMessage = "Event contractRegister failed, please fix the error to keep the indexer running smoothly" - - // Catch sync and async errors - try { - let params: UserContext.contractRegisterParams = { - item, - onRegister, - config, - isResolved: false, - } - let result = contractRegister(UserContext.getContractRegisterArgs(params)) - - // Even though `contractRegister` always returns a promise, - // in the ReScript type, but it might return a non-promise value for TS API. - if result->Promise.isCatchable { - promises->Array.push( - result - ->Promise.thenResolve(r => { - params.isResolved = true - r - }) - ->Promise.catch(exn => { - params.isResolved = true - exn->ErrorHandling.mkLogAndRaise(~msg=errorMessage, ~logger=item->Logging.getItemLogger) - }), - ) - } else { - params.isResolved = true - } - } catch { - | exn => - exn->ErrorHandling.mkLogAndRaise(~msg=errorMessage, ~logger=item->Logging.getItemLogger) - } - } - - if promises->Utils.Array.notEmpty { - let _ = await Promise.all(promises) - } - - itemsWithDcs -} - -let handleQueryResult = ( - chainFetcher: t, - ~query: FetchState.query, - ~newItems, - ~newItemsWithDcs, - ~latestFetchedBlock, -) => { - let fs = switch newItemsWithDcs { - | [] => chainFetcher.fetchState - | _ => chainFetcher.fetchState->FetchState.registerDynamicContracts(newItemsWithDcs) - } - - fs - ->FetchState.handleQueryResult(~query, ~latestFetchedBlock, ~newItems) - ->Result.map(fs => { - ...chainFetcher, - fetchState: fs, - }) -} - -/** -Gets the latest item on the front of the queue and returns updated fetcher -*/ -let hasProcessedToEndblock = (self: t) => { - let {committedProgressBlockNumber, fetchState} = self - switch fetchState.endBlock { - | Some(endBlock) => committedProgressBlockNumber >= 
endBlock - | None => false - } -} - -let hasNoMoreEventsToProcess = (self: t) => { - self.fetchState->FetchState.bufferSize === 0 -} - -let getHighestBlockBelowThreshold = (cf: t): int => { - let highestBlockBelowThreshold = cf.currentBlockHeight - cf.chainConfig.maxReorgDepth - highestBlockBelowThreshold < 0 ? 0 : highestBlockBelowThreshold -} - -/** -Finds the last known valid block number below the reorg block -If not found, returns the highest block below threshold -*/ -let getLastKnownValidBlock = async ( - chainFetcher: t, - ~reorgBlockNumber: int, - //Parameter used for dependency injecting in tests - ~getBlockHashes=(chainFetcher.sourceManager->SourceManager.getActiveSource).getBlockHashes, -) => { - // Improtant: It's important to not include the reorg detection block number - // because there might be different instances of the source - // with mismatching hashes between them. - // So we MUST always rollback the block number where we detected a reorg. - let scannedBlockNumbers = - chainFetcher.reorgDetection->ReorgDetection.getThresholdBlockNumbersBelowBlock( - ~blockNumber=reorgBlockNumber, - ~currentBlockHeight=chainFetcher.currentBlockHeight, - ) - - let getBlockHashes = blockNumbers => { - getBlockHashes(~blockNumbers, ~logger=chainFetcher.logger)->Promise.thenResolve(res => - switch res { - | Ok(v) => v - | Error(exn) => - exn->ErrorHandling.mkLogAndRaise( - ~msg="Failed to fetch blockHashes for given blockNumbers during rollback", - ) - } - ) - } - - switch scannedBlockNumbers { - | [] => chainFetcher->getHighestBlockBelowThreshold - | _ => { - let blockNumbersAndHashes = await getBlockHashes(scannedBlockNumbers) - - switch chainFetcher.reorgDetection->ReorgDetection.getLatestValidScannedBlock( - ~blockNumbersAndHashes, - ) { - | Some(blockNumber) => blockNumber - | None => chainFetcher->getHighestBlockBelowThreshold - } - } - } -} - -let isActivelyIndexing = (chainFetcher: t) => chainFetcher.fetchState->FetchState.isActivelyIndexing diff --git 
a/apps/hypersync-indexer/generated/src/eventFetching/ChainManager.res b/apps/hypersync-indexer/generated/src/eventFetching/ChainManager.res deleted file mode 100644 index 11bce6b59..000000000 --- a/apps/hypersync-indexer/generated/src/eventFetching/ChainManager.res +++ /dev/null @@ -1,174 +0,0 @@ -open Belt - -type t = { - committedCheckpointId: int, - chainFetchers: ChainMap.t, - multichain: Config.multichain, - isInReorgThreshold: bool, -} - -let calculateTargetBufferSize = (~activeChainsCount, ~config: Config.t) => { - let targetBatchesInBuffer = 3 - switch Env.targetBufferSize { - | Some(size) => size - | None => - config.batchSize * (activeChainsCount > targetBatchesInBuffer ? 1 : targetBatchesInBuffer) - } -} - -let makeFromConfig = (~config: Config.t, ~registrations): t => { - let targetBufferSize = calculateTargetBufferSize( - ~activeChainsCount=config.chainMap->ChainMap.size, - ~config, - ) - let chainFetchers = - config.chainMap->ChainMap.map( - ChainFetcher.makeFromConfig(_, ~config, ~registrations, ~targetBufferSize), - ) - { - committedCheckpointId: 0, - chainFetchers, - multichain: config.multichain, - isInReorgThreshold: false, - } -} - -let makeFromDbState = async ( - ~initialState: Persistence.initialState, - ~config: Config.t, - ~registrations, - ~persistence: Persistence.t, -): t => { - let isInReorgThreshold = if initialState.cleanRun { - false - } else { - // TODO: Move to Persistence.initialState - // Since now it's possible not to have rows in the history table - // even after the indexer started saving history (entered reorg threshold), - // This rows check might incorrectly return false for recovering the isInReorgThreshold option. - // But this is not a problem. There's no history anyways, and the indexer will be able to - // correctly calculate isInReorgThreshold as it starts. 
- let hasStartedSavingHistory = await persistence.sql->DbFunctions.EntityHistory.hasRows - - //If we have started saving history, continue to save history - //as regardless of whether we are still in a reorg threshold - hasStartedSavingHistory - } - - let targetBufferSize = calculateTargetBufferSize( - ~activeChainsCount=initialState.chains->Array.length, - ~config, - ) - Prometheus.ProcessingMaxBatchSize.set(~maxBatchSize=config.batchSize) - Prometheus.IndexingTargetBufferSize.set(~targetBufferSize) - Prometheus.ReorgThreshold.set(~isInReorgThreshold) - initialState.cache->Utils.Dict.forEach(({effectName, count}) => { - Prometheus.EffectCacheCount.set(~count, ~effectName) - }) - - let chainFetchersArr = - await initialState.chains - ->Array.map(async (resumedChainState: Persistence.initialChainState) => { - let chain = Config.getChain(config, ~chainId=resumedChainState.id) - let chainConfig = config.chainMap->ChainMap.get(chain) - - ( - chain, - await chainConfig->ChainFetcher.makeFromDbState( - ~resumedChainState, - ~reorgCheckpoints=initialState.reorgCheckpoints, - ~isInReorgThreshold, - ~targetBufferSize, - ~config, - ~registrations, - ), - ) - }) - ->Promise.all - - let chainFetchers = ChainMap.fromArrayUnsafe(chainFetchersArr) - - { - committedCheckpointId: initialState.checkpointId, - multichain: config.multichain, - chainFetchers, - isInReorgThreshold, - } -} - -let getChainFetcher = (chainManager: t, ~chain: ChainMap.Chain.t): ChainFetcher.t => { - chainManager.chainFetchers->ChainMap.get(chain) -} - -let setChainFetcher = (chainManager: t, chainFetcher: ChainFetcher.t) => { - { - ...chainManager, - chainFetchers: chainManager.chainFetchers->ChainMap.set( - ChainMap.Chain.makeUnsafe(~chainId=chainFetcher.chainConfig.id), - chainFetcher, - ), - } -} - -let nextItemIsNone = (chainManager: t): bool => { - !Batch.hasMultichainReadyItem( - chainManager.chainFetchers->ChainMap.map(cf => { - cf.fetchState - }), - ~multichain=chainManager.multichain, - ) -} - -let 
createBatch = (chainManager: t, ~batchSizeTarget: int): Batch.t => { - Batch.make( - ~checkpointIdBeforeBatch=chainManager.committedCheckpointId, - ~chainsBeforeBatch=chainManager.chainFetchers->ChainMap.map((cf): Batch.chainBeforeBatch => { - fetchState: cf.fetchState, - progressBlockNumber: cf.committedProgressBlockNumber, - totalEventsProcessed: cf.numEventsProcessed, - sourceBlockNumber: cf.currentBlockHeight, - reorgDetection: cf.reorgDetection, - }), - ~multichain=chainManager.multichain, - ~batchSizeTarget, - ) -} - -let isProgressAtHead = chainManager => - chainManager.chainFetchers - ->ChainMap.values - ->Js.Array2.every(cf => cf.isProgressAtHead) - -let isActivelyIndexing = chainManager => - chainManager.chainFetchers - ->ChainMap.values - ->Js.Array2.every(ChainFetcher.isActivelyIndexing) - -let getSafeCheckpointId = (chainManager: t) => { - let chainFetchers = chainManager.chainFetchers->ChainMap.values - - let infinity = (%raw(`Infinity`): int) - let result = ref(infinity) - - for idx in 0 to chainFetchers->Array.length - 1 { - let chainFetcher = chainFetchers->Array.getUnsafe(idx) - switch chainFetcher.safeCheckpointTracking { - | None => () // Skip chains with maxReorgDepth = 0 - | Some(safeCheckpointTracking) => { - let safeCheckpointId = - safeCheckpointTracking->SafeCheckpointTracking.getSafeCheckpointId( - ~sourceBlockNumber=chainFetcher.currentBlockHeight, - ) - if safeCheckpointId < result.contents { - result := safeCheckpointId - } - } - } - } - - if result.contents === infinity || result.contents === 0 { - None // No safe checkpoint found - } else { - Some(result.contents) - } -} diff --git a/apps/hypersync-indexer/generated/src/eventFetching/NetworkSources.res b/apps/hypersync-indexer/generated/src/eventFetching/NetworkSources.res deleted file mode 100644 index b402c2301..000000000 --- a/apps/hypersync-indexer/generated/src/eventFetching/NetworkSources.res +++ /dev/null @@ -1,95 +0,0 @@ -open Belt - -type rpc = { - url: string, - sourceFor: 
Source.sourceFor, - syncConfig?: Config.sourceSyncOptions, -} - -let getSyncConfig = ( - { - ?initialBlockInterval, - ?backoffMultiplicative, - ?accelerationAdditive, - ?intervalCeiling, - ?backoffMillis, - ?queryTimeoutMillis, - ?fallbackStallTimeout, - }: Config.sourceSyncOptions, -): Config.sourceSync => { - let queryTimeoutMillis = queryTimeoutMillis->Option.getWithDefault(20_000) - { - initialBlockInterval: Env.Configurable.SyncConfig.initialBlockInterval->Option.getWithDefault( - initialBlockInterval->Option.getWithDefault(10_000), - ), - // After an RPC error, how much to scale back the number of blocks requested at once - backoffMultiplicative: Env.Configurable.SyncConfig.backoffMultiplicative->Option.getWithDefault( - backoffMultiplicative->Option.getWithDefault(0.8), - ), - // Without RPC errors or timeouts, how much to increase the number of blocks requested by for the next batch - accelerationAdditive: Env.Configurable.SyncConfig.accelerationAdditive->Option.getWithDefault( - accelerationAdditive->Option.getWithDefault(500), - ), - // Do not further increase the block interval past this limit - intervalCeiling: Env.Configurable.SyncConfig.intervalCeiling->Option.getWithDefault( - intervalCeiling->Option.getWithDefault(10_000), - ), - // After an error, how long to wait before retrying - backoffMillis: backoffMillis->Option.getWithDefault(5000), - // How long to wait before cancelling an RPC request - queryTimeoutMillis, - fallbackStallTimeout: fallbackStallTimeout->Option.getWithDefault(queryTimeoutMillis / 2), - } -} - -let evm = ( - ~chain, - ~contracts: array, - ~hyperSync, - ~allEventSignatures, - ~shouldUseHypersyncClientDecoder, - ~rpcs: array, - ~lowercaseAddresses, -) => { - let eventRouter = - contracts - ->Belt.Array.flatMap(contract => contract.events) - ->EventRouter.fromEvmEventModsOrThrow(~chain) - - let sources = switch hyperSync { - | Some(endpointUrl) => [ - HyperSyncSource.make({ - chain, - contracts, - endpointUrl, - 
allEventSignatures, - eventRouter, - shouldUseHypersyncClientDecoder, - apiToken: Env.envioApiToken, - clientMaxRetries: Env.hyperSyncClientMaxRetries, - clientTimeoutMillis: Env.hyperSyncClientTimeoutMillis, - lowercaseAddresses, - serializationFormat: Env.hypersyncClientSerializationFormat, - enableQueryCaching: Env.hypersyncClientEnableQueryCaching, - }), - ] - | _ => [] - } - rpcs->Js.Array2.forEach(({?syncConfig, url, sourceFor}) => { - let _ = sources->Js.Array2.push( - RpcSource.make({ - chain, - sourceFor, - contracts, - syncConfig: getSyncConfig(syncConfig->Option.getWithDefault({})), - url, - eventRouter, - allEventSignatures, - shouldUseHypersyncClientDecoder, - lowercaseAddresses, - }), - ) - }) - - sources -} diff --git a/apps/hypersync-indexer/generated/src/globalState/GlobalState.res b/apps/hypersync-indexer/generated/src/globalState/GlobalState.res deleted file mode 100644 index 030aff31b..000000000 --- a/apps/hypersync-indexer/generated/src/globalState/GlobalState.res +++ /dev/null @@ -1,1188 +0,0 @@ -open Belt - -type chain = ChainMap.Chain.t -type rollbackState = - | NoRollback - | ReorgDetected({chain: chain, blockNumber: int}) - | FindingReorgDepth - | FoundReorgDepth({chain: chain, rollbackTargetBlockNumber: int}) - | RollbackReady({diffInMemoryStore: InMemoryStore.t, eventsProcessedDiffByChain: dict}) - -module WriteThrottlers = { - type t = { - chainMetaData: Throttler.t, - pruneStaleEntityHistory: Throttler.t, - } - let make = (): t => { - let chainMetaData = { - let intervalMillis = Env.ThrottleWrites.chainMetadataIntervalMillis - let logger = Logging.createChild( - ~params={ - "context": "Throttler for chain metadata writes", - "intervalMillis": intervalMillis, - }, - ) - Throttler.make(~intervalMillis, ~logger) - } - - let pruneStaleEntityHistory = { - let intervalMillis = Env.ThrottleWrites.pruneStaleDataIntervalMillis - let logger = Logging.createChild( - ~params={ - "context": "Throttler for pruning stale entity history data", - 
"intervalMillis": intervalMillis, - }, - ) - Throttler.make(~intervalMillis, ~logger) - } - {chainMetaData, pruneStaleEntityHistory} - } -} - -type t = { - indexer: Indexer.t, - chainManager: ChainManager.t, - processedBatches: int, - currentlyProcessingBatch: bool, - rollbackState: rollbackState, - indexerStartTime: Js.Date.t, - writeThrottlers: WriteThrottlers.t, - loadManager: LoadManager.t, - keepProcessAlive: bool, - //Initialized as 0, increments, when rollbacks occur to invalidate - //responses based on the wrong stateId - id: int, -} - -let make = ( - ~indexer: Indexer.t, - ~chainManager: ChainManager.t, - ~isDevelopmentMode=false, - ~shouldUseTui=false, -) => { - { - indexer, - currentlyProcessingBatch: false, - processedBatches: 0, - chainManager, - indexerStartTime: Js.Date.make(), - rollbackState: NoRollback, - writeThrottlers: WriteThrottlers.make(), - loadManager: LoadManager.make(), - keepProcessAlive: isDevelopmentMode || shouldUseTui, - id: 0, - } -} - -let getId = self => self.id -let incrementId = self => {...self, id: self.id + 1} -let setChainManager = (self, chainManager) => { - ...self, - chainManager, -} - -let isPreparingRollback = state => - switch state.rollbackState { - | NoRollback - | // We already updated fetch states here - // so we treat it as not rolling back - RollbackReady(_) => false - | FindingReorgDepth - | ReorgDetected(_) - | FoundReorgDepth(_) => true - } - -type partitionQueryResponse = { - chain: chain, - response: Source.blockRangeFetchResponse, - query: FetchState.query, -} - -type shouldExit = ExitWithSuccess | NoExit - -// Need to dispatch an action for every async operation -// to get access to the latest state. -type action = - // After a response is received, we validate it with the new state - // if there's no reorg to continue processing the response. 
- | ValidatePartitionQueryResponse(partitionQueryResponse) - // This should be a separate action from ValidatePartitionQueryResponse - // because when processing the response, there might be an async contract registration. - // So after it's finished we dispatch the submit action to get the latest fetch state. - | SubmitPartitionQueryResponse({ - newItems: array, - newItemsWithDcs: array, - currentBlockHeight: int, - latestFetchedBlock: FetchState.blockNumberAndTimestamp, - query: FetchState.query, - chain: chain, - }) - | FinishWaitingForNewBlock({chain: chain, currentBlockHeight: int}) - | EventBatchProcessed({batch: Batch.t}) - | StartProcessingBatch - | StartFindingReorgDepth - | FindReorgDepth({chain: chain, rollbackTargetBlockNumber: int}) - | EnterReorgThreshold - | UpdateQueues({ - progressedChainsById: dict, - // Needed to prevent overwriting the blockLag - // set by EnterReorgThreshold - shouldEnterReorgThreshold: bool, - }) - | SuccessExit - | ErrorExit(ErrorHandling.t) - | SetRollbackState({ - diffInMemoryStore: InMemoryStore.t, - rollbackedChainManager: ChainManager.t, - eventsProcessedDiffByChain: dict, - }) - -type queryChain = CheckAllChains | Chain(chain) -type task = - | NextQuery(queryChain) - | ProcessPartitionQueryResponse(partitionQueryResponse) - | ProcessEventBatch - | UpdateChainMetaDataAndCheckForExit(shouldExit) - | Rollback - | PruneStaleEntityHistory - -let updateChainFetcherCurrentBlockHeight = (chainFetcher: ChainFetcher.t, ~currentBlockHeight) => { - if currentBlockHeight > chainFetcher.currentBlockHeight { - Prometheus.setSourceChainHeight( - ~blockNumber=currentBlockHeight, - ~chainId=chainFetcher.chainConfig.id, - ) - - { - ...chainFetcher, - currentBlockHeight, - } - } else { - chainFetcher - } -} - -let updateChainMetadataTable = ( - cm: ChainManager.t, - ~persistence: Persistence.t, - ~throttler: Throttler.t, -) => { - let chainsData: dict = Js.Dict.empty() - - cm.chainFetchers - ->ChainMap.values - ->Belt.Array.forEach(cf => { 
- chainsData->Js.Dict.set( - cf.chainConfig.id->Belt.Int.toString, - { - blockHeight: cf.currentBlockHeight, - firstEventBlockNumber: cf.firstEventBlockNumber->Js.Null.fromOption, - isHyperSync: (cf.sourceManager->SourceManager.getActiveSource).poweredByHyperSync, - latestFetchedBlockNumber: cf.fetchState->FetchState.bufferBlockNumber, - timestampCaughtUpToHeadOrEndblock: cf.timestampCaughtUpToHeadOrEndblock->Js.Null.fromOption, - numBatchesFetched: cf.numBatchesFetched, - }, - ) - }) - - //Don't await this set, it can happen in its own time - throttler->Throttler.schedule(() => - persistence.sql - ->InternalTable.Chains.setMeta(~pgSchema=Db.publicSchema, ~chainsData) - ->Promise.ignoreValue - ) -} - -/** -Takes in a chain manager and sets all chains timestamp caught up to head -when valid state lines up and returns an updated chain manager -*/ -let updateProgressedChains = (chainManager: ChainManager.t, ~batch: Batch.t) => { - Prometheus.ProgressBatchCount.increment() - - let nextQueueItemIsNone = chainManager->ChainManager.nextItemIsNone - - let allChainsAtHead = chainManager->ChainManager.isProgressAtHead - //Update the timestampCaughtUpToHeadOrEndblock values - let chainFetchers = chainManager.chainFetchers->ChainMap.map(cf => { - let chain = ChainMap.Chain.makeUnsafe(~chainId=cf.chainConfig.id) - - let maybeChainAfterBatch = - batch.progressedChainsById->Utils.Dict.dangerouslyGetByIntNonOption( - chain->ChainMap.Chain.toChainId, - ) - - let cf = switch maybeChainAfterBatch { - | Some(chainAfterBatch) => { - if cf.committedProgressBlockNumber !== chainAfterBatch.progressBlockNumber { - Prometheus.ProgressBlockNumber.set( - ~blockNumber=chainAfterBatch.progressBlockNumber, - ~chainId=chain->ChainMap.Chain.toChainId, - ) - } - if cf.numEventsProcessed !== chainAfterBatch.totalEventsProcessed { - Prometheus.ProgressEventsCount.set( - ~processedCount=chainAfterBatch.totalEventsProcessed, - ~chainId=chain->ChainMap.Chain.toChainId, - ) - } - - // Calculate and set 
latency metrics - switch batch->Batch.findLastEventItem(~chainId=chain->ChainMap.Chain.toChainId) { - | Some(eventItem) => { - let blockTimestamp = eventItem.event.block->Types.Block.getTimestamp - let currentTimeMs = Js.Date.now()->Float.toInt - let blockTimestampMs = blockTimestamp * 1000 - let latencyMs = currentTimeMs - blockTimestampMs - - Prometheus.ProgressLatency.set(~latencyMs, ~chainId=chain->ChainMap.Chain.toChainId) - } - | None => () - } - - { - ...cf, - // Since we process per chain always in order, - // we need to calculate it once, by using the first item in a batch - firstEventBlockNumber: switch cf.firstEventBlockNumber { - | Some(_) => cf.firstEventBlockNumber - | None => batch->Batch.findFirstEventBlockNumber(~chainId=chain->ChainMap.Chain.toChainId) - }, - committedProgressBlockNumber: chainAfterBatch.progressBlockNumber, - numEventsProcessed: chainAfterBatch.totalEventsProcessed, - isProgressAtHead: cf.isProgressAtHead || chainAfterBatch.isProgressAtHeadWhenBatchCreated, - safeCheckpointTracking: switch cf.safeCheckpointTracking { - | Some(safeCheckpointTracking) => - Some( - safeCheckpointTracking->SafeCheckpointTracking.updateOnNewBatch( - ~sourceBlockNumber=cf.currentBlockHeight, - ~chainId=chain->ChainMap.Chain.toChainId, - ~batchCheckpointIds=batch.checkpointIds, - ~batchCheckpointBlockNumbers=batch.checkpointBlockNumbers, - ~batchCheckpointChainIds=batch.checkpointChainIds, - ), - ) - | None => None - }, - } - } - | None => cf - } - - /* strategy for TUI synced status: - * Firstly -> only update synced status after batch is processed (not on batch creation). 
But also set when a batch tries to be created and there is no batch - * - * Secondly -> reset timestampCaughtUpToHead and isFetching at head when dynamic contracts get registered to a chain if they are not within 0.001 percent of the current block height - * - * New conditions for valid synced: - * - * CASE 1 (chains are being synchronised at the head) - * - * All chain fetchers are fetching at the head AND - * No events that can be processed on the queue (even if events still exist on the individual queues) - * CASE 2 (chain finishes earlier than any other chain) - * - * CASE 3 endblock has been reached and latest processed block is greater than or equal to endblock (both fields must be Some) - * - * The given chain fetcher is fetching at the head or latest processed block >= endblock - * The given chain has processed all events on the queue - * see https://github.com/Float-Capital/indexer/pull/1388 */ - if cf->ChainFetcher.hasProcessedToEndblock { - // in the case this is already set, don't reset and instead propagate the existing value - let timestampCaughtUpToHeadOrEndblock = - cf.timestampCaughtUpToHeadOrEndblock->Option.isSome - ? 
cf.timestampCaughtUpToHeadOrEndblock - : Js.Date.make()->Some - { - ...cf, - timestampCaughtUpToHeadOrEndblock, - } - } else if cf.timestampCaughtUpToHeadOrEndblock->Option.isNone && cf.isProgressAtHead { - //Only calculate and set timestampCaughtUpToHeadOrEndblock if chain fetcher is at the head and - //its not already set - //CASE1 - //All chains are caught up to head chainManager queue returns None - //Meaning we are busy synchronizing chains at the head - if nextQueueItemIsNone && allChainsAtHead { - { - ...cf, - timestampCaughtUpToHeadOrEndblock: Js.Date.make()->Some, - } - } else { - //CASE2 -> Only calculate if case1 fails - //All events have been processed on the chain fetchers queue - //Other chains may be busy syncing - let hasNoMoreEventsToProcess = cf->ChainFetcher.hasNoMoreEventsToProcess - - if hasNoMoreEventsToProcess { - { - ...cf, - timestampCaughtUpToHeadOrEndblock: Js.Date.make()->Some, - } - } else { - //Default to just returning cf - cf - } - } - } else { - //Default to just returning cf - cf - } - }) - - let allChainsSyncedAtHead = - chainFetchers - ->ChainMap.values - ->Array.every(cf => cf.timestampCaughtUpToHeadOrEndblock->Option.isSome) - - if allChainsSyncedAtHead { - Prometheus.setAllChainsSyncedToHead() - } - - { - ...chainManager, - committedCheckpointId: switch batch.checkpointIds->Utils.Array.last { - | Some(checkpointId) => checkpointId - | None => chainManager.committedCheckpointId - }, - chainFetchers, - } -} - -let validatePartitionQueryResponse = ( - state, - {chain, response, query} as partitionQueryResponse: partitionQueryResponse, -) => { - let chainFetcher = state.chainManager.chainFetchers->ChainMap.get(chain) - let { - parsedQueueItems, - latestFetchedBlockNumber, - stats, - currentBlockHeight, - reorgGuard, - fromBlockQueried, - } = response - - if currentBlockHeight > chainFetcher.currentBlockHeight { - Prometheus.SourceHeight.set( - ~blockNumber=currentBlockHeight, - ~chainId=chainFetcher.chainConfig.id, - // The 
currentBlockHeight from response won't necessarily - // belong to the currently active source. - // But for simplicity, assume it does. - ~sourceName=(chainFetcher.sourceManager->SourceManager.getActiveSource).name, - ) - } - - if Env.Benchmark.shouldSaveData { - Benchmark.addBlockRangeFetched( - ~totalTimeElapsed=stats.totalTimeElapsed, - ~parsingTimeElapsed=stats.parsingTimeElapsed->Belt.Option.getWithDefault(0), - ~pageFetchTime=stats.pageFetchTime->Belt.Option.getWithDefault(0), - ~chainId=chain->ChainMap.Chain.toChainId, - ~fromBlock=fromBlockQueried, - ~toBlock=latestFetchedBlockNumber, - ~numEvents=parsedQueueItems->Array.length, - ~numAddresses=query.addressesByContractName->FetchState.addressesByContractNameCount, - ~queryName=switch query { - | {target: Merge(_)} => `Merge Query` - | {selection: {dependsOnAddresses: false}} => `Wildcard Query` - | {selection: {dependsOnAddresses: true}} => `Normal Query` - }, - ) - } - - let (updatedReorgDetection, reorgResult: ReorgDetection.reorgResult) = - chainFetcher.reorgDetection->ReorgDetection.registerReorgGuard(~reorgGuard, ~currentBlockHeight) - - let updatedChainFetcher = { - ...chainFetcher, - reorgDetection: updatedReorgDetection, - } - - let nextState = { - ...state, - chainManager: { - ...state.chainManager, - chainFetchers: state.chainManager.chainFetchers->ChainMap.set(chain, updatedChainFetcher), - }, - } - - let rollbackWithReorgDetectedBlockNumber = switch reorgResult { - | ReorgDetected(reorgDetected) => { - chainFetcher.logger->Logging.childInfo( - reorgDetected->ReorgDetection.reorgDetectedToLogParams( - ~shouldRollbackOnReorg=state.indexer.config.shouldRollbackOnReorg, - ), - ) - Prometheus.ReorgCount.increment(~chain) - Prometheus.ReorgDetectionBlockNumber.set( - ~blockNumber=reorgDetected.scannedBlock.blockNumber, - ~chain, - ) - if state.indexer.config.shouldRollbackOnReorg { - Some(reorgDetected.scannedBlock.blockNumber) - } else { - None - } - } - | NoReorg => None - } - - switch 
rollbackWithReorgDetectedBlockNumber { - | None => (nextState, [ProcessPartitionQueryResponse(partitionQueryResponse)]) - | Some(reorgDetectedBlockNumber) => { - let chainManager = switch state.rollbackState { - | RollbackReady({eventsProcessedDiffByChain}) => { - ...state.chainManager, - // Restore event counters for ALL chains, not just the reorg chain. - // The previous rollback subtracted from all chains' counters, - // but was never committed to DB. So we must undo the subtraction - // for every chain before the new rollback subtracts again. - chainFetchers: state.chainManager.chainFetchers->ChainMap.mapWithKey((c, chainFetcher) => { - switch eventsProcessedDiffByChain->Utils.Dict.dangerouslyGetByIntNonOption( - c->ChainMap.Chain.toChainId, - ) { - | Some(eventsProcessedDiff) => { - ...chainFetcher, - // Since we detected a reorg, until rollback wasn't completed in the db - // We return the events processed counter to the pre-rollback value, - // to decrease it once more for the new rollback. 
- numEventsProcessed: chainFetcher.numEventsProcessed + eventsProcessedDiff, - } - | None => chainFetcher - } - }), - } - | _ => state.chainManager - } - ( - { - ...nextState->incrementId, - chainManager, - rollbackState: ReorgDetected({ - chain, - blockNumber: reorgDetectedBlockNumber, - }), - }, - [Rollback], - ) - } - } -} - -let submitPartitionQueryResponse = ( - state, - ~newItems, - ~newItemsWithDcs, - ~currentBlockHeight, - ~latestFetchedBlock, - ~query, - ~chain, -) => { - let chainFetcher = state.chainManager.chainFetchers->ChainMap.get(chain) - - let updatedChainFetcher = - chainFetcher - ->ChainFetcher.handleQueryResult(~query, ~latestFetchedBlock, ~newItems, ~newItemsWithDcs) - ->Utils.unwrapResultExn - ->updateChainFetcherCurrentBlockHeight(~currentBlockHeight) - - let updatedChainFetcher = { - ...updatedChainFetcher, - numBatchesFetched: updatedChainFetcher.numBatchesFetched + 1, - } - - if !chainFetcher.isProgressAtHead && updatedChainFetcher.isProgressAtHead { - updatedChainFetcher.logger->Logging.childInfo("All events have been fetched") - } - - let nextState = { - ...state, - chainManager: { - ...state.chainManager, - chainFetchers: state.chainManager.chainFetchers->ChainMap.set(chain, updatedChainFetcher), - }, - } - - ( - nextState, - [UpdateChainMetaDataAndCheckForExit(NoExit), ProcessEventBatch, NextQuery(Chain(chain))], - ) -} - -let processPartitionQueryResponse = async ( - state, - {chain, response, query}: partitionQueryResponse, - ~dispatchAction, -) => { - let { - parsedQueueItems, - latestFetchedBlockNumber, - currentBlockHeight, - latestFetchedBlockTimestamp, - } = response - - let itemsWithContractRegister = [] - let newItems = [] - - for idx in 0 to parsedQueueItems->Array.length - 1 { - let item = parsedQueueItems->Array.getUnsafe(idx) - let eventItem = item->Internal.castUnsafeEventItem - if eventItem.eventConfig.contractRegister !== None { - itemsWithContractRegister->Array.push(item) - } - - // TODO: Don't really need to keep it 
in the queue - // when there's no handler (besides raw_events, processed counter, and dcsToStore consuming) - newItems->Array.push(item) - } - - let newItemsWithDcs = switch itemsWithContractRegister { - | [] as empty => empty - | _ => - await ChainFetcher.runContractRegistersOrThrow( - ~itemsWithContractRegister, - ~chain, - ~config=state.indexer.config, - ) - } - - dispatchAction( - SubmitPartitionQueryResponse({ - newItems, - newItemsWithDcs, - currentBlockHeight, - latestFetchedBlock: { - blockNumber: latestFetchedBlockNumber, - blockTimestamp: latestFetchedBlockTimestamp, - }, - chain, - query, - }), - ) -} - -let updateChainFetcher = (chainFetcherUpdate, ~state, ~chain) => { - ( - { - ...state, - chainManager: { - ...state.chainManager, - chainFetchers: state.chainManager.chainFetchers->ChainMap.update(chain, chainFetcherUpdate), - }, - }, - [], - ) -} - -let onEnterReorgThreshold = (~state: t) => { - Logging.info("Reorg threshold reached") - Prometheus.ReorgThreshold.set(~isInReorgThreshold=true) - - let chainFetchers = state.chainManager.chainFetchers->ChainMap.map(chainFetcher => { - { - ...chainFetcher, - fetchState: chainFetcher.fetchState->FetchState.updateInternal( - ~blockLag=Env.indexingBlockLag->Option.getWithDefault(0), - ), - } - }) - - { - ...state, - chainManager: { - ...state.chainManager, - chainFetchers, - isInReorgThreshold: true, - }, - } -} - -let actionReducer = (state: t, action: action) => { - switch action { - | FinishWaitingForNewBlock({chain, currentBlockHeight}) => { - let isBelowReorgThreshold = - !state.chainManager.isInReorgThreshold && state.indexer.config.shouldRollbackOnReorg - let shouldEnterReorgThreshold = - isBelowReorgThreshold && - state.chainManager.chainFetchers - ->ChainMap.values - ->Array.every(chainFetcher => { - chainFetcher.fetchState->FetchState.isReadyToEnterReorgThreshold(~currentBlockHeight) - }) - - let state = { - ...state, - chainManager: { - ...state.chainManager, - chainFetchers: 
state.chainManager.chainFetchers->ChainMap.update(chain, chainFetcher => { - chainFetcher->updateChainFetcherCurrentBlockHeight(~currentBlockHeight) - }), - }, - } - - if shouldEnterReorgThreshold { - (onEnterReorgThreshold(~state), [NextQuery(CheckAllChains)]) - } else { - (state, [NextQuery(Chain(chain))]) - } - } - | ValidatePartitionQueryResponse(partitionQueryResponse) => - state->validatePartitionQueryResponse(partitionQueryResponse) - | SubmitPartitionQueryResponse({ - newItems, - newItemsWithDcs, - currentBlockHeight, - latestFetchedBlock, - query, - chain, - }) => - state->submitPartitionQueryResponse( - ~newItems, - ~newItemsWithDcs, - ~currentBlockHeight, - ~latestFetchedBlock, - ~query, - ~chain, - ) - | EventBatchProcessed({batch}) => - let maybePruneEntityHistory = - state.indexer.config->Config.shouldPruneHistory( - ~isInReorgThreshold=state.chainManager.isInReorgThreshold, - ) - ? [PruneStaleEntityHistory] - : [] - - let state = { - ...state, - // Can safely reset rollback state, since overwrite is not possible. - // If rollback is pending, the EventBatchProcessed will be handled by the invalid action reducer instead. - rollbackState: NoRollback, - chainManager: state.chainManager->updateProgressedChains(~batch), - currentlyProcessingBatch: false, - processedBatches: state.processedBatches + 1, - } - - let shouldExit = EventProcessing.allChainsEventsProcessedToEndblock( - state.chainManager.chainFetchers, - ) - ? 
{ - Logging.info("All chains are caught up to end blocks.") - - // Keep the indexer process running when in development mode (for Dev Console) - // or when TUI is enabled (for display) - if state.keepProcessAlive { - NoExit - } else { - ExitWithSuccess - } - } - : NoExit - - ( - state, - [UpdateChainMetaDataAndCheckForExit(shouldExit), ProcessEventBatch]->Array.concat( - maybePruneEntityHistory, - ), - ) - - | StartProcessingBatch => ({...state, currentlyProcessingBatch: true}, []) - | StartFindingReorgDepth => ({...state, rollbackState: FindingReorgDepth}, []) - | FindReorgDepth({chain, rollbackTargetBlockNumber}) => ( - { - ...state, - rollbackState: FoundReorgDepth({ - chain, - rollbackTargetBlockNumber, - }), - }, - [Rollback], - ) - | EnterReorgThreshold => (onEnterReorgThreshold(~state), [NextQuery(CheckAllChains)]) - | UpdateQueues({progressedChainsById, shouldEnterReorgThreshold}) => - let chainFetchers = state.chainManager.chainFetchers->ChainMap.mapWithKey((chain, cf) => { - let fs = switch progressedChainsById->Utils.Dict.dangerouslyGetByIntNonOption( - chain->ChainMap.Chain.toChainId, - ) { - | Some(chainAfterBatch) => chainAfterBatch.fetchState - | None => cf.fetchState - } - { - ...cf, - fetchState: shouldEnterReorgThreshold - ? 
fs->FetchState.updateInternal(~blockLag=Env.indexingBlockLag->Option.getWithDefault(0)) - : fs, - } - }) - - let chainManager = { - ...state.chainManager, - chainFetchers, - } - - ( - { - ...state, - chainManager, - }, - [NextQuery(CheckAllChains)], - ) - | SetRollbackState({diffInMemoryStore, rollbackedChainManager, eventsProcessedDiffByChain}) => ( - { - ...state, - rollbackState: RollbackReady({ - diffInMemoryStore, - eventsProcessedDiffByChain, - }), - chainManager: rollbackedChainManager, - }, - [NextQuery(CheckAllChains), ProcessEventBatch], - ) - | SuccessExit => { - Logging.info("Exiting with success") - NodeJs.process->NodeJs.exitWithCode(Success) - (state, []) - } - | ErrorExit(errHandler) => - errHandler->ErrorHandling.log - NodeJs.process->NodeJs.exitWithCode(Failure) - (state, []) - } -} - -let invalidatedActionReducer = (state: t, action: action) => - switch action { - | EventBatchProcessed({batch}) if state->isPreparingRollback => - Logging.info("Finished processing batch before rollback, actioning rollback") - ( - { - ...state, - chainManager: state.chainManager->updateProgressedChains(~batch), - currentlyProcessingBatch: false, - processedBatches: state.processedBatches + 1, - }, - [Rollback], - ) - | ErrorExit(_) => actionReducer(state, action) - | _ => - Logging.trace({ - "msg": "Invalidated action discarded", - "action": action->S.convertOrThrow(Utils.Schema.variantTag), - }) - (state, []) - } - -let checkAndFetchForChain = ( - //Used for dependency injection for tests - ~waitForNewBlock, - ~executeQuery, - //required args - ~state, - ~dispatchAction, -) => async chain => { - let chainFetcher = state.chainManager.chainFetchers->ChainMap.get(chain) - if !isPreparingRollback(state) { - let {currentBlockHeight, fetchState} = chainFetcher - - await chainFetcher.sourceManager->SourceManager.fetchNext( - ~fetchState, - ~waitForNewBlock=(~knownHeight) => - chainFetcher.sourceManager->waitForNewBlock(~knownHeight), - ~onNewBlock=(~knownHeight) => - 
dispatchAction(FinishWaitingForNewBlock({chain, currentBlockHeight: knownHeight})), - ~currentBlockHeight, - ~executeQuery=async query => { - try { - let response = await chainFetcher.sourceManager->executeQuery(~query, ~currentBlockHeight) - dispatchAction(ValidatePartitionQueryResponse({chain, response, query})) - } catch { - | exn => dispatchAction(ErrorExit(exn->ErrorHandling.make)) - } - }, - ~stateId=state.id, - ) - } -} - -let injectedTaskReducer = ( - //Used for dependency injection for tests - ~waitForNewBlock, - ~executeQuery, - ~getLastKnownValidBlock, -) => async ( - //required args - state: t, - task: task, - ~dispatchAction, -) => { - switch task { - | ProcessPartitionQueryResponse(partitionQueryResponse) => - state->processPartitionQueryResponse(partitionQueryResponse, ~dispatchAction)->Promise.done - | PruneStaleEntityHistory => - let runPrune = async () => { - switch state.chainManager->ChainManager.getSafeCheckpointId { - | None => () - | Some(safeCheckpointId) => - await state.indexer.persistence.sql->InternalTable.Checkpoints.pruneStaleCheckpoints( - ~pgSchema=Env.Db.publicSchema, - ~safeCheckpointId, - ) - - for idx in 0 to Entities.allEntities->Array.length - 1 { - if idx !== 0 { - // Add some delay between entities - // To unblock the pg client if it's needed for something else - await Utils.delay(1000) - } - let entityConfig = Entities.allEntities->Array.getUnsafe(idx) - let timeRef = Hrtime.makeTimer() - try { - let () = - await state.indexer.persistence.sql->EntityHistory.pruneStaleEntityHistory( - ~entityName=entityConfig.name, - ~entityIndex=entityConfig.index, - ~pgSchema=Env.Db.publicSchema, - ~safeCheckpointId, - ) - } catch { - | exn => - exn->ErrorHandling.mkLogAndRaise( - ~msg=`Failed to prune stale entity history`, - ~logger=Logging.createChild( - ~params={ - "entityName": entityConfig.name, - "safeCheckpointId": safeCheckpointId, - }, - ), - ) - } - Prometheus.RollbackHistoryPrune.increment( - 
~timeMillis=Hrtime.timeSince(timeRef)->Hrtime.toMillis, - ~entityName=entityConfig.name, - ) - } - } - } - state.writeThrottlers.pruneStaleEntityHistory->Throttler.schedule(runPrune) - - | UpdateChainMetaDataAndCheckForExit(shouldExit) => - let {chainManager, writeThrottlers} = state - switch shouldExit { - | ExitWithSuccess => - updateChainMetadataTable( - chainManager, - ~throttler=writeThrottlers.chainMetaData, - ~persistence=state.indexer.persistence, - ) - dispatchAction(SuccessExit) - | NoExit => - updateChainMetadataTable( - chainManager, - ~throttler=writeThrottlers.chainMetaData, - ~persistence=state.indexer.persistence, - )->ignore - } - | NextQuery(chainCheck) => - let fetchForChain = checkAndFetchForChain( - ~waitForNewBlock, - ~executeQuery, - ~state, - ~dispatchAction, - ) - - switch chainCheck { - | Chain(chain) => await chain->fetchForChain - | CheckAllChains => - //Mapping from the states chainManager so we can construct tests that don't use - //all chains - let _ = - await state.chainManager.chainFetchers - ->ChainMap.keys - ->Array.map(fetchForChain(_)) - ->Promise.all - } - | ProcessEventBatch => - if !state.currentlyProcessingBatch && !isPreparingRollback(state) { - let batch = - state.chainManager->ChainManager.createBatch( - ~batchSizeTarget=state.indexer.config.batchSize, - ) - - let progressedChainsById = batch.progressedChainsById - let totalBatchSize = batch.totalBatchSize - - let isInReorgThreshold = state.chainManager.isInReorgThreshold - let shouldSaveHistory = state.indexer.config->Config.shouldSaveHistory(~isInReorgThreshold) - - let isBelowReorgThreshold = - !state.chainManager.isInReorgThreshold && state.indexer.config.shouldRollbackOnReorg - let shouldEnterReorgThreshold = - isBelowReorgThreshold && - state.chainManager.chainFetchers - ->ChainMap.values - ->Array.every(chainFetcher => { - let fetchState = switch progressedChainsById->Utils.Dict.dangerouslyGetByIntNonOption( - chainFetcher.fetchState.chainId, - ) { - | 
Some(chainAfterBatch) => chainAfterBatch.fetchState - | None => chainFetcher.fetchState - } - fetchState->FetchState.isReadyToEnterReorgThreshold( - ~currentBlockHeight=chainFetcher.currentBlockHeight, - ) - }) - - if shouldEnterReorgThreshold { - dispatchAction(EnterReorgThreshold) - } - - if progressedChainsById->Utils.Dict.isEmpty { - () - } else { - if Env.Benchmark.shouldSaveData { - let group = "Other" - Benchmark.addSummaryData( - ~group, - ~label=`Batch Size`, - ~value=totalBatchSize->Belt.Int.toFloat, - ) - } - - dispatchAction(StartProcessingBatch) - dispatchAction(UpdateQueues({progressedChainsById, shouldEnterReorgThreshold})) - - //In the case of a rollback, use the provided in memory store - //With rolled back values - let rollbackInMemStore = switch state.rollbackState { - | RollbackReady({diffInMemoryStore}) => Some(diffInMemoryStore) - | _ => None - } - - let inMemoryStore = rollbackInMemStore->Option.getWithDefault(InMemoryStore.make(~entities=Entities.allEntities)) - - inMemoryStore->InMemoryStore.setBatchDcs(~batch, ~shouldSaveHistory) - - switch await EventProcessing.processEventBatch( - ~batch, - ~inMemoryStore, - ~isInReorgThreshold, - ~loadManager=state.loadManager, - ~indexer=state.indexer, - ~chainFetchers=state.chainManager.chainFetchers, - ) { - | exception exn => - //All casese should be handled/caught before this with better user messaging. 
- //This is just a safety in case something unexpected happens - let errHandler = - exn->ErrorHandling.make(~msg="A top level unexpected error occurred during processing") - dispatchAction(ErrorExit(errHandler)) - | res => - switch res { - | Ok() => dispatchAction(EventBatchProcessed({batch: batch})) - | Error(errHandler) => dispatchAction(ErrorExit(errHandler)) - } - } - } - } - | Rollback => - //If it isn't processing a batch currently continue with rollback otherwise wait for current batch to finish processing - switch state { - | {rollbackState: NoRollback | RollbackReady(_)} => - Js.Exn.raiseError("Internal error: Rollback initiated with invalid state") - | {rollbackState: ReorgDetected({chain, blockNumber: reorgBlockNumber})} => { - let chainFetcher = state.chainManager.chainFetchers->ChainMap.get(chain) - - dispatchAction(StartFindingReorgDepth) - let rollbackTargetBlockNumber = - await chainFetcher->getLastKnownValidBlock(~reorgBlockNumber) - - dispatchAction(FindReorgDepth({chain, rollbackTargetBlockNumber})) - } - // We can come to this case when event batch finished processing - // while we are still finding the reorg depth - // Do nothing here, just wait for reorg depth to be found - | {rollbackState: FindingReorgDepth} => () - | {rollbackState: FoundReorgDepth(_), currentlyProcessingBatch: true} => - Logging.info("Waiting for batch to finish processing before executing rollback") - | {rollbackState: FoundReorgDepth({chain: reorgChain, rollbackTargetBlockNumber})} => - let startTime = Hrtime.makeTimer() - - let chainFetcher = state.chainManager.chainFetchers->ChainMap.get(reorgChain) - - let logger = Logging.createChildFrom( - ~logger=chainFetcher.logger, - ~params={ - "action": "Rollback", - "reorgChain": reorgChain, - "targetBlockNumber": rollbackTargetBlockNumber, - }, - ) - logger->Logging.childInfo("Started rollback on reorg") - Prometheus.RollbackTargetBlockNumber.set( - ~blockNumber=rollbackTargetBlockNumber, - ~chain=reorgChain, - ) - - let 
reorgChainId = reorgChain->ChainMap.Chain.toChainId - - let rollbackTargetCheckpointId = { - switch await state.indexer.persistence.sql->InternalTable.Checkpoints.getRollbackTargetCheckpoint( - ~pgSchema=Env.Db.publicSchema, - ~reorgChainId, - ~lastKnownValidBlockNumber=rollbackTargetBlockNumber, - ) { - | [checkpoint] => checkpoint["id"] - | _ => 0 - } - } - - let eventsProcessedDiffByChain = Js.Dict.empty() - let newProgressBlockNumberPerChain = Js.Dict.empty() - let rollbackedProcessedEvents = ref(0) - - { - let rollbackProgressDiff = - await state.indexer.persistence.sql->InternalTable.Checkpoints.getRollbackProgressDiff( - ~pgSchema=Env.Db.publicSchema, - ~rollbackTargetCheckpointId, - ) - for idx in 0 to rollbackProgressDiff->Js.Array2.length - 1 { - let diff = rollbackProgressDiff->Js.Array2.unsafe_get(idx) - eventsProcessedDiffByChain->Utils.Dict.setByInt( - diff["chain_id"], - switch diff["events_processed_diff"]->Int.fromString { - | Some(eventsProcessedDiff) => { - rollbackedProcessedEvents := - rollbackedProcessedEvents.contents + eventsProcessedDiff - eventsProcessedDiff - } - | None => - Js.Exn.raiseError( - `Unexpedted case: Invalid events processed diff ${diff["events_processed_diff"]}`, - ) - }, - ) - newProgressBlockNumberPerChain->Utils.Dict.setByInt( - diff["chain_id"], - if rollbackTargetCheckpointId === 0 && diff["chain_id"] === reorgChainId { - Pervasives.min(diff["new_progress_block_number"], rollbackTargetBlockNumber) - } else { - diff["new_progress_block_number"] - }, - ) - } - } - - let chainFetchers = state.chainManager.chainFetchers->ChainMap.mapWithKey((chain, cf) => { - switch newProgressBlockNumberPerChain->Utils.Dict.dangerouslyGetByIntNonOption( - chain->ChainMap.Chain.toChainId, - ) { - | Some(newProgressBlockNumber) => - let fetchState = - cf.fetchState->FetchState.rollback(~targetBlockNumber=newProgressBlockNumber) - let newTotalEventsProcessed = - cf.numEventsProcessed - - eventsProcessedDiffByChain - 
->Utils.Dict.dangerouslyGetByIntNonOption(chain->ChainMap.Chain.toChainId) - ->Option.getUnsafe - - if cf.committedProgressBlockNumber !== newProgressBlockNumber { - Prometheus.ProgressBlockNumber.set( - ~blockNumber=newProgressBlockNumber, - ~chainId=chain->ChainMap.Chain.toChainId, - ) - } - if cf.numEventsProcessed !== newTotalEventsProcessed { - Prometheus.ProgressEventsCount.set( - ~processedCount=newTotalEventsProcessed, - ~chainId=chain->ChainMap.Chain.toChainId, - ) - } - - { - ...cf, - reorgDetection: chain == reorgChain - ? cf.reorgDetection->ReorgDetection.rollbackToValidBlockNumber( - ~blockNumber=rollbackTargetBlockNumber, - ) - : cf.reorgDetection, - safeCheckpointTracking: switch cf.safeCheckpointTracking { - | Some(safeCheckpointTracking) => - Some( - safeCheckpointTracking->SafeCheckpointTracking.rollback( - ~targetBlockNumber=newProgressBlockNumber, - ), - ) - | None => None - }, - fetchState, - committedProgressBlockNumber: newProgressBlockNumber, - numEventsProcessed: newTotalEventsProcessed, - } - - | None => - // Even without a progress diff entry, the reorg chain must have its - // reorgDetection and fetchState rolled back. Otherwise the stale block hash - // stays in dataByBlockNumber and the same reorg is re-detected on the next - // fetch, causing an infinite reorg→rollback loop. 
- if chain == reorgChain { - { - ...cf, - reorgDetection: cf.reorgDetection->ReorgDetection.rollbackToValidBlockNumber( - ~blockNumber=rollbackTargetBlockNumber, - ), - fetchState: cf.fetchState->FetchState.rollback( - ~targetBlockNumber=rollbackTargetBlockNumber, - ), - } - } else { - cf - } - } - }) - - // Construct in Memory store with rollback diff - let diff = await IO.prepareRollbackDiff( - ~rollbackTargetCheckpointId, - ~persistence=state.indexer.persistence, - ) - - let chainManager = { - ...state.chainManager, - committedCheckpointId: rollbackTargetCheckpointId, - chainFetchers, - } - - logger->Logging.childTrace({ - "msg": "Finished rollback on reorg", - "entityChanges": { - "deleted": diff["deletedEntities"], - "upserted": diff["setEntities"], - }, - "rollbackedEvents": rollbackedProcessedEvents.contents, - "beforeCheckpointId": state.chainManager.committedCheckpointId, - "targetCheckpointId": rollbackTargetCheckpointId, - }) - Prometheus.RollbackSuccess.increment( - ~timeMillis=Hrtime.timeSince(startTime)->Hrtime.toMillis, - ~rollbackedProcessedEvents=rollbackedProcessedEvents.contents, - ) - - dispatchAction( - SetRollbackState({ - diffInMemoryStore: diff["inMemStore"], - rollbackedChainManager: chainManager, - eventsProcessedDiffByChain, - }), - ) - } - } -} - -let taskReducer = injectedTaskReducer( - ~waitForNewBlock=SourceManager.waitForNewBlock, - ~executeQuery=SourceManager.executeQuery, - ~getLastKnownValidBlock=(chainFetcher, ~reorgBlockNumber) => - chainFetcher->ChainFetcher.getLastKnownValidBlock(~reorgBlockNumber), -) diff --git a/apps/hypersync-indexer/generated/src/globalState/GlobalStateManager.res b/apps/hypersync-indexer/generated/src/globalState/GlobalStateManager.res deleted file mode 100644 index 11fcc7880..000000000 --- a/apps/hypersync-indexer/generated/src/globalState/GlobalStateManager.res +++ /dev/null @@ -1,76 +0,0 @@ -open Belt -module type State = { - type t - type action - type task - - let taskReducer: (t, task, 
~dispatchAction: action => unit) => promise - let actionReducer: (t, action) => (t, array) - let invalidatedActionReducer: (t, action) => (t, array) - let getId: t => int -} - -module MakeManager = (S: State) => { - type t = {mutable state: S.t, stateUpdatedHook: option unit>, onError: exn => unit} - - let make = ( - state: S.t, - ~stateUpdatedHook: option unit>=?, - ~onError=e => { - e->ErrorHandling.make(~msg="Indexer has failed with an unexpected error")->ErrorHandling.log - NodeJs.process->NodeJs.exitWithCode(Failure) - }, - ) => { - state, - stateUpdatedHook, - onError, - } - - let rec dispatchAction = (~stateId=0, self: t, action: S.action) => { - try { - let reducer = if stateId == self.state->S.getId { - S.actionReducer - } else { - S.invalidatedActionReducer - } - let (nextState, nextTasks) = reducer(self.state, action) - switch self.stateUpdatedHook { - // In ReScript `!==` is shallow equality check rather than `!=` - // This is just a check to see if a new object reference was returned - | Some(hook) if self.state !== nextState => hook(nextState) - | _ => () - } - self.state = nextState - nextTasks->Array.forEach(task => dispatchTask(self, task)) - } catch { - | e => e->self.onError - } - } - and dispatchTask = (self, task: S.task) => { - let stateId = self.state->S.getId - Js.Global.setTimeout(() => { - if stateId !== self.state->S.getId { - Logging.info("Invalidated task discarded") - } else { - try { - S.taskReducer(self.state, task, ~dispatchAction=action => - dispatchAction(~stateId, self, action) - ) - ->Promise.catch(e => { - e->self.onError - Promise.resolve() - }) - ->ignore - } catch { - | e => e->self.onError - } - } - }, 0)->ignore - } - - let getState = self => self.state - let setState = (self: t, state: S.t) => self.state = state -} - -module Manager = MakeManager(GlobalState) -include Manager diff --git a/apps/hypersync-indexer/generated/src/globalState/GlobalStateManager.resi 
b/apps/hypersync-indexer/generated/src/globalState/GlobalStateManager.resi deleted file mode 100644 index d9ff6d8c9..000000000 --- a/apps/hypersync-indexer/generated/src/globalState/GlobalStateManager.resi +++ /dev/null @@ -1,7 +0,0 @@ -type t - -let make: (GlobalState.t, ~stateUpdatedHook: GlobalState.t => unit=?, ~onError: exn => unit=?) => t -let dispatchAction: (~stateId: int=?, t, GlobalState.action) => unit -let dispatchTask: (t, GlobalState.task) => unit -let getState: t => GlobalState.t -let setState: (t, GlobalState.t) => unit diff --git a/apps/hypersync-indexer/generated/src/ink/EnvioInkApp.res b/apps/hypersync-indexer/generated/src/ink/EnvioInkApp.res deleted file mode 100644 index 926dd288d..000000000 --- a/apps/hypersync-indexer/generated/src/ink/EnvioInkApp.res +++ /dev/null @@ -1,67 +0,0 @@ -open Ink -open Belt - -type chainData = ChainData.chainData -type appState = { - chains: array, - indexerStartTime: Js.Date.t, - config: Config.t, -} - -let getTotalNumEventsProcessed = (~chains: array) => { - chains->Array.reduce(0, (acc, chain) => { - acc + chain.progress->ChainData.getNumberOfEventsProccessed - }) -} - -module TotalEventsProcessed = { - @react.component - let make = (~totalEventsProcessed) => { - let label = "Total Events Processed: " - - {label->React.string} - - {`${totalEventsProcessed->ChainData.formatLocaleString}`->React.string} - - - } -} - -module App = { - @react.component - let make = (~appState: appState) => { - let {chains, indexerStartTime, config} = appState - let totalEventsProcessed = getTotalNumEventsProcessed(~chains) - - - {chains - ->Array.mapWithIndex((i, chainData) => { - Int.toString} chainData /> - }) - ->React.array} - - - - - - {"Development Console: "->React.string} - - {`${Env.envioAppUrl}/console`->React.string} - - - - {"GraphQL Endpoint: "->React.string} - - {`${Env.Hasura.url}/v1/graphql`->React.string} - - - - } -} - -let startApp = appState => { - let {rerender} = render() - appState => { - rerender() - } -} 
diff --git a/apps/hypersync-indexer/generated/src/ink/bindings/DateFns.res b/apps/hypersync-indexer/generated/src/ink/bindings/DateFns.res deleted file mode 100644 index a628d1447..000000000 --- a/apps/hypersync-indexer/generated/src/ink/bindings/DateFns.res +++ /dev/null @@ -1,71 +0,0 @@ -/** -Formats: -hh:mm:ss | 00:00:00 -do MMM ''yy | 1st Jan '21 -ha do MMM ''yy | 8PM 1st Jan '21 -ha | 8PM -iii | Tues -iii MMM | Tues Jan -MMM | Jan -`) -*/ -type dateFormats = - | @as("HH:mm:ss") HoursMinSec - | @as("ha") Hour - | @as("do MMM ''yy") DayMonthYear - | @as("ha do MMM ''yy") HourDayMonthYear - | @as("h:mma do MMM ''yy") HourMinDayMonthYear - | @as("iii") DayName - | @as("iii MMM") DayNameMonth - | @as("do MMM") DayMonth - | @as("MMM") Month - | @as("h:mma") HourMin - -@module("date-fns") external format: (Js.Date.t, dateFormats) => string = "format" - -type formatDistanceToNowOptions = {includeSeconds: bool} -@module("date-fns") -external formatDistanceToNow: Js.Date.t => string = "formatDistanceToNow" - -@module("date-fns") -external formatDistance: (Js.Date.t, Js.Date.t) => string = "formatDistance" - -@module("date-fns") -external formatDistanceWithOptions: (Js.Date.t, Js.Date.t, formatDistanceToNowOptions) => string = - "formatDistance" - -@module("date-fns") -external formatDistanceToNowWithOptions: (Js.Date.t, formatDistanceToNowOptions) => string = - "formatDistanceToNow" - -let formatDistanceToNowWithSeconds = (date: Js.Date.t) => - date->formatDistanceToNowWithOptions({includeSeconds: true}) - -type durationTimeFormat = { - years: int, - months: int, - weeks: int, - days: int, - hours: int, - minutes: int, - seconds: int, -} - -@module("date-fns") -external formatRelative: (Js.Date.t, Js.Date.t) => string = "formatRelative" - -type durationFormatOutput = {format: array} - -@module("date-fns") -external formatDuration: (durationTimeFormat, durationFormatOutput) => string = "formatDuration" - -type interval = {start: Js_date.t, end: Js_date.t} - 
-@module("date-fns") -external intervalToDuration: interval => durationTimeFormat = "intervalToDuration" - -//helper to convert millis elapsed to duration object -let durationFromMillis = (millis: int) => - intervalToDuration({start: 0->Utils.magic, end: millis->Utils.magic}) - -@module("date-fns") external fromUnixTime: float => Js.Date.t = "fromUnixTime" diff --git a/apps/hypersync-indexer/generated/src/ink/bindings/Ink.res b/apps/hypersync-indexer/generated/src/ink/bindings/Ink.res deleted file mode 100644 index 6dad99675..000000000 --- a/apps/hypersync-indexer/generated/src/ink/bindings/Ink.res +++ /dev/null @@ -1,355 +0,0 @@ -open Style - -type instance = { - rerender: React.element => unit, - unmount: unit => unit, - waitUntilExit: unit => promise, - clear: unit => unit, -} -type readableStream -type writableStream -type options = { - stdout?: writableStream, - stdin?: readableStream, - exitOnCtrlC?: bool, - patchConsole?: bool, - debug?: bool, -} -@module("ink") -external renderInternal: (React.element, ~options: option) => instance = "render" - -let render = (~options=?, element) => { - renderInternal(element, ~options) -} -type measurement = {width: int, height: int} - -@module("ink") -external measureElement: React.ref<'a> => measurement = "measureElement" - -module Text = { - type wrapOptions = - | @as("wrap") Wrap - | @as("truncate") Truncate - | @as("truncate-start") TruncateStart - | @as("truncate-middle") TruncateMiddle - | @as("truncate-end") TruncateEnd - @module("ink") @react.component - external make: ( - ~children: React.element, - ~color: chalkTheme=?, - ~backgroundColor: chalkTheme=?, - ~dimColor: bool=?, - ~bold: bool=?, - ~italic: bool=?, - ~underline: bool=?, - ~strikethrough: bool=?, - ~inverse: bool=?, - ~wrap: wrapOptions=?, - ) => React.element = "Text" -} - -module Box = { - @module("ink") @react.component - external make: ( - ~children: React.element=?, - ~width: numOrStr=?, - ~height: numOrStr=?, - ~minWidth: int=?, - ~minHeight: 
int=?, - ~padding: int=?, - ~paddingTop: int=?, - ~paddingBottom: int=?, - ~paddingLeft: int=?, - ~paddingRight: int=?, - ~paddingX: int=?, - ~paddingY: int=?, - ~margin: int=?, - ~marginTop: int=?, - ~marginBottom: int=?, - ~marginLeft: int=?, - ~marginRight: int=?, - ~marginX: int=?, - ~marginY: int=?, - ~gap: int=?, - ~rowGap: int=?, - ~flexGrow: int=?, - ~flexShrink: int=?, - ~flexBasis: numOrStr=?, - ~flexDirection: flexDirection=?, - ~flexWrap: flexDirection=?, - ~alignItems: alignItems=?, - ~alignSelf: alignSelf=?, - ~justifyContent: justifyContent=?, - ~display: display=?, - ~overflow: overflow=?, - ~overflowX: overflow=?, - ~overflowY: overflow=?, - ~borderStyle: borderStyle=?, - ~borderColor: chalkTheme=?, - ~borderTopColor: chalkTheme=?, - ~borderRightColor: chalkTheme=?, - ~borderBottomColor: chalkTheme=?, - ~borderLeftColor: chalkTheme=?, - ~borderDimColor: bool=?, - ~borderTopDimColor: bool=?, - ~borderRightDimColor: bool=?, - ~borderBottomDimColor: bool=?, - ~borderLeftDimColor: bool=?, - ~borderTop: bool=?, - ~borderRight: bool=?, - ~borderBottom: bool=?, - ~borderLeft: bool=?, - ) => React.element = "Box" -} - -module Newline = { - /** - Adds one or more newline characters. Must be used within components. - - */ - @module("ink") - @react.component - external make: (~count: int=?) => React.element = "Newline" -} - -module Spacer = { - /** - A flexible space that expands along the major axis of its containing layout. It's useful as a shortcut for filling all the available spaces between elements. - - For example, using in a with default flex direction (row) will position "Left" on the left side and will push "Right" to the right side. - */ - @module("ink") - @react.component - external make: unit => React.element = "Spacer" -} - -module Static = { - /** - component permanently renders its output above everything else. 
It's useful for displaying activity like completed tasks or logs - things that are not changing after they're rendered (hence the name "Static"). - - It's preferred to use for use cases like these, when you can't know or control the amount of items that need to be rendered. - */ - @module("ink") - @react.component - external make: ( - ~children: ('a, int) => React.element, - ~items: array<'a>, - ~style: styles=?, - ) => React.element = "Static" -} - -module Transform = { - /** - Transform a string representation of React components before they are written to output. For example, you might want to apply a gradient to text, add a clickable link or create some text effects. These use cases can't accept React nodes as input, they are expecting a string. That's what component does, it gives you an output string of its child components and lets you transform it in any way. - - Note: must be applied only to children components and shouldn't change the dimensions of the output, otherwise layout will be incorrect. - */ - type outputLine = string - type index = int - @module("ink") @react.component - external make: ( - ~children: string, - ~tranform: (outputLine, index) => string, - ~index: int=?, - ) => React.element = "Transform" -} - -module Hooks = { - type key = { - leftArrow: bool, - rightArrow: bool, - upArrow: bool, - downArrow: bool, - return: bool, - escape: bool, - ctrl: bool, - shift: bool, - tab: bool, - backspace: bool, - delete: bool, - pageDown: bool, - pageUp: bool, - meta: bool, - enter: bool, - } - type input = string - type inputHandler = (input, key) => unit - type options = {isActive?: bool} - - @module("ink") external useInput: (inputHandler, ~options: options=?) => unit = "useInput" - - type app = {exit: (~err: exn=?) 
=> unit} - @module("ink") external useApp: unit => app = "useApp" - - type stdin = { - stdin: readableStream, - isRawModeSupported: bool, - setRawMode: bool => unit, - } - - @module("ink") external useStdin: unit => stdin = "useStdin" - - type stdout = { - stdout: writableStream, - write: string => unit, - } - - @module("ink") external useStdout: unit => stdout = "useStdout" - - type stderr = { - stderr: writableStream, - write: string => unit, - } - - @module("ink") external useStderr: unit => stderr = "useStderr" - - type focusOptions = {autoFocus?: bool, isActive?: bool, id?: string} - type focus = {isFocused: bool} - @module("ink") external useFocus: (~options: focusOptions=?) => focus = "useFocus" - - type focusManager = { - enableFocus: unit => unit, - disableFocus: unit => unit, - focusNext: unit => unit, - focusPrevious: unit => unit, - focusId: string => unit, - } - @module("ink") - external useFocusManager: unit => focusManager = "useFocusManager" -} - -module BigText = { - type font = - | @as("block") Block - | @as("slick") Slick - | @as("tiny") Tiny - | @as("grid") Grid - | @as("pallet") Pallet - | @as("shade") Shade - | @as("simple") Simple - | @as("simpleBlock") SimpleBlock - | @as("3d") D3 - | @as("simple3d") Simple3D - | @as("chrome") Chrome - | @as("huge") Huge - type align = - | @as("left") Left - | @as("center") Center - | @as("right") Right - type backgroundColor = - | @as("transparent") Transparent - | @as("black") Black - | @as("red") Red - | @as("green") Green - | @as("yellow") Yellow - | @as("blue") Blue - | @as("magenta") Magenta - | @as("cyan") Cyan - | @as("white") White - - type color = | ...chalkTheme | @as("system") System - @module @react.component - external make: ( - ~text: string, - ~font: font=?, //default block - ~align: align=?, //default left - ~colors: array=?, //default [system] - ~backgroundColor: backgroundColor=?, //default transparent - ~letterSpacing: int=?, //default 1 - ~lineHeight: int=?, //default 1 - ~space: bool=?, 
//default true - ~maxLength: int=?, - ) => React.element = "ink-big-text" -} - -module Spinner = { - type typeOption = - | @as("dots") Dots - | @as("dots2") Dots2 - | @as("dots3") Dots3 - | @as("dots4") Dots4 - | @as("dots5") Dots5 - | @as("dots6") Dots6 - | @as("dots7") Dots7 - | @as("dots8") Dots8 - | @as("dots9") Dots9 - | @as("dots10") Dots10 - | @as("dots11") Dots11 - | @as("dots12") Dots12 - | @as("dots13") Dots13 - | @as("dots8Bit") Dots8Bit - | @as("sand") Sand - | @as("line") Line - | @as("line2") Line2 - | @as("pipe") Pipe - | @as("simpleDots") SimpleDots - | @as("simpleDotsScrolling") SimpleDotsScrolling - | @as("star") Star - | @as("star2") Star2 - | @as("flip") Flip - | @as("hamburger") Hamburger - | @as("growVertical") GrowVertical - | @as("growHorizontal") GrowHorizontal - | @as("balloon") Balloon - | @as("balloon2") Balloon2 - | @as("noise") Noise - | @as("bounce") Bounce - | @as("boxBounce") BoxBounce - | @as("boxBounce2") BoxBounce2 - | @as("triangle") Triangle - | @as("binary") Binary - | @as("arc") Arc - | @as("circle") Circle - | @as("squareCorners") SquareCorners - | @as("circleQuarters") CircleQuarters - | @as("circleHalves") CircleHalves - | @as("squish") Squish - | @as("toggle") Toggle - | @as("toggle2") Toggle2 - | @as("toggle3") Toggle3 - | @as("toggle4") Toggle4 - | @as("toggle5") Toggle5 - | @as("toggle6") Toggle6 - | @as("toggle7") Toggle7 - | @as("toggle8") Toggle8 - | @as("toggle9") Toggle9 - | @as("toggle10") Toggle10 - | @as("toggle11") Toggle11 - | @as("toggle12") Toggle12 - | @as("toggle13") Toggle13 - | @as("arrow") Arrow - | @as("arrow2") Arrow2 - | @as("arrow3") Arrow3 - | @as("bouncingBar") BouncingBar - | @as("bouncingBall") BouncingBall - | @as("smiley") Smiley - | @as("monkey") Monkey - | @as("hearts") Hearts - | @as("clock") Clock - | @as("earth") Earth - | @as("material") Material - | @as("moon") Moon - | @as("runner") Runner - | @as("pong") Pong - | @as("shark") Shark - | @as("dqpb") Dqpb - | @as("weather") Weather - | 
@as("christmas") Christmas - | @as("grenade") Grenade - | @as("point") Point - | @as("layer") Layer - | @as("betaWave") BetaWave - | @as("fingerDance") FingerDance - | @as("fistBump") FistBump - | @as("soccerHeader") SoccerHeader - | @as("mindblown") Mindblown - | @as("speaker") Speaker - | @as("orangePulse") OrangePulse - | @as("bluePulse") BluePulse - | @as("orangeBluePulse") OrangeBluePulse - | @as("timeTravel") TimeTravel - | @as("aesthetic") Aesthetic - | @as("dwarfFortress") DwarfFortress - @module("ink-spinner") @react.component - external make: (@as("type") ~type_: typeOption=?) => React.element = "default" -} diff --git a/apps/hypersync-indexer/generated/src/ink/bindings/Style.res b/apps/hypersync-indexer/generated/src/ink/bindings/Style.res deleted file mode 100644 index 8b3fc30f3..000000000 --- a/apps/hypersync-indexer/generated/src/ink/bindings/Style.res +++ /dev/null @@ -1,123 +0,0 @@ -type chalkTheme = - | @as("#9860E5") Primary - | @as("#FFBB2F") Secondary - | @as("#6CBFEE") Info - | @as("#FF8269") Danger - | @as("#3B8C3D") Success - | @as("white") White - | @as("gray") Gray - -@unboxed type numOrStr = Num(int) | Str(string) - -type textWrap = - | @as("wrap") Wrap - | @as("end") End - | @as("middle") Middle - | @as("truncate-end") TruncateEnd - | @as("truncate") Truncate - | @as("truncate-middle") TruncateMiddle - | @as("truncate-start") TruncateStart - -type position = - | @as("absolute") Absolute - | @as("relative") Relative - -type flexDirection = - | @as("row") Row - | @as("column") Column - | @as("row-reverse") RowReverse - | @as("column-reverse") ColumnReverse - -type flexWrap = - | @as("nowrap") NoWrap - | @as("wrap") Wrap - | @as("wrap-reverse") WrapReverse - -type alignItems = - | @as("flex-start") FlexStart - | @as("center") Center - | @as("flex-end") FlexEnd - | @as("stretch") Stretch - -type alignSelf = - | @as("flex-start") FlexStartSelf - | @as("center") CenterSelf - | @as("flex-end") FlexEndSelf - | @as("auto") Auto - -type 
justifyContent = - | @as("flex-start") JustifyFlexStart - | @as("flex-end") JustifyFlexEnd - | @as("space-between") SpaceBetween - | @as("space-around") SpaceAround - | @as("center") JustifyCenter - -type display = - | @as("flex") Flex - | @as("none") None - -type overflow = - | @as("visible") Visible - | @as("hidden") Hidden - -type borderStyle = - | @as("single") Single - | @as("double") Double - | @as("round") Round - | @as("bold") Bold - | @as("singleDouble") SingleDouble - | @as("doubleSingle") DoubleSingle - | @as("classic") Classic - -type styles = { - textWrap?: textWrap, - position?: position, - columnGap?: int, - rowGap?: int, - gap?: int, - margin?: int, - marginX?: int, - marginY?: int, - marginTop?: int, - marginBottom?: int, - marginLeft?: int, - marginRight?: int, - padding?: int, - paddingX?: int, - paddingY?: int, - paddingTop?: int, - paddingBottom?: int, - paddingLeft?: int, - paddingRight?: int, - flexGrow?: int, - flexShrink?: int, - flexDirection?: flexDirection, - flexBasis?: numOrStr, - flexWrap?: flexWrap, - alignItems?: alignItems, - alignSelf?: alignSelf, - justifyContent?: justifyContent, - width?: numOrStr, - height?: numOrStr, - minWidth?: numOrStr, - minHeight?: numOrStr, - display?: display, - borderStyle?: borderStyle, - borderTop?: bool, - borderBottom?: bool, - borderLeft?: bool, - borderRight?: bool, - borderColor?: chalkTheme, - borderTopColor?: chalkTheme, - borderBottomColor?: chalkTheme, - borderLeftColor?: chalkTheme, - borderRightColor?: chalkTheme, - borderDimColor?: bool, - borderTopDimColor?: bool, - borderBottomDimColor?: bool, - borderLeftDimColor?: bool, - borderRightDimColor?: bool, - overflow?: overflow, - overflowX?: overflow, - overflowY?: overflow, -} diff --git a/apps/hypersync-indexer/generated/src/ink/components/BufferedProgressBar.res b/apps/hypersync-indexer/generated/src/ink/components/BufferedProgressBar.res deleted file mode 100644 index 72aed5314..000000000 --- 
a/apps/hypersync-indexer/generated/src/ink/components/BufferedProgressBar.res +++ /dev/null @@ -1,40 +0,0 @@ -open Ink -open Belt -@react.component -let make = (~loaded, ~buffered=?, ~outOf, ~barWidth=36, ~loadingColor=Style.Secondary) => { - let maxCount = barWidth - - let loadedFraction = loaded->Int.toFloat /. outOf->Int.toFloat - let loadedCount = Pervasives.min( - Js.Math.floor_float(maxCount->Js.Int.toFloat *. loadedFraction)->Belt.Float.toInt, - maxCount, - ) - - let bufferedCount = buffered->Option.mapWithDefault(loadedCount, buffered => { - let bufferedFraction = buffered->Int.toFloat /. outOf->Int.toFloat - Pervasives.min( - Js.Math.floor_float(maxCount->Js.Int.toFloat *. bufferedFraction)->Belt.Float.toInt, - maxCount, - ) - }) - let loadedFraction = loadedFraction > 0.0 ? loadedFraction : 0.0 - let loadedPercentageStr = (loadedFraction *. 100.)->Int.fromFloat->Int.toString ++ "% " - - let loadedPercentageStrCount = loadedPercentageStr->String.length - let loadedSpaces = Pervasives.max(loadedCount - loadedPercentageStrCount, 0) - let loadedCount = Pervasives.max(loadedCount, loadedPercentageStrCount) - let bufferedCount = Pervasives.max(bufferedCount, loadedCount) - - - - {" "->Js.String2.repeat(loadedSpaces)->React.string} - {loadedPercentageStr->React.string} - - - {" "->Js.String2.repeat(bufferedCount - loadedCount)->React.string} - - - {" "->Js.String2.repeat(maxCount - bufferedCount)->React.string} - - -} diff --git a/apps/hypersync-indexer/generated/src/ink/components/ChainData.res b/apps/hypersync-indexer/generated/src/ink/components/ChainData.res deleted file mode 100644 index 946e9a4e3..000000000 --- a/apps/hypersync-indexer/generated/src/ink/components/ChainData.res +++ /dev/null @@ -1,161 +0,0 @@ -open Ink - -type syncing = { - firstEventBlockNumber: int, - latestProcessedBlock: int, - numEventsProcessed: int, -} -type synced = { - ...syncing, - timestampCaughtUpToHeadOrEndblock: Js.Date.t, -} - -type progress = SearchingForEvents | 
Syncing(syncing) | Synced(synced) - -let getNumberOfEventsProccessed = (progress: progress) => { - switch progress { - | SearchingForEvents => 0 - | Syncing(syncing) => syncing.numEventsProcessed - | Synced(synced) => synced.numEventsProcessed - } -} -type chainData = { - chain: ChainMap.Chain.t, - poweredByHyperSync: bool, - progress: progress, - latestFetchedBlockNumber: int, - currentBlockHeight: int, - numBatchesFetched: int, - endBlock: option, -} - -let minOfOption: (int, option) => int = (a: int, b: option) => { - switch (a, b) { - | (a, Some(b)) => min(a, b) - | (a, None) => a - } -} - -type number -@val external number: int => number = "Number" -@send external toLocaleString: number => string = "toLocaleString" -let formatLocaleString = n => n->number->toLocaleString - -module BlocksDisplay = { - @react.component - let make = (~latestProcessedBlock, ~currentBlockHeight) => { - - {"blocks: "->React.string} - - - {latestProcessedBlock->formatLocaleString->React.string} - - - {"/"->React.string} - {currentBlockHeight->formatLocaleString->React.string} - - - - } -} - -module SyncBar = { - @react.component - let make = ( - ~chainId, - ~loaded, - ~buffered=?, - ~outOf, - ~loadingColor, - ~poweredByHyperSync=true, - ~isSearching=false, - ) => { - - - {poweredByHyperSync ? {"⚡"->React.string} : React.null} - {"Chain ID: "->React.string} - {chainId->React.int} - {" "->React.string} - - {isSearching - ? 
- - - : } - - } -} - -@react.component -let make = (~chainData: chainData) => { - let { - chain, - progress, - poweredByHyperSync, - latestFetchedBlockNumber, - currentBlockHeight, - endBlock, - } = chainData - let chainId = chain->ChainMap.Chain.toChainId - - let toBlock = minOfOption(currentBlockHeight, endBlock) - - switch progress { - | SearchingForEvents => - - - {"Searching for events..."->React.string} - - - - - - | Syncing({firstEventBlockNumber, latestProcessedBlock, numEventsProcessed}) => - - - - - {"Events Processed: "->React.string} - - {numEventsProcessed->formatLocaleString->React.string} - - - - - - - | Synced({firstEventBlockNumber, latestProcessedBlock, numEventsProcessed}) => - - - - {"Events Processed: "->React.string} - {numEventsProcessed->React.int} - - - - - - - } -} diff --git a/apps/hypersync-indexer/generated/src/ink/components/CustomHooks.res b/apps/hypersync-indexer/generated/src/ink/components/CustomHooks.res deleted file mode 100644 index 335edbe06..000000000 --- a/apps/hypersync-indexer/generated/src/ink/components/CustomHooks.res +++ /dev/null @@ -1,114 +0,0 @@ -open Belt -module InitApi = { - type ecosystem = | @as("evm") Evm | @as("fuel") Fuel - type body = { - envioVersion: string, - envioApiToken: option, - ecosystem: ecosystem, - hyperSyncNetworks: array, - rpcNetworks: array, - } - - let bodySchema = S.object(s => { - envioVersion: s.field("envioVersion", S.string), - envioApiToken: s.field("envioApiToken", S.option(S.string)), - ecosystem: s.field("ecosystem", S.enum([Evm, Fuel])), - hyperSyncNetworks: s.field("hyperSyncNetworks", S.array(S.int)), - rpcNetworks: s.field("rpcNetworks", S.array(S.int)), - }) - - let makeBody = (~envioVersion, ~envioApiToken, ~config: Config.t) => { - let hyperSyncNetworks = [] - let rpcNetworks = [] - config.chainMap - ->ChainMap.values - ->Array.forEach(({sources, id}) => { - switch sources->Js.Array2.some(s => s.poweredByHyperSync) { - | true => hyperSyncNetworks - | false => rpcNetworks - } 
- ->Js.Array2.push(id) - ->ignore - }) - - { - envioVersion, - envioApiToken, - ecosystem: (config.ecosystem :> ecosystem), - hyperSyncNetworks, - rpcNetworks, - } - } - - type messageColor = - | @as("primary") Primary - | @as("secondary") Secondary - | @as("info") Info - | @as("danger") Danger - | @as("success") Success - | @as("white") White - | @as("gray") Gray - - let toTheme = (color: messageColor): Style.chalkTheme => - switch color { - | Primary => Primary - | Secondary => Secondary - | Info => Info - | Danger => Danger - | Success => Success - | White => White - | Gray => Gray - } - - type message = { - color: messageColor, - content: string, - } - - let messageSchema = S.object(s => { - color: s.field("color", S.enum([Primary, Secondary, Info, Danger, Success, White, Gray])), - content: s.field("content", S.string), - }) - - let client = Rest.client(Env.envioAppUrl ++ "/api") - - let route = Rest.route(() => { - method: Post, - path: "/hyperindex/init", - input: s => s.body(bodySchema), - responses: [s => s.field("messages", S.array(messageSchema))], - }) - - let getMessages = async (~config) => { - let envioVersion = Utils.EnvioPackage.json.version - let body = makeBody(~envioVersion, ~envioApiToken=Env.envioApiToken, ~config) - - switch await route->Rest.fetch(body, ~client) { - | exception exn => Error(exn->Obj.magic) - | messages => Ok(messages) - } - } -} - -type request<'ok, 'err> = Data('ok) | Loading | Err('err) - -let useMessages = (~config) => { - let (request, setRequest) = React.useState(_ => Loading) - React.useEffect0(() => { - InitApi.getMessages(~config) - ->Promise.thenResolve(res => - switch res { - | Ok(data) => setRequest(_ => Data(data)) - | Error(e) => - Logging.error({ - "msg": "Failed to load messages from envio server", - "err": e->Utils.prettifyExn, - }) - setRequest(_ => Err(e)) - } - ) - ->ignore - None - }) - request -} diff --git a/apps/hypersync-indexer/generated/src/ink/components/Messages.res 
b/apps/hypersync-indexer/generated/src/ink/components/Messages.res deleted file mode 100644 index b7df3ff25..000000000 --- a/apps/hypersync-indexer/generated/src/ink/components/Messages.res +++ /dev/null @@ -1,41 +0,0 @@ -open Belt -open Ink -module Message = { - @react.component - let make = (~message: CustomHooks.InitApi.message) => { - CustomHooks.InitApi.toTheme}> - {message.content->React.string} - - } -} - -module Notifications = { - @react.component - let make = (~children) => { - <> - - {"Notifications:"->React.string} - {children} - - } -} - -@react.component -let make = (~config) => { - let messages = CustomHooks.useMessages(~config) - <> - {switch messages { - | Data([]) | Loading => React.null //Don't show anything while loading or no messages - | Data(messages) => - - {messages - ->Array.mapWithIndex((i, message) => {Int.toString} message />}) - ->React.array} - - | Err(_) => - - - - }} - -} diff --git a/apps/hypersync-indexer/generated/src/ink/components/SyncETA.res b/apps/hypersync-indexer/generated/src/ink/components/SyncETA.res deleted file mode 100644 index f3f83e09b..000000000 --- a/apps/hypersync-indexer/generated/src/ink/components/SyncETA.res +++ /dev/null @@ -1,198 +0,0 @@ -open Ink -open Belt - -let isIndexerFullySynced = (chains: array) => { - chains->Array.reduce(true, (accum, current) => { - switch current.progress { - | Synced(_) => accum - | _ => false - } - }) -} - -let getTotalRemainingBlocks = (chains: array) => { - chains->Array.reduce(0, (accum, {progress, currentBlockHeight, latestFetchedBlockNumber, endBlock}) => { - let finalBlock = switch endBlock { - | Some(endBlock) => endBlock - | None => currentBlockHeight - } - switch progress { - | Syncing({latestProcessedBlock}) - | Synced({latestProcessedBlock}) => - finalBlock - latestProcessedBlock + accum - | SearchingForEvents => finalBlock - latestFetchedBlockNumber + accum - } - }) -} - -let getLatestTimeCaughtUpToHead = ( - chains: array, - indexerStartTime: Js.Date.t, -) => { - 
let latesttimestampCaughtUpToHeadOrEndblockFloat = chains->Array.reduce(0.0, (accum, current) => { - switch current.progress { - | Synced({timestampCaughtUpToHeadOrEndblock}) => - timestampCaughtUpToHeadOrEndblock->Js.Date.valueOf > accum - ? timestampCaughtUpToHeadOrEndblock->Js.Date.valueOf - : accum - | Syncing(_) - | SearchingForEvents => accum - } - }) - - DateFns.formatDistanceWithOptions( - indexerStartTime, - latesttimestampCaughtUpToHeadOrEndblockFloat->Js.Date.fromFloat, - {includeSeconds: true}, - ) -} - -let getTotalBlocksProcessed = (chains: array) => { - chains->Array.reduce(0, (accum, {progress, latestFetchedBlockNumber}) => { - switch progress { - | Syncing({latestProcessedBlock, firstEventBlockNumber}) - | Synced({latestProcessedBlock, firstEventBlockNumber}) => - latestProcessedBlock - firstEventBlockNumber + accum - | SearchingForEvents => latestFetchedBlockNumber + accum - } - }) -} - -let useShouldDisplayEta = (~chains: array) => { - let (shouldDisplayEta, setShouldDisplayEta) = React.useState(_ => false) - React.useEffect(() => { - //Only compute this while it is not displaying eta - if !shouldDisplayEta { - //Each chain should have fetched at least one batch - let (allChainsHaveFetchedABatch, totalNumBatchesFetched) = chains->Array.reduce((true, 0), ( - (allChainsHaveFetchedABatch, totalNumBatchesFetched), - chain, - ) => { - ( - allChainsHaveFetchedABatch && chain.numBatchesFetched >= 1, - totalNumBatchesFetched + chain.numBatchesFetched, - ) - }) - - //Min num fetched batches is num of chains + 2. All - // Chains should have fetched at least 1 batch. 
(They - // could then be blocked from fetching if they are past - //the max queue size on first batch) - // Only display once an additinal 2 batches have been fetched to allow - // eta to realistically stabalize - let numChains = chains->Array.length - let minTotalBatches = numChains + 2 - let hasMinNumBatches = totalNumBatchesFetched >= minTotalBatches - - let shouldDisplayEta = allChainsHaveFetchedABatch && hasMinNumBatches - - if shouldDisplayEta { - setShouldDisplayEta(_ => true) - } - } - - None - }, [chains]) - - shouldDisplayEta -} - -let useEta = (~chains, ~indexerStartTime) => { - let shouldDisplayEta = useShouldDisplayEta(~chains) - let (secondsToSub, setSecondsToSub) = React.useState(_ => 0.) - let (timeSinceStart, setTimeSinceStart) = React.useState(_ => 0.) - - React.useEffect2(() => { - setTimeSinceStart(_ => Js.Date.now() -. indexerStartTime->Js.Date.valueOf) - setSecondsToSub(_ => 0.) - - let intervalId = Js.Global.setInterval(() => { - setSecondsToSub(prev => prev +. 1.) - }, 1000) - - Some(() => Js.Global.clearInterval(intervalId)) - }, (chains, indexerStartTime)) - - //blocksProcessed/remainingBlocks = timeSoFar/eta - //eta = (timeSoFar/blocksProcessed) * remainingBlocks - - let blocksProcessed = getTotalBlocksProcessed(chains)->Int.toFloat - if shouldDisplayEta && blocksProcessed > 0. { - let nowDate = Js.Date.now() - let remainingBlocks = getTotalRemainingBlocks(chains)->Int.toFloat - let etaFloat = timeSinceStart /. blocksProcessed *. remainingBlocks - let millisToSub = secondsToSub *. 1000. - let etaFloat = Pervasives.max(etaFloat -. millisToSub, 0.0) //template this - let eta = (etaFloat +. 
nowDate)->Js.Date.fromFloat - let interval: DateFns.interval = {start: nowDate->Js.Date.fromFloat, end: eta} - let duration = DateFns.intervalToDuration(interval) - let formattedDuration = DateFns.formatDuration( - duration, - {format: ["hours", "minutes", "seconds"]}, - ) - let outputString = switch formattedDuration { - | "" => "less than 1 second" - | formattedDuration => formattedDuration - } - Some(outputString) - } else { - None - } -} - -module Syncing = { - @react.component - let make = (~etaStr) => { - - - {"Sync Time ETA: "->React.string} - - {etaStr->React.string} - {" ("->React.string} - - - - {" in progress"->React.string} - {")"->React.string} - - } -} - -module Synced = { - @react.component - let make = (~latestTimeCaughtUpToHeadStr) => { - - {"Time Synced: "->React.string} - {`${latestTimeCaughtUpToHeadStr}`->React.string} - {" ("->React.string} - {"synced"->React.string} - {")"->React.string} - - } -} - -module Calculating = { - @react.component - let make = () => { - - - - - {" Calculating ETA..."->React.string} - - } -} - -@react.component -let make = (~chains, ~indexerStartTime) => { - let optEta = useEta(~chains, ~indexerStartTime) - if isIndexerFullySynced(chains) { - let latestTimeCaughtUpToHeadStr = getLatestTimeCaughtUpToHead(chains, indexerStartTime) - //TODO add real time - } else { - switch optEta { - | Some(etaStr) => - | None => - } - } -} diff --git a/generated@0.1.0 b/generated@0.1.0 deleted file mode 100644 index e69de29bb..000000000 From 2cb4039b0af8e69451ddb8b9d4834bb7b2a8e5ec Mon Sep 17 00:00:00 2001 From: Pedro Binotto Date: Wed, 8 Apr 2026 13:51:56 -0300 Subject: [PATCH 04/17] fix: dockerfile --- Dockerfile.hypersync-indexer | 1 + 1 file changed, 1 insertion(+) diff --git a/Dockerfile.hypersync-indexer b/Dockerfile.hypersync-indexer index 73fe67fd8..809534610 100644 --- a/Dockerfile.hypersync-indexer +++ b/Dockerfile.hypersync-indexer @@ -16,6 +16,7 @@ COPY --from=builder /app/out/full/ . 
COPY --from=builder /app/turbo.json turbo.json WORKDIR /app/apps/hypersync-indexer RUN pnpm envio codegen +RUN npm install --prefix generated FROM base AS runner ENV NODE_ENV=production From d0d48833eeb50e481466d1860a3dbd206247be31 Mon Sep 17 00:00:00 2001 From: Pedro Binotto Date: Wed, 8 Apr 2026 14:03:58 -0300 Subject: [PATCH 05/17] fix: claude pls fix --- Dockerfile.hypersync-indexer | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile.hypersync-indexer b/Dockerfile.hypersync-indexer index 809534610..8bfdc002a 100644 --- a/Dockerfile.hypersync-indexer +++ b/Dockerfile.hypersync-indexer @@ -16,7 +16,7 @@ COPY --from=builder /app/out/full/ . COPY --from=builder /app/turbo.json turbo.json WORKDIR /app/apps/hypersync-indexer RUN pnpm envio codegen -RUN npm install --prefix generated +RUN npm install --prefix generated --legacy-peer-deps FROM base AS runner ENV NODE_ENV=production From 8e59ab99785baa5394b4515342fb54458badd0ac Mon Sep 17 00:00:00 2001 From: Pedro Binotto Date: Wed, 8 Apr 2026 14:06:27 -0300 Subject: [PATCH 06/17] fix: builder stage --- Dockerfile.hypersync-indexer | 2 +- apps/hypersync-indexer/.gitignore | 3 +- apps/hypersync-indexer/generated/package.json | 49 +++++++++++++++++++ 3 files changed, 52 insertions(+), 2 deletions(-) create mode 100644 apps/hypersync-indexer/generated/package.json diff --git a/Dockerfile.hypersync-indexer b/Dockerfile.hypersync-indexer index 8bfdc002a..8ccce2fe6 100644 --- a/Dockerfile.hypersync-indexer +++ b/Dockerfile.hypersync-indexer @@ -15,8 +15,8 @@ RUN pnpm install COPY --from=builder /app/out/full/ . 
COPY --from=builder /app/turbo.json turbo.json WORKDIR /app/apps/hypersync-indexer -RUN pnpm envio codegen RUN npm install --prefix generated --legacy-peer-deps +RUN pnpm envio codegen FROM base AS runner ENV NODE_ENV=production diff --git a/apps/hypersync-indexer/.gitignore b/apps/hypersync-indexer/.gitignore index 22fc4b26d..867e74673 100644 --- a/apps/hypersync-indexer/.gitignore +++ b/apps/hypersync-indexer/.gitignore @@ -14,7 +14,8 @@ yarn-error.log* .env # HyperIndex codegen output (regenerated by `envio codegen`) -/generated/ +/generated/* +!/generated/package.json # HyperIndex runtime state persisted_state.envio.json diff --git a/apps/hypersync-indexer/generated/package.json b/apps/hypersync-indexer/generated/package.json new file mode 100644 index 000000000..c6d1606a3 --- /dev/null +++ b/apps/hypersync-indexer/generated/package.json @@ -0,0 +1,49 @@ +{ + "name": "generated", + "version": "0.1.0", + "main": "index.js", + "types": "index.d.ts", + "private": true, + "scripts": { + "clean": "rescript clean", + "build": "rescript", + "watch": "rescript -w", + "format": "rescript format -all", + "db-up": "node -e 'require(`./src/db/Migrations.res.js`).runUpMigrations(true)'", + "db-down": "node -e 'require(`./src/db/Migrations.res.js`).runDownMigrations(true)'", + "db-setup": "node -e 'require(`./src/db/Migrations.res.js`).runUpMigrations(true, true)'", + "print-benchmark-summary": "node -e 'require(`./src/Benchmark.res.js`).Summary.printSummary()'", + "start": "node_modules/.bin/ts-node src/Index.res.js" + }, + "keywords": [ + "ReScript" + ], + "engines": { + "node": ">=18.0.0" + }, + "author": "", + "license": "MIT", + "dependencies": { + "ts-node": "10.9.1", + "@rescript/react": "0.12.1", + "bignumber.js": "9.1.2", + "date-fns": "3.3.1", + "dotenv": "16.4.5", + "ethers": "6.8.0", + "express": "4.19.2", + "ink": "3.2.0", + "ink-big-text": "1.2.0", + "ink-spinner": "4.0.3", + "js-sdsl": "4.4.2", + "pino": "8.16.1", + "postgres": "3.4.1", + "react": "18.2.0", + 
"rescript": "11.1.3", + "rescript-envsafe": "5.0.0", + "rescript-schema": "9.3.0", + "envio": "2.32.12", + "viem": "2.21.0", + "yargs": "17.7.2", + "prom-client": "15.0.0" + } +} From 20152b87f1e2fc1b02c4cb009e2390bba3070645 Mon Sep 17 00:00:00 2001 From: Pedro Binotto Date: Wed, 8 Apr 2026 15:38:03 -0300 Subject: [PATCH 07/17] feat: use ERPC --- apps/hypersync-indexer/config.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/apps/hypersync-indexer/config.yaml b/apps/hypersync-indexer/config.yaml index a007ff9d4..fe6fcacf0 100644 --- a/apps/hypersync-indexer/config.yaml +++ b/apps/hypersync-indexer/config.yaml @@ -26,8 +26,8 @@ contracts: networks: - id: 1 - hypersync_config: - url: https://eth.hypersync.xyz + rpc_config: + url: ${RPC_URL} start_block: 9380410 contracts: - name: ENSToken From 2c24319934dc7d37873822b237fd216f333c0360 Mon Sep 17 00:00:00 2001 From: Pedro Binotto Date: Wed, 8 Apr 2026 15:42:53 -0300 Subject: [PATCH 08/17] fix: hardcode for now --- apps/hypersync-indexer/config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apps/hypersync-indexer/config.yaml b/apps/hypersync-indexer/config.yaml index fe6fcacf0..88c2715fa 100644 --- a/apps/hypersync-indexer/config.yaml +++ b/apps/hypersync-indexer/config.yaml @@ -27,7 +27,7 @@ contracts: networks: - id: 1 rpc_config: - url: ${RPC_URL} + url: http://erpc.railway.internal:5000/indexer/evm/1 start_block: 9380410 contracts: - name: ENSToken From db5e2769c7558bc54bfde7f87f7b3eb3fa8ded35 Mon Sep 17 00:00:00 2001 From: Pedro Binotto Date: Wed, 8 Apr 2026 15:59:25 -0300 Subject: [PATCH 09/17] Revert "fix: hardcode for now" This reverts commit 2c24319934dc7d37873822b237fd216f333c0360. 
--- apps/hypersync-indexer/config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apps/hypersync-indexer/config.yaml b/apps/hypersync-indexer/config.yaml index 88c2715fa..fe6fcacf0 100644 --- a/apps/hypersync-indexer/config.yaml +++ b/apps/hypersync-indexer/config.yaml @@ -27,7 +27,7 @@ contracts: networks: - id: 1 rpc_config: - url: http://erpc.railway.internal:5000/indexer/evm/1 + url: ${RPC_URL} start_block: 9380410 contracts: - name: ENSToken From e90386af5deb7ae2d7fe1e11cad021e5d9f85baa Mon Sep 17 00:00:00 2001 From: Pedro Binotto Date: Wed, 8 Apr 2026 15:59:36 -0300 Subject: [PATCH 10/17] Revert "feat: use ERPC" This reverts commit 20152b87f1e2fc1b02c4cb009e2390bba3070645. --- apps/hypersync-indexer/config.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/apps/hypersync-indexer/config.yaml b/apps/hypersync-indexer/config.yaml index fe6fcacf0..a007ff9d4 100644 --- a/apps/hypersync-indexer/config.yaml +++ b/apps/hypersync-indexer/config.yaml @@ -26,8 +26,8 @@ contracts: networks: - id: 1 - rpc_config: - url: ${RPC_URL} + hypersync_config: + url: https://eth.hypersync.xyz start_block: 9380410 contracts: - name: ENSToken From 187db53d5ddad9085e5b0e3a45c6a5ccbfbf7186 Mon Sep 17 00:00:00 2001 From: Pedro Binotto Date: Thu, 9 Apr 2026 14:39:40 -0300 Subject: [PATCH 11/17] feat: enable/adjust API --- apps/api/src/database/schema.ts | 241 ++++++++---------- .../account-balance/historical.ts | 4 +- .../api/src/repositories/voting-power/aave.ts | 18 +- .../src/repositories/voting-power/general.ts | 17 +- .../src/repositories/voting-power/nouns.ts | 14 +- apps/hypersync-indexer/schema.graphql | 6 +- apps/indexer/src/lib/blockTime.ts | 15 -- apps/indexer/src/lib/enums.ts | 8 - 8 files changed, 137 insertions(+), 186 deletions(-) delete mode 100644 apps/indexer/src/lib/blockTime.ts diff --git a/apps/api/src/database/schema.ts b/apps/api/src/database/schema.ts index 091f4491f..ac7f41205 100644 --- 
a/apps/api/src/database/schema.ts +++ b/apps/api/src/database/schema.ts @@ -10,142 +10,119 @@ import { Address, zeroAddress } from "viem"; import { MetricTypesArray } from "@/lib/constants"; -export const token = pgTable("token", (drizzle) => ({ +export const token = pgTable("Token", (drizzle) => ({ id: drizzle.text().primaryKey(), name: drizzle.text(), decimals: drizzle.integer().notNull(), - totalSupply: bigint("total_supply", { mode: "bigint" }).notNull().default(0n), - delegatedSupply: bigint("delegated_supply", { mode: "bigint" }) + totalSupply: bigint("totalSupply", { mode: "bigint" }).notNull().default(0n), + delegatedSupply: bigint("delegatedSupply", { mode: "bigint" }) .notNull() .default(0n), - cexSupply: bigint("cex_supply", { mode: "bigint" }).notNull().default(0n), - dexSupply: bigint("dex_supply", { mode: "bigint" }).notNull().default(0n), - lendingSupply: bigint("lending_supply", { mode: "bigint" }) + cexSupply: bigint("cexSupply", { mode: "bigint" }).notNull().default(0n), + dexSupply: bigint("dexSupply", { mode: "bigint" }).notNull().default(0n), + lendingSupply: bigint("lendingSupply", { mode: "bigint" }) .notNull() .default(0n), - circulatingSupply: bigint("circulating_supply", { mode: "bigint" }) + circulatingSupply: bigint("circulatingSupply", { mode: "bigint" }) .notNull() .default(0n), treasury: bigint({ mode: "bigint" }).notNull().default(0n), - nonCirculatingSupply: bigint("non_circulating_supply", { mode: "bigint" }) + nonCirculatingSupply: bigint("nonCirculatingSupply", { mode: "bigint" }) .notNull() .default(0n), })); -export const account = pgTable("account", (drizzle) => ({ +export const account = pgTable("Account", (drizzle) => ({ id: drizzle.text().primaryKey(), })); export const accountBalance = pgTable( - "account_balance", + "AccountBalance", (drizzle) => ({ - accountId: drizzle.text("account_id").$type
().notNull(), - tokenId: drizzle.text("token_id").notNull(), + id: drizzle.text().primaryKey(), + accountId: drizzle.text("accountId").$type
().notNull(), + tokenId: drizzle.text("tokenId").notNull(), balance: bigint({ mode: "bigint" }).notNull(), - // This field represents for who the account is delegating their voting power to delegate: drizzle.text().$type
().default(zeroAddress).notNull(), }), - (table) => [ - primaryKey({ - columns: [table.accountId, table.tokenId], - }), - index().on(table.delegate), - ], + (table) => [index().on(table.delegate)], ); export const accountPower = pgTable( - "account_power", + "AccountPower", (drizzle) => ({ - accountId: drizzle.text("account_id").$type
().notNull(), - daoId: drizzle.text("dao_id").notNull(), - votingPower: bigint("voting_power", { mode: "bigint" }) + id: drizzle.text().primaryKey(), + accountId: drizzle.text("accountId").$type
().notNull(), + daoId: drizzle.text("daoId").notNull(), + votingPower: bigint("votingPower", { mode: "bigint" }) .default(BigInt(0)) .notNull(), - votesCount: drizzle.integer("votes_count").default(0).notNull(), - proposalsCount: drizzle.integer("proposals_count").default(0).notNull(), - delegationsCount: drizzle.integer("delegations_count").default(0).notNull(), - lastVoteTimestamp: bigint("last_vote_timestamp", { mode: "bigint" }) + votesCount: drizzle.integer("votesCount").default(0).notNull(), + proposalsCount: drizzle.integer("proposalsCount").default(0).notNull(), + delegationsCount: drizzle.integer("delegationsCount").default(0).notNull(), + lastVoteTimestamp: bigint("lastVoteTimestamp", { mode: "bigint" }) .default(BigInt(0)) .notNull(), }), - (table) => [ - primaryKey({ - columns: [table.accountId], - }), - index().on(table.lastVoteTimestamp), - ], + (table) => [index().on(table.lastVoteTimestamp)], ); export const votingPowerHistory = pgTable( - "voting_power_history", + "VotingPowerHistory", (drizzle) => ({ - transactionHash: drizzle.text("transaction_hash").notNull(), - daoId: drizzle.text("dao_id").notNull(), - accountId: drizzle.text("account_id").$type
().notNull(), - votingPower: bigint("voting_power", { mode: "bigint" }).notNull(), + id: drizzle.text().primaryKey(), + transactionHash: drizzle.text("transactionHash").notNull(), + daoId: drizzle.text("daoId").notNull(), + accountId: drizzle.text("accountId").$type
().notNull(), + votingPower: bigint("votingPower", { mode: "bigint" }).notNull(), delta: bigint({ mode: "bigint" }).notNull(), - deltaMod: bigint("delta_mod", { mode: "bigint" }).notNull(), + deltaMod: bigint("deltaMod", { mode: "bigint" }).notNull(), timestamp: bigint({ mode: "bigint" }).notNull(), - logIndex: drizzle.integer("log_index").notNull(), + logIndex: drizzle.integer("logIndex").notNull(), }), - (table) => [ - primaryKey({ - columns: [table.transactionHash, table.accountId, table.logIndex], - }), - ], ); export const balanceHistory = pgTable( - "balance_history", + "BalanceHistory", (drizzle) => ({ - transactionHash: drizzle.text("transaction_hash").notNull(), - daoId: drizzle.text("dao_id").notNull(), - accountId: drizzle.text("account_id").$type
().notNull(), + id: drizzle.text().primaryKey(), + transactionHash: drizzle.text("transactionHash").notNull(), + daoId: drizzle.text("daoId").notNull(), + accountId: drizzle.text("accountId").$type
().notNull(), balance: bigint({ mode: "bigint" }).notNull(), delta: bigint({ mode: "bigint" }).notNull(), - deltaMod: bigint("delta_mod", { mode: "bigint" }).notNull(), + deltaMod: bigint("deltaMod", { mode: "bigint" }).notNull(), timestamp: bigint({ mode: "bigint" }).notNull(), - logIndex: drizzle.integer("log_index").notNull(), + logIndex: drizzle.integer("logIndex").notNull(), }), - (table) => [ - primaryKey({ - columns: [table.transactionHash, table.accountId, table.logIndex], - }), - ], ); export const delegation = pgTable( - "delegations", + "Delegation", (drizzle) => ({ - transactionHash: drizzle.text("transaction_hash").notNull(), - daoId: drizzle.text("dao_id").notNull(), + id: drizzle.text().primaryKey(), + transactionHash: drizzle.text("transactionHash").notNull(), + daoId: drizzle.text("daoId").notNull(), delegateAccountId: drizzle - .text("delegate_account_id") + .text("delegateAccountId") .$type
() .notNull(), delegatorAccountId: drizzle - .text("delegator_account_id") + .text("delegatorAccountId") .$type
() .notNull(), - delegatedValue: bigint("delegated_value", { mode: "bigint" }) + delegatedValue: bigint("delegatedValue", { mode: "bigint" }) .notNull() .default(0n), - previousDelegate: drizzle.text("previous_delegate"), + previousDelegate: drizzle.text("previousDelegate"), timestamp: bigint({ mode: "bigint" }).notNull(), - logIndex: drizzle.integer("log_index").notNull(), - isCex: drizzle.boolean("is_cex").notNull().default(false), - isDex: drizzle.boolean("is_dex").notNull().default(false), - isLending: drizzle.boolean("is_lending").notNull().default(false), - isTotal: drizzle.boolean("is_total").notNull().default(false), + logIndex: drizzle.integer("logIndex").notNull(), + isCex: drizzle.boolean("isCex").notNull().default(false), + isDex: drizzle.boolean("isDex").notNull().default(false), + isLending: drizzle.boolean("isLending").notNull().default(false), + isTotal: drizzle.boolean("isTotal").notNull().default(false), }), (table) => [ - primaryKey({ - columns: [ - table.transactionHash, - table.delegatorAccountId, - table.delegateAccountId, - ], - }), index().on(table.transactionHash), index().on(table.timestamp), index().on(table.delegatorAccountId), @@ -155,25 +132,23 @@ export const delegation = pgTable( ); export const transfer = pgTable( - "transfers", + "Transfer", (drizzle) => ({ - transactionHash: drizzle.text("transaction_hash").notNull(), - daoId: drizzle.text("dao_id").notNull(), - tokenId: drizzle.text("token_id").notNull(), + id: drizzle.text().primaryKey(), + transactionHash: drizzle.text("transactionHash").notNull(), + daoId: drizzle.text("daoId").notNull(), + tokenId: drizzle.text("tokenId").notNull(), amount: bigint({ mode: "bigint" }).notNull(), - fromAccountId: drizzle.text("from_account_id").$type
().notNull(), - toAccountId: drizzle.text("to_account_id").$type
().notNull(), + fromAccountId: drizzle.text("fromAccountId").$type
().notNull(), + toAccountId: drizzle.text("toAccountId").$type
().notNull(), timestamp: bigint({ mode: "bigint" }).notNull(), - logIndex: drizzle.integer("log_index").notNull(), - isCex: drizzle.boolean("is_cex").notNull().default(false), - isDex: drizzle.boolean("is_dex").notNull().default(false), - isLending: drizzle.boolean("is_lending").notNull().default(false), - isTotal: drizzle.boolean("is_total").notNull().default(false), + logIndex: drizzle.integer("logIndex").notNull(), + isCex: drizzle.boolean("isCex").notNull().default(false), + isDex: drizzle.boolean("isDex").notNull().default(false), + isLending: drizzle.boolean("isLending").notNull().default(false), + isTotal: drizzle.boolean("isTotal").notNull().default(false), }), (table) => [ - primaryKey({ - columns: [table.transactionHash, table.fromAccountId, table.toAccountId], - }), index().on(table.transactionHash), index().on(table.timestamp), index().on(table.fromAccountId), @@ -183,53 +158,49 @@ export const transfer = pgTable( ); export const votesOnchain = pgTable( - "votes_onchain", + "VoteOnchain", (drizzle) => ({ - txHash: drizzle.text("tx_hash").notNull(), - daoId: drizzle.text("dao_id").notNull(), - voterAccountId: drizzle.text("voter_account_id").$type
().notNull(), - proposalId: drizzle.text("proposal_id").notNull(), + id: drizzle.text().primaryKey(), + txHash: drizzle.text("txHash").notNull(), + daoId: drizzle.text("daoId").notNull(), + voterAccountId: drizzle.text("voterAccountId").$type
().notNull(), + proposalId: drizzle.text("proposalId").notNull(), support: drizzle.text().notNull(), - votingPower: bigint("voting_power", { mode: "bigint" }).notNull(), + votingPower: bigint("votingPower", { mode: "bigint" }).notNull(), reason: drizzle.text(), timestamp: bigint({ mode: "bigint" }).notNull(), }), - (table) => [ - primaryKey({ - columns: [table.voterAccountId, table.proposalId], - }), - ], ); export const proposalsOnchain = pgTable( - "proposals_onchain", + "ProposalOnchain", (drizzle) => ({ id: drizzle.text().primaryKey(), - txHash: drizzle.text("tx_hash").notNull(), - daoId: drizzle.text("dao_id").notNull(), + txHash: drizzle.text("txHash").notNull(), + daoId: drizzle.text("daoId").notNull(), proposerAccountId: drizzle - .text("proposer_account_id") + .text("proposerAccountId") .$type
() .notNull(), targets: drizzle.json().$type().notNull(), values: drizzle.json().$type().notNull(), signatures: drizzle.json().$type().notNull(), calldatas: drizzle.json().$type().notNull(), - startBlock: drizzle.integer("start_block").notNull(), - endBlock: drizzle.integer("end_block").notNull(), + startBlock: drizzle.integer("startBlock").notNull(), + endBlock: drizzle.integer("endBlock").notNull(), title: drizzle.text().notNull(), description: drizzle.text().notNull(), timestamp: bigint({ mode: "bigint" }).notNull(), - endTimestamp: bigint("end_timestamp", { mode: "bigint" }).notNull(), + endTimestamp: bigint("endTimestamp", { mode: "bigint" }).notNull(), status: drizzle.text().notNull(), - forVotes: bigint("for_votes", { mode: "bigint" }).default(0n).notNull(), - againstVotes: bigint("against_votes", { mode: "bigint" }) + forVotes: bigint("forVotes", { mode: "bigint" }).default(0n).notNull(), + againstVotes: bigint("againstVotes", { mode: "bigint" }) .default(0n) .notNull(), - abstainVotes: bigint("abstain_votes", { mode: "bigint" }) + abstainVotes: bigint("abstainVotes", { mode: "bigint" }) .default(0n) .notNull(), - proposalType: drizzle.integer("proposal_type"), + proposalType: drizzle.integer("proposalType"), }), (table) => [index().on(table.proposerAccountId)], ); @@ -244,11 +215,12 @@ export const votesOnchainRelations = relations(votesOnchain, ({ one }) => ({ export const metricType = pgEnum("metricType", MetricTypesArray); export const daoMetricsDayBucket = pgTable( - "dao_metrics_day_buckets", + "DaoMetricsDayBucket", (drizzle) => ({ + id: drizzle.text().primaryKey(), date: bigint({ mode: "bigint" }).notNull(), - daoId: drizzle.text("dao_id").notNull(), - tokenId: drizzle.text("token_id").notNull(), + daoId: drizzle.text("daoId").notNull(), + tokenId: drizzle.text("tokenId").notNull(), metricType: metricType("metricType").notNull(), open: bigint({ mode: "bigint" }).notNull(), close: bigint({ mode: "bigint" }).notNull(), @@ -257,29 +229,26 @@ export 
const daoMetricsDayBucket = pgTable( average: bigint({ mode: "bigint" }).notNull(), volume: bigint({ mode: "bigint" }).notNull(), count: drizzle.integer().notNull(), - lastUpdate: bigint("last_update", { mode: "bigint" }).notNull(), + lastUpdate: bigint("lastUpdate", { mode: "bigint" }).notNull(), }), - (table) => [ - primaryKey({ - columns: [table.date, table.tokenId, table.metricType], - }), - ], ); -export const transaction = pgTable("transaction", (drizzle) => ({ - transactionHash: drizzle.text("transaction_hash").primaryKey(), - fromAddress: drizzle.text("from_address"), - toAddress: drizzle.text("to_address"), - isCex: drizzle.boolean("is_cex").notNull().default(false), - isDex: drizzle.boolean("is_dex").notNull().default(false), - isLending: drizzle.boolean("is_lending").notNull().default(false), - isTotal: drizzle.boolean("is_total").notNull().default(false), +export const transaction = pgTable("Transaction", (drizzle) => ({ + id: drizzle.text().primaryKey(), + transactionHash: drizzle.text("transactionHash").notNull(), + fromAddress: drizzle.text("fromAddress"), + toAddress: drizzle.text("toAddress"), + isCex: drizzle.boolean("isCex").notNull().default(false), + isDex: drizzle.boolean("isDex").notNull().default(false), + isLending: drizzle.boolean("isLending").notNull().default(false), + isTotal: drizzle.boolean("isTotal").notNull().default(false), timestamp: bigint({ mode: "bigint" }).notNull(), })); -export const tokenPrice = pgTable("token_price", (_drizzle) => ({ - price: bigint({ mode: "bigint" }).notNull(), // price in ETH - timestamp: bigint({ mode: "bigint" }).primaryKey(), +export const tokenPrice = pgTable("TokenPrice", (_drizzle) => ({ + id: _drizzle.text().primaryKey(), + price: bigint({ mode: "bigint" }).notNull(), + timestamp: bigint({ mode: "bigint" }).notNull(), })); export const evenTypeEnum = pgEnum("event_type", [ @@ -292,19 +261,17 @@ export const evenTypeEnum = pgEnum("event_type", [ ]); export const feedEvent = pgTable( - 
"feed_event", + "FeedEvent", (drizzle) => ({ - txHash: drizzle.text("tx_hash").notNull(), - logIndex: drizzle.integer("log_index").notNull(), + id: drizzle.text().primaryKey(), + txHash: drizzle.text("txHash").notNull(), + logIndex: drizzle.integer("logIndex").notNull(), type: evenTypeEnum("type").notNull(), value: bigint({ mode: "bigint" }).notNull().default(0n), timestamp: bigint({ mode: "number" }).notNull(), metadata: drizzle.json().$type>(), }), (table) => [ - primaryKey({ - columns: [table.txHash, table.logIndex], - }), index().on(table.timestamp), index().on(table.type), index().on(table.value), diff --git a/apps/api/src/repositories/account-balance/historical.ts b/apps/api/src/repositories/account-balance/historical.ts index 20355033f..1ceaa6a0c 100644 --- a/apps/api/src/repositories/account-balance/historical.ts +++ b/apps/api/src/repositories/account-balance/historical.ts @@ -53,8 +53,8 @@ export class HistoricalBalanceRepository { .offset(skip); return result.map((row) => ({ - ...row.balance_history, - transfer: row.transfers, + ...row.BalanceHistory, + transfer: row.Transfer, })); } diff --git a/apps/api/src/repositories/voting-power/aave.ts b/apps/api/src/repositories/voting-power/aave.ts index eba94ea7b..e12405c42 100644 --- a/apps/api/src/repositories/voting-power/aave.ts +++ b/apps/api/src/repositories/voting-power/aave.ts @@ -124,17 +124,17 @@ export class AAVEVotingPowerRepository { .offset(skip); return result.map((row) => ({ - ...row.voting_power_history, + ...row.VotingPowerHistory, delegations: - row.transfers && - row.transfers?.logIndex > (row.delegations?.logIndex || 0) + row.Transfer && + row.Transfer?.logIndex > (row.Delegation?.logIndex || 0) ? null - : row.delegations, + : row.Delegation, transfers: - row.delegations && - row.delegations?.logIndex > (row.transfers?.logIndex || 0) + row.Delegation && + row.Delegation?.logIndex > (row.Transfer?.logIndex || 0) ? 
null - : row.transfers, + : row.Transfer, })); } @@ -226,6 +226,7 @@ export class AAVEVotingPowerRepository { const items = await this.db .select({ + id: accountPower.id, accountId: allAccountIds.accountId, daoId: accountPower.daoId, votingPower: combinedPowerSql, @@ -287,6 +288,7 @@ export class AAVEVotingPowerRepository { return { items: items.map((row) => ({ ...row, + id: row.id ?? "", daoId: row.daoId ?? "", votesCount: row.votesCount ?? 0, proposalsCount: row.proposalsCount ?? 0, @@ -341,6 +343,7 @@ export class AAVEVotingPowerRepository { const [result] = await this.db .select({ + id: accountPower.id, accountId: accountPower.accountId, daoId: accountPower.daoId, votingPower: combinedPowerSql, @@ -378,6 +381,7 @@ export class AAVEVotingPowerRepository { percentageChange: result.percentageChange, } : { + id: "", accountId: accountId, votingPower: 0n, delegationsCount: 0, diff --git a/apps/api/src/repositories/voting-power/general.ts b/apps/api/src/repositories/voting-power/general.ts index a87771c5e..e78749078 100644 --- a/apps/api/src/repositories/voting-power/general.ts +++ b/apps/api/src/repositories/voting-power/general.ts @@ -124,17 +124,17 @@ export class VotingPowerRepository { .offset(skip); return result.map((row) => ({ - ...row.voting_power_history, + ...row.VotingPowerHistory, delegations: - row.transfers && - row.transfers?.logIndex > (row.delegations?.logIndex || 0) + row.Transfer && + row.Transfer?.logIndex > (row.Delegation?.logIndex || 0) ? null - : row.delegations, + : row.Delegation, transfers: - row.delegations && - row.delegations?.logIndex > (row.transfers?.logIndex || 0) + row.Delegation && + row.Delegation?.logIndex > (row.Transfer?.logIndex || 0) ? 
null - : row.transfers, + : row.Transfer, })); } @@ -342,6 +342,7 @@ export class VotingPowerRepository { const items = await this.db .select({ + id: accountPower.id, accountId: accountPower.accountId, daoId: accountPower.daoId, votingPower: accountPower.votingPower, @@ -407,6 +408,7 @@ export class VotingPowerRepository { const [result] = await this.db .select({ + id: accountPower.id, accountId: accountPower.accountId, daoId: accountPower.daoId, votingPower: accountPower.votingPower, @@ -438,6 +440,7 @@ export class VotingPowerRepository { percentageChange: String(result.percentageChange ?? "0"), } : { + id: "", accountId: accountId, votingPower: 0n, delegationsCount: 0, diff --git a/apps/api/src/repositories/voting-power/nouns.ts b/apps/api/src/repositories/voting-power/nouns.ts index 10d946c7b..663c52f60 100644 --- a/apps/api/src/repositories/voting-power/nouns.ts +++ b/apps/api/src/repositories/voting-power/nouns.ts @@ -100,17 +100,17 @@ export class NounsVotingPowerRepository { .offset(skip); return result.map((row) => ({ - ...row.voting_power_history, + ...row.VotingPowerHistory, delegations: - row.transfers && - row.transfers?.logIndex < (row.delegations?.logIndex || 0) + row.Transfer && + row.Transfer?.logIndex < (row.Delegation?.logIndex || 0) ? null - : row.delegations, + : row.Delegation, transfers: - row.delegations && - row.delegations?.logIndex > (row.transfers?.logIndex || 0) + row.Delegation && + row.Delegation?.logIndex > (row.Transfer?.logIndex || 0) ? null - : row.transfers, + : row.Transfer, })); } } diff --git a/apps/hypersync-indexer/schema.graphql b/apps/hypersync-indexer/schema.graphql index d7c02d714..c2f10829b 100644 --- a/apps/hypersync-indexer/schema.graphql +++ b/apps/hypersync-indexer/schema.graphql @@ -161,8 +161,8 @@ type DaoMetricsDayBucket { daoId: String! tokenId: String! @index metricType: MetricType! - openValue: BigInt! - closeValue: BigInt! + open: BigInt! + close: BigInt! low: BigInt! high: BigInt! average: BigInt! 
@@ -195,7 +195,7 @@ type FeedEvent { id: ID! txHash: String! @index logIndex: Int! - eventType: EventType! + type: EventType! value: BigInt! @index timestamp: BigInt! @index metadata: Json diff --git a/apps/indexer/src/lib/blockTime.ts b/apps/indexer/src/lib/blockTime.ts deleted file mode 100644 index e765535e8..000000000 --- a/apps/indexer/src/lib/blockTime.ts +++ /dev/null @@ -1,15 +0,0 @@ -import { DaysEnum } from "./enums"; - -export function calculateHistoricalBlockNumber( - days: DaysEnum, - currentBlockNumber: number, - blockTime: number, -): number { - const blocksToGoBack = Math.floor(days / blockTime); - const historicalBlockNumber = Math.max( - 0, - currentBlockNumber - blocksToGoBack, - ); - - return historicalBlockNumber; -} diff --git a/apps/indexer/src/lib/enums.ts b/apps/indexer/src/lib/enums.ts index 431d750d0..3580fd6f3 100644 --- a/apps/indexer/src/lib/enums.ts +++ b/apps/indexer/src/lib/enums.ts @@ -17,11 +17,3 @@ export enum DaoIdEnum { } export const SECONDS_IN_DAY = 24 * 60 * 60; - -export enum DaysEnum { - "7d" = 7 * 24 * 60 * 60, - "30d" = 30 * 24 * 60 * 60, - "90d" = 90 * 24 * 60 * 60, - "180d" = 180 * 24 * 60 * 60, - "365d" = 365 * 24 * 60 * 60, -} From dbcd82a6ffbf0c319be4597f4d5bb838f58cbbed Mon Sep 17 00:00:00 2001 From: Pedro Binotto Date: Thu, 9 Apr 2026 15:08:27 -0300 Subject: [PATCH 12/17] fix: adjust tests --- .../historical.integration.test.ts | 2 + .../interactions.integration.test.ts | 2 + .../listing.integration.test.ts | 2 + .../variations.integration.test.ts | 2 + .../index.integration.test.ts | 1 + .../delegations.integration.test.ts | 2 + .../delegators.integration.test.ts | 2 + ...historical.delegations.integration.test.ts | 1 + .../controllers/feed/feed.integration.test.ts | 1 + .../controller.integration.test.ts | 5 ++ .../last-update/index.integration.test.ts | 1 + .../proposals-activity.integration.test.ts | 1 + .../token-metrics/index.integration.test.ts | 1 + .../token-distribution.integration.test.ts | 1 + 
.../transactions/index.integration.test.ts | 3 + .../transfers/index.integration.test.ts | 1 + .../votes/onchainVotes.integration.test.ts | 2 + .../historical.integration.test.ts | 2 + .../voting-power/listing.integration.test.ts | 2 + .../variations.integration.test.ts | 2 + apps/api/src/database/schema.ts | 83 ++++++++----------- .../account-balance/aave.unit.test.ts | 42 ++++++---- .../account-balance/common.unit.test.ts | 42 ++++++---- .../account-balance/historical.unit.test.ts | 27 +++--- .../account-balance/interactions.unit.test.ts | 42 ++++++---- .../account-balance/listing.unit.test.ts | 27 +++--- .../account-balance/variations.unit.test.ts | 42 ++++++---- .../daoMetricsDayBucket/index.unit.test.ts | 1 + .../delegations/delegators.unit.test.ts | 42 ++++++---- .../delegations/general.unit.test.ts | 2 + .../delegations/historical.unit.test.ts | 1 + .../feed/feed.repository.unit.test.ts | 1 + .../last-update/index.unit.test.ts | 1 + .../proposals-activity/index.unit.test.ts | 26 +++--- .../src/repositories/token/nft.unit.test.ts | 14 ++-- .../transactions/index.unit.test.ts | 20 +++-- .../repositories/transfers/index.unit.test.ts | 1 + .../treasury.repository.integration.test.ts | 1 + .../votes/onchainVotes.unit.test.ts | 53 +++++++----- .../api/src/repositories/voting-power/aave.ts | 3 +- .../voting-power/aave.unit.test.ts | 44 ++++++---- .../src/repositories/voting-power/general.ts | 3 +- .../src/repositories/voting-power/nouns.ts | 3 +- .../voting-power/nouns.unit.test.ts | 28 ++++--- .../voting-power.repository.unit.test.ts | 27 +++--- .../account-balance/historical.unit.test.ts | 2 + .../account-balance/variations.unit.test.ts | 1 + .../delegation-percentage.unit.test.ts | 1 + .../services/delegations/current.unit.test.ts | 1 + .../delegations/historical.unit.test.ts | 1 + apps/api/src/services/feed/feed.unit.test.ts | 1 + .../services/transactions/index.unit.test.ts | 1 + .../src/services/transfers/index.unit.test.ts | 1 + 
.../services/votes/onchainVotes.unit.test.ts | 1 + .../services/voting-power/aave.unit.test.ts | 2 + .../services/voting-power/index.unit.test.ts | 2 + .../voting-power/voting-power.unit.test.ts | 1 + 57 files changed, 382 insertions(+), 245 deletions(-) diff --git a/apps/api/src/controllers/account-balance/historical.integration.test.ts b/apps/api/src/controllers/account-balance/historical.integration.test.ts index c9673d562..74aee8e4d 100644 --- a/apps/api/src/controllers/account-balance/historical.integration.test.ts +++ b/apps/api/src/controllers/account-balance/historical.integration.test.ts @@ -24,6 +24,7 @@ const DAO_ID = "ENS"; const createHistoryRow = ( overrides: Partial = {}, ): BalanceHistoryInsert => ({ + id: "test-id", transactionHash: TX_HASH, daoId: DAO_ID, accountId: VALID_ADDRESS, @@ -38,6 +39,7 @@ const createHistoryRow = ( const createTransferRow = ( overrides: Partial = {}, ): TransferInsert => ({ + id: "test-id", transactionHash: TX_HASH, daoId: DAO_ID, tokenId: TOKEN_ID, diff --git a/apps/api/src/controllers/account-balance/interactions.integration.test.ts b/apps/api/src/controllers/account-balance/interactions.integration.test.ts index 7d0fb877f..0065b5677 100644 --- a/apps/api/src/controllers/account-balance/interactions.integration.test.ts +++ b/apps/api/src/controllers/account-balance/interactions.integration.test.ts @@ -25,6 +25,7 @@ const DAO_ID = "ENS"; const createAccountBalanceRow = ( overrides: Partial = {}, ): AccountBalanceInsert => ({ + id: "test-id", accountId: COUNTERPART, tokenId: TOKEN_ID, balance: 200000000000000000n, @@ -35,6 +36,7 @@ const createAccountBalanceRow = ( const createTransferRow = ( overrides: Partial = {}, ): TransferInsert => ({ + id: "test-id", transactionHash: TX_HASH, daoId: DAO_ID, tokenId: TOKEN_ID, diff --git a/apps/api/src/controllers/account-balance/listing.integration.test.ts b/apps/api/src/controllers/account-balance/listing.integration.test.ts index d42b9500c..378a0266f 100644 --- 
a/apps/api/src/controllers/account-balance/listing.integration.test.ts +++ b/apps/api/src/controllers/account-balance/listing.integration.test.ts @@ -34,6 +34,7 @@ const TOKEN_ID = getAddress("0xC18360217D8F7Ab5e7c516566761Ea12Ce7F9D72"); const createAccountBalanceRow = ( overrides: Partial = {}, ): AccountBalanceInsert => ({ + id: "test-id", accountId: VALID_ADDRESS, tokenId: TOKEN_ID, balance: 1200000000000000000n, @@ -44,6 +45,7 @@ const createAccountBalanceRow = ( const createTransferRow = ( overrides: Partial = {}, ): TransferInsert => ({ + id: "test-id", transactionHash: "0xabc1230000000000000000000000000000000000000000000000000000000001", daoId: "ENS", diff --git a/apps/api/src/controllers/account-balance/variations.integration.test.ts b/apps/api/src/controllers/account-balance/variations.integration.test.ts index 2a00f7a01..3c6710e24 100644 --- a/apps/api/src/controllers/account-balance/variations.integration.test.ts +++ b/apps/api/src/controllers/account-balance/variations.integration.test.ts @@ -29,6 +29,7 @@ const DAO_ID = "ENS"; const createAccountBalanceRow = ( overrides: Partial = {}, ): AccountBalanceInsert => ({ + id: "test-id", accountId: VALID_ADDRESS, tokenId: TOKEN_ID, balance: 1200000000000000000n, @@ -39,6 +40,7 @@ const createAccountBalanceRow = ( const createTransferRow = ( overrides: Partial = {}, ): TransferInsert => ({ + id: "test-id", transactionHash: TX_HASH, daoId: DAO_ID, tokenId: TOKEN_ID, diff --git a/apps/api/src/controllers/delegation-percentage/index.integration.test.ts b/apps/api/src/controllers/delegation-percentage/index.integration.test.ts index d4083cbff..3cde45417 100644 --- a/apps/api/src/controllers/delegation-percentage/index.integration.test.ts +++ b/apps/api/src/controllers/delegation-percentage/index.integration.test.ts @@ -15,6 +15,7 @@ type MetricInsert = typeof daoMetricsDayBucket.$inferInsert; const TEST_DATE = 1699920000n; const createMetric = (overrides: Partial = {}): MetricInsert => ({ + id: "test-id", date: 
TEST_DATE, daoId: "UNI", tokenId: "uni", diff --git a/apps/api/src/controllers/delegations/delegations.integration.test.ts b/apps/api/src/controllers/delegations/delegations.integration.test.ts index ab39c1bd8..ab38145c9 100644 --- a/apps/api/src/controllers/delegations/delegations.integration.test.ts +++ b/apps/api/src/controllers/delegations/delegations.integration.test.ts @@ -27,6 +27,7 @@ const TX_HASH = const createAccountBalanceRow = ( overrides: Partial = {}, ): AccountBalanceInsert => ({ + id: "test-id", accountId: VALID_ADDRESS, tokenId: "uni", balance: 1000000000000000000n, @@ -37,6 +38,7 @@ const createAccountBalanceRow = ( const createDelegationRow = ( overrides: Partial = {}, ): DelegationInsert => ({ + id: "test-id", transactionHash: TX_HASH, daoId: DAO_ID, delegateAccountId: VALID_ADDRESS, diff --git a/apps/api/src/controllers/delegations/delegators.integration.test.ts b/apps/api/src/controllers/delegations/delegators.integration.test.ts index 613ad3d43..eb1343f24 100644 --- a/apps/api/src/controllers/delegations/delegators.integration.test.ts +++ b/apps/api/src/controllers/delegations/delegators.integration.test.ts @@ -29,6 +29,7 @@ const DAO_ID = "uni"; const createDelegationRow = ( overrides: Partial = {}, ): DelegationInsert => ({ + id: "test-id", transactionHash: "0xaabbccddaabbccddaabbccddaabbccddaabbccddaabbccddaabbccddaabbccdd", daoId: DAO_ID, @@ -48,6 +49,7 @@ const createDelegationRow = ( const createAccountBalanceRow = ( overrides: Partial = {}, ): AccountBalanceInsert => ({ + id: "test-id", accountId: DELEGATOR_1, tokenId: "uni", balance: 1000000000000000000n, diff --git a/apps/api/src/controllers/delegations/historical.delegations.integration.test.ts b/apps/api/src/controllers/delegations/historical.delegations.integration.test.ts index 487f11482..e2190140c 100644 --- a/apps/api/src/controllers/delegations/historical.delegations.integration.test.ts +++ b/apps/api/src/controllers/delegations/historical.delegations.integration.test.ts @@ 
-32,6 +32,7 @@ const TX_HASH_2 = const createDelegationRow = ( overrides: Partial = {}, ): DelegationInsert => ({ + id: "test-id", transactionHash: TX_HASH, daoId: DAO_ID, delegatorAccountId: VALID_ADDRESS, diff --git a/apps/api/src/controllers/feed/feed.integration.test.ts b/apps/api/src/controllers/feed/feed.integration.test.ts index 3ba0156ac..02fd9527e 100644 --- a/apps/api/src/controllers/feed/feed.integration.test.ts +++ b/apps/api/src/controllers/feed/feed.integration.test.ts @@ -19,6 +19,7 @@ const nounsThresholds = getDaoRelevanceThreshold(DaoIdEnum.NOUNS); const createEvent = ( overrides: Partial = {}, ): FeedEventInsert => ({ + id: "test-id", txHash: "0xabc123def456abc1", logIndex: 0, type: "VOTE" as const, diff --git a/apps/api/src/controllers/governance-activity/controller.integration.test.ts b/apps/api/src/controllers/governance-activity/controller.integration.test.ts index b6167af8d..0993e375b 100644 --- a/apps/api/src/controllers/governance-activity/controller.integration.test.ts +++ b/apps/api/src/controllers/governance-activity/controller.integration.test.ts @@ -100,6 +100,7 @@ describe("GovernanceActivity Controller", () => { it("should return 200 with activeSupply when recent voters exist", async () => { await db.insert(accountPower).values({ + id: "test-id-1", accountId: "0x1111111111111111111111111111111111111111", daoId: "TEST", votingPower: 1000000000000000000n, @@ -121,6 +122,7 @@ describe("GovernanceActivity Controller", () => { it("should use default days=90d when not provided", async () => { await db.insert(accountPower).values({ + id: "test-id-2", accountId: "0x1111111111111111111111111111111111111111", daoId: "TEST", votingPower: 500n, @@ -203,6 +205,7 @@ describe("GovernanceActivity Controller", () => { it("should return 200 with data and calculated changeRate", async () => { await db.insert(votesOnchain).values([ { + id: "vote-1", txHash: "0xv1", daoId: "TEST", voterAccountId: "0x1111111111111111111111111111111111111111", @@ -212,6 
+215,7 @@ describe("GovernanceActivity Controller", () => { timestamp: RECENT_TS, }, { + id: "vote-2", txHash: "0xv2", daoId: "TEST", voterAccountId: "0x2222222222222222222222222222222222222222", @@ -221,6 +225,7 @@ describe("GovernanceActivity Controller", () => { timestamp: RECENT_TS, }, { + id: "vote-3", txHash: "0xv3", daoId: "TEST", voterAccountId: "0x3333333333333333333333333333333333333333", diff --git a/apps/api/src/controllers/last-update/index.integration.test.ts b/apps/api/src/controllers/last-update/index.integration.test.ts index 11232a682..db5433184 100644 --- a/apps/api/src/controllers/last-update/index.integration.test.ts +++ b/apps/api/src/controllers/last-update/index.integration.test.ts @@ -13,6 +13,7 @@ import { lastUpdate } from "./index"; type MetricInsert = typeof daoMetricsDayBucket.$inferInsert; const createMetric = (overrides: Partial = {}): MetricInsert => ({ + id: "test-id", date: 1700000000n, daoId: "UNI", tokenId: "uni", diff --git a/apps/api/src/controllers/proposals/proposals-activity.integration.test.ts b/apps/api/src/controllers/proposals/proposals-activity.integration.test.ts index 4086f3aed..c64da0f7b 100644 --- a/apps/api/src/controllers/proposals/proposals-activity.integration.test.ts +++ b/apps/api/src/controllers/proposals/proposals-activity.integration.test.ts @@ -84,6 +84,7 @@ const createProposal = ( }); const createVote = (overrides: Partial = {}): VoteInsert => ({ + id: "0xvote123", txHash: "0xvote123", daoId: "ENS", voterAccountId: VALID_ADDRESS, diff --git a/apps/api/src/controllers/token-metrics/index.integration.test.ts b/apps/api/src/controllers/token-metrics/index.integration.test.ts index a4a305a05..37d0502d3 100644 --- a/apps/api/src/controllers/token-metrics/index.integration.test.ts +++ b/apps/api/src/controllers/token-metrics/index.integration.test.ts @@ -15,6 +15,7 @@ import { tokenMetrics } from "./index"; type MetricInsert = typeof daoMetricsDayBucket.$inferInsert; const createMetric = (overrides: Partial = 
{}): MetricInsert => ({ + id: "test-id", date: 1700000000n, daoId: "UNI", tokenId: "uni", diff --git a/apps/api/src/controllers/token/token-distribution.integration.test.ts b/apps/api/src/controllers/token/token-distribution.integration.test.ts index 784fb9a14..da92020b0 100644 --- a/apps/api/src/controllers/token/token-distribution.integration.test.ts +++ b/apps/api/src/controllers/token/token-distribution.integration.test.ts @@ -28,6 +28,7 @@ let repo: DrizzleRepository; let app: Hono; const createMetric = (overrides: Partial = {}): MetricInsert => ({ + id: "test-id", date: BigInt(NOW - 10), daoId: "ENS", tokenId: "ens", diff --git a/apps/api/src/controllers/transactions/index.integration.test.ts b/apps/api/src/controllers/transactions/index.integration.test.ts index 08f2ba534..a3b43cc5a 100644 --- a/apps/api/src/controllers/transactions/index.integration.test.ts +++ b/apps/api/src/controllers/transactions/index.integration.test.ts @@ -19,6 +19,7 @@ type DelegationInsert = typeof delegation.$inferInsert; const createTransaction = ( overrides: Partial = {}, ): TransactionInsert => ({ + id: "test-id", transactionHash: TX_HASH, fromAddress: getAddress("0x1111111111111111111111111111111111111111"), toAddress: getAddress("0x2222222222222222222222222222222222222222"), @@ -33,6 +34,7 @@ const createTransaction = ( const createTransfer = ( overrides: Partial = {}, ): TransferInsert => ({ + id: "test-id", transactionHash: TX_HASH, daoId: "UNI", tokenId: "uni", @@ -51,6 +53,7 @@ const createTransfer = ( const createDelegation = ( overrides: Partial = {}, ): DelegationInsert => ({ + id: "test-id", transactionHash: TX_HASH, daoId: "UNI", delegateAccountId: getAddress("0x3333333333333333333333333333333333333333"), diff --git a/apps/api/src/controllers/transfers/index.integration.test.ts b/apps/api/src/controllers/transfers/index.integration.test.ts index 41957f6cf..c338290cf 100644 --- a/apps/api/src/controllers/transfers/index.integration.test.ts +++ 
b/apps/api/src/controllers/transfers/index.integration.test.ts @@ -20,6 +20,7 @@ type TransferInsert = typeof transfer.$inferInsert; const createTransfer = ( overrides: Partial = {}, ): TransferInsert => ({ + id: "test-id", transactionHash: "0xabc", daoId: "UNI", tokenId: "uni", diff --git a/apps/api/src/controllers/votes/onchainVotes.integration.test.ts b/apps/api/src/controllers/votes/onchainVotes.integration.test.ts index e6dd83270..f1185287a 100644 --- a/apps/api/src/controllers/votes/onchainVotes.integration.test.ts +++ b/apps/api/src/controllers/votes/onchainVotes.integration.test.ts @@ -49,6 +49,7 @@ const createProposal = ( }); const createVote = (overrides: Partial = {}): VoteInsert => ({ + id: "test-id", txHash: "0xabc123", daoId: "ENS", voterAccountId: VOTER_ADDRESS, @@ -63,6 +64,7 @@ const createVote = (overrides: Partial = {}): VoteInsert => ({ const createAccountPower = ( overrides: Partial = {}, ): AccountPowerInsert => ({ + id: "test-id", accountId: VOTER_ADDRESS, daoId: "ENS", votingPower: 1000000000000000000n, diff --git a/apps/api/src/controllers/voting-power/historical.integration.test.ts b/apps/api/src/controllers/voting-power/historical.integration.test.ts index fb9fe157d..3e09600a4 100644 --- a/apps/api/src/controllers/voting-power/historical.integration.test.ts +++ b/apps/api/src/controllers/voting-power/historical.integration.test.ts @@ -29,6 +29,7 @@ const DAO_ID = "ENS"; const createHistoryRow = ( overrides: Partial = {}, ): VotingPowerHistoryInsert => ({ + id: "test-id", transactionHash: TX_HASH, daoId: DAO_ID, accountId: VALID_ADDRESS, @@ -43,6 +44,7 @@ const createHistoryRow = ( const createDelegationRow = ( overrides: Partial = {}, ): DelegationInsert => ({ + id: "test-id", transactionHash: TX_HASH, daoId: DAO_ID, delegateAccountId: VALID_ADDRESS, diff --git a/apps/api/src/controllers/voting-power/listing.integration.test.ts b/apps/api/src/controllers/voting-power/listing.integration.test.ts index c10d4f639..f4def71b1 100644 --- 
a/apps/api/src/controllers/voting-power/listing.integration.test.ts +++ b/apps/api/src/controllers/voting-power/listing.integration.test.ts @@ -25,6 +25,7 @@ const DAO_ID = "test-dao"; const createAccountPowerRow = ( overrides: Partial = {}, ): AccountPowerInsert => ({ + id: "test-id", accountId: TEST_ACCOUNT_1, daoId: DAO_ID, votingPower: 1000n, @@ -38,6 +39,7 @@ const createAccountPowerRow = ( const createHistoryRow = ( overrides: Partial = {}, ): VotingPowerHistoryInsert => ({ + id: "test-id", transactionHash: "0xabc1230000000000000000000000000000000000000000000000000000000000", daoId: DAO_ID, diff --git a/apps/api/src/controllers/voting-power/variations.integration.test.ts b/apps/api/src/controllers/voting-power/variations.integration.test.ts index 07a29af8e..2ce061b3c 100644 --- a/apps/api/src/controllers/voting-power/variations.integration.test.ts +++ b/apps/api/src/controllers/voting-power/variations.integration.test.ts @@ -30,6 +30,7 @@ const TX_4 = const createHistoryRow = ( overrides: Partial = {}, ): VotingPowerHistoryInsert => ({ + id: "test-id", transactionHash: "0xabc1230000000000000000000000000000000000000000000000000000000000", daoId: DAO_ID, @@ -45,6 +46,7 @@ const createHistoryRow = ( const createAccountPowerRow = ( overrides: Partial = {}, ): AccountPowerInsert => ({ + id: "test-id", accountId: VALID_ADDRESS, daoId: DAO_ID, votingPower: 1200000000000000000n, diff --git a/apps/api/src/database/schema.ts b/apps/api/src/database/schema.ts index ac7f41205..6ffe94b2a 100644 --- a/apps/api/src/database/schema.ts +++ b/apps/api/src/database/schema.ts @@ -1,11 +1,5 @@ import { relations } from "drizzle-orm"; -import { - pgTable, - index, - bigint, - pgEnum, - primaryKey, -} from "drizzle-orm/pg-core"; +import { pgTable, index, bigint, pgEnum } from "drizzle-orm/pg-core"; import { Address, zeroAddress } from "viem"; import { MetricTypesArray } from "@/lib/constants"; @@ -67,35 +61,29 @@ export const accountPower = pgTable( (table) => 
[index().on(table.lastVoteTimestamp)], ); -export const votingPowerHistory = pgTable( - "VotingPowerHistory", - (drizzle) => ({ - id: drizzle.text().primaryKey(), - transactionHash: drizzle.text("transactionHash").notNull(), - daoId: drizzle.text("daoId").notNull(), - accountId: drizzle.text("accountId").$type
().notNull(), - votingPower: bigint("votingPower", { mode: "bigint" }).notNull(), - delta: bigint({ mode: "bigint" }).notNull(), - deltaMod: bigint("deltaMod", { mode: "bigint" }).notNull(), - timestamp: bigint({ mode: "bigint" }).notNull(), - logIndex: drizzle.integer("logIndex").notNull(), - }), -); +export const votingPowerHistory = pgTable("VotingPowerHistory", (drizzle) => ({ + id: drizzle.text().primaryKey(), + transactionHash: drizzle.text("transactionHash").notNull(), + daoId: drizzle.text("daoId").notNull(), + accountId: drizzle.text("accountId").$type
().notNull(), + votingPower: bigint("votingPower", { mode: "bigint" }).notNull(), + delta: bigint({ mode: "bigint" }).notNull(), + deltaMod: bigint("deltaMod", { mode: "bigint" }).notNull(), + timestamp: bigint({ mode: "bigint" }).notNull(), + logIndex: drizzle.integer("logIndex").notNull(), +})); -export const balanceHistory = pgTable( - "BalanceHistory", - (drizzle) => ({ - id: drizzle.text().primaryKey(), - transactionHash: drizzle.text("transactionHash").notNull(), - daoId: drizzle.text("daoId").notNull(), - accountId: drizzle.text("accountId").$type
().notNull(), - balance: bigint({ mode: "bigint" }).notNull(), - delta: bigint({ mode: "bigint" }).notNull(), - deltaMod: bigint("deltaMod", { mode: "bigint" }).notNull(), - timestamp: bigint({ mode: "bigint" }).notNull(), - logIndex: drizzle.integer("logIndex").notNull(), - }), -); +export const balanceHistory = pgTable("BalanceHistory", (drizzle) => ({ + id: drizzle.text().primaryKey(), + transactionHash: drizzle.text("transactionHash").notNull(), + daoId: drizzle.text("daoId").notNull(), + accountId: drizzle.text("accountId").$type
().notNull(), + balance: bigint({ mode: "bigint" }).notNull(), + delta: bigint({ mode: "bigint" }).notNull(), + deltaMod: bigint("deltaMod", { mode: "bigint" }).notNull(), + timestamp: bigint({ mode: "bigint" }).notNull(), + logIndex: drizzle.integer("logIndex").notNull(), +})); export const delegation = pgTable( "Delegation", @@ -157,20 +145,17 @@ export const transfer = pgTable( ], ); -export const votesOnchain = pgTable( - "VoteOnchain", - (drizzle) => ({ - id: drizzle.text().primaryKey(), - txHash: drizzle.text("txHash").notNull(), - daoId: drizzle.text("daoId").notNull(), - voterAccountId: drizzle.text("voterAccountId").$type
().notNull(), - proposalId: drizzle.text("proposalId").notNull(), - support: drizzle.text().notNull(), - votingPower: bigint("votingPower", { mode: "bigint" }).notNull(), - reason: drizzle.text(), - timestamp: bigint({ mode: "bigint" }).notNull(), - }), -); +export const votesOnchain = pgTable("VoteOnchain", (drizzle) => ({ + id: drizzle.text().primaryKey(), + txHash: drizzle.text("txHash").notNull(), + daoId: drizzle.text("daoId").notNull(), + voterAccountId: drizzle.text("voterAccountId").$type
().notNull(), + proposalId: drizzle.text("proposalId").notNull(), + support: drizzle.text().notNull(), + votingPower: bigint("votingPower", { mode: "bigint" }).notNull(), + reason: drizzle.text(), + timestamp: bigint({ mode: "bigint" }).notNull(), +})); export const proposalsOnchain = pgTable( "ProposalOnchain", diff --git a/apps/api/src/repositories/account-balance/aave.unit.test.ts b/apps/api/src/repositories/account-balance/aave.unit.test.ts index b9a8a8354..7db98f656 100644 --- a/apps/api/src/repositories/account-balance/aave.unit.test.ts +++ b/apps/api/src/repositories/account-balance/aave.unit.test.ts @@ -27,26 +27,34 @@ let txCounter = 0; const createAccountBalance = ( overrides: Partial = {}, -): AccountBalanceInsert => ({ - accountId: ACCOUNT_A, - tokenId: `token-${txCounter++}`, - balance: 1000n, - ...overrides, -}); +): AccountBalanceInsert => { + const n = txCounter++; + return { + id: `ab-${n}`, + accountId: ACCOUNT_A, + tokenId: `token-${n}`, + balance: 1000n, + ...overrides, + }; +}; const createTransfer = ( overrides: Partial = {}, -): TransferInsert => ({ - transactionHash: `0x${(txCounter++).toString(16).padStart(64, "0")}`, - daoId: "test-dao", - tokenId: "token-1", - amount: 100n, - fromAccountId: ACCOUNT_A, - toAccountId: ACCOUNT_B, - timestamp: 1700000000n, - logIndex: 0, - ...overrides, -}); +): TransferInsert => { + const n = txCounter++; + return { + id: `tx-${n}`, + transactionHash: `0x${n.toString(16).padStart(64, "0")}`, + daoId: "test-dao", + tokenId: "token-1", + amount: 100n, + fromAccountId: ACCOUNT_A, + toAccountId: ACCOUNT_B, + timestamp: 1700000000n, + logIndex: 0, + ...overrides, + }; +}; describe("AAVEAccountBalanceRepository", () => { let client: PGlite; diff --git a/apps/api/src/repositories/account-balance/common.unit.test.ts b/apps/api/src/repositories/account-balance/common.unit.test.ts index e1103ff5a..f8a928d88 100644 --- a/apps/api/src/repositories/account-balance/common.unit.test.ts +++ 
b/apps/api/src/repositories/account-balance/common.unit.test.ts @@ -20,26 +20,34 @@ let txCounter = 0; const createAccountBalance = ( overrides: Partial = {}, -): AccountBalanceInsert => ({ - accountId: ACCOUNT_A, - tokenId: `token-${txCounter++}`, - balance: 1000n, - ...overrides, -}); +): AccountBalanceInsert => { + const n = txCounter++; + return { + id: `ab-${n}`, + accountId: ACCOUNT_A, + tokenId: `token-${n}`, + balance: 1000n, + ...overrides, + }; +}; const createTransfer = ( overrides: Partial = {}, -): TransferInsert => ({ - transactionHash: `0x${(txCounter++).toString(16).padStart(64, "0")}`, - daoId: "test-dao", - tokenId: "token-1", - amount: 100n, - fromAccountId: ACCOUNT_A, - toAccountId: ACCOUNT_B, - timestamp: 1700000000n, - logIndex: 0, - ...overrides, -}); +): TransferInsert => { + const n = txCounter++; + return { + id: `tx-${n}`, + transactionHash: `0x${n.toString(16).padStart(64, "0")}`, + daoId: "test-dao", + tokenId: "token-1", + amount: 100n, + fromAccountId: ACCOUNT_A, + toAccountId: ACCOUNT_B, + timestamp: 1700000000n, + logIndex: 0, + ...overrides, + }; +}; describe("AccountBalanceQueryFragments", () => { let client: PGlite; diff --git a/apps/api/src/repositories/account-balance/historical.unit.test.ts b/apps/api/src/repositories/account-balance/historical.unit.test.ts index 1ad557d61..90a73e0e5 100644 --- a/apps/api/src/repositories/account-balance/historical.unit.test.ts +++ b/apps/api/src/repositories/account-balance/historical.unit.test.ts @@ -19,21 +19,26 @@ let txCounter = 0; const createBalanceHistory = ( overrides: Partial = {}, -): BalanceHistoryInsert => ({ - transactionHash: `0x${(txCounter++).toString(16).padStart(64, "0")}`, - daoId: "test-dao", - accountId: ACCOUNT_A, - balance: 1000n, - delta: 200n, - deltaMod: 200n, - timestamp: 1700000000n, - logIndex: 0, - ...overrides, -}); +): BalanceHistoryInsert => { + const n = txCounter++; + return { + id: `bh-${n}`, + transactionHash: `0x${n.toString(16).padStart(64, "0")}`, + 
daoId: "test-dao", + accountId: ACCOUNT_A, + balance: 1000n, + delta: 200n, + deltaMod: 200n, + timestamp: 1700000000n, + logIndex: 0, + ...overrides, + }; +}; const createTransfer = ( overrides: Partial = {}, ): TransferInsert => ({ + id: `tx-${txCounter}`, transactionHash: `0x${txCounter.toString(16).padStart(64, "0")}`, daoId: "test-dao", tokenId: "token-1", diff --git a/apps/api/src/repositories/account-balance/interactions.unit.test.ts b/apps/api/src/repositories/account-balance/interactions.unit.test.ts index ec779b028..7ae17d865 100644 --- a/apps/api/src/repositories/account-balance/interactions.unit.test.ts +++ b/apps/api/src/repositories/account-balance/interactions.unit.test.ts @@ -23,26 +23,34 @@ let txCounter = 0; const createAccountBalance = ( overrides: Partial = {}, -): AccountBalanceInsert => ({ - accountId: ACCOUNT_A, - tokenId: `token-${txCounter++}`, - balance: 1000n, - ...overrides, -}); +): AccountBalanceInsert => { + const n = txCounter++; + return { + id: `ab-${n}`, + accountId: ACCOUNT_A, + tokenId: `token-${n}`, + balance: 1000n, + ...overrides, + }; +}; const createTransfer = ( overrides: Partial = {}, -): TransferInsert => ({ - transactionHash: `0x${(txCounter++).toString(16).padStart(64, "0")}`, - daoId: "test-dao", - tokenId: "token-1", - amount: 100n, - fromAccountId: ACCOUNT_A, - toAccountId: ACCOUNT_B, - timestamp: 1700000000n, - logIndex: 0, - ...overrides, -}); +): TransferInsert => { + const n = txCounter++; + return { + id: `tx-${n}`, + transactionHash: `0x${n.toString(16).padStart(64, "0")}`, + daoId: "test-dao", + tokenId: "token-1", + amount: 100n, + fromAccountId: ACCOUNT_A, + toAccountId: ACCOUNT_B, + timestamp: 1700000000n, + logIndex: 0, + ...overrides, + }; +}; describe("AccountInteractionsRepository", () => { let client: PGlite; diff --git a/apps/api/src/repositories/account-balance/listing.unit.test.ts b/apps/api/src/repositories/account-balance/listing.unit.test.ts index f7cbbe474..8c21076e1 100644 --- 
a/apps/api/src/repositories/account-balance/listing.unit.test.ts +++ b/apps/api/src/repositories/account-balance/listing.unit.test.ts @@ -27,6 +27,7 @@ let txCounter = 0; const createAccountBalance = ( overrides: Partial = {}, ): AccountBalanceInsert => ({ + id: "test-id", accountId: ACCOUNT_A, tokenId: TOKEN_ID, balance: 1000n, @@ -35,17 +36,21 @@ const createAccountBalance = ( const createTransfer = ( overrides: Partial = {}, -): TransferInsert => ({ - transactionHash: `0x${(txCounter++).toString(16).padStart(64, "0")}`, - daoId: "test-dao", - tokenId: TOKEN_ID, - amount: 100n, - fromAccountId: ACCOUNT_A, - toAccountId: ACCOUNT_B, - timestamp: 1700000000n, - logIndex: 0, - ...overrides, -}); +): TransferInsert => { + const n = txCounter++; + return { + id: `tx-${n}`, + transactionHash: `0x${n.toString(16).padStart(64, "0")}`, + daoId: "test-dao", + tokenId: TOKEN_ID, + amount: 100n, + fromAccountId: ACCOUNT_A, + toAccountId: ACCOUNT_B, + timestamp: 1700000000n, + logIndex: 0, + ...overrides, + }; +}; describe("AccountBalanceRepository", () => { let client: PGlite; diff --git a/apps/api/src/repositories/account-balance/variations.unit.test.ts b/apps/api/src/repositories/account-balance/variations.unit.test.ts index a71beae30..04679befa 100644 --- a/apps/api/src/repositories/account-balance/variations.unit.test.ts +++ b/apps/api/src/repositories/account-balance/variations.unit.test.ts @@ -21,26 +21,34 @@ let txCounter = 0; const createAccountBalance = ( overrides: Partial = {}, -): AccountBalanceInsert => ({ - accountId: ACCOUNT_A, - tokenId: `token-${txCounter++}`, - balance: 1000n, - ...overrides, -}); +): AccountBalanceInsert => { + const n = txCounter++; + return { + id: `ab-${n}`, + accountId: ACCOUNT_A, + tokenId: `token-${n}`, + balance: 1000n, + ...overrides, + }; +}; const createTransfer = ( overrides: Partial = {}, -): TransferInsert => ({ - transactionHash: `0x${(txCounter++).toString(16).padStart(64, "0")}`, - daoId: "test-dao", - tokenId: "token-1", - 
amount: 100n, - fromAccountId: ACCOUNT_A, - toAccountId: ACCOUNT_B, - timestamp: 1700000000n, - logIndex: 0, - ...overrides, -}); +): TransferInsert => { + const n = txCounter++; + return { + id: `tx-${n}`, + transactionHash: `0x${n.toString(16).padStart(64, "0")}`, + daoId: "test-dao", + tokenId: "token-1", + amount: 100n, + fromAccountId: ACCOUNT_A, + toAccountId: ACCOUNT_B, + timestamp: 1700000000n, + logIndex: 0, + ...overrides, + }; +}; describe("BalanceVariationsRepository", () => { let client: PGlite; diff --git a/apps/api/src/repositories/daoMetricsDayBucket/index.unit.test.ts b/apps/api/src/repositories/daoMetricsDayBucket/index.unit.test.ts index 58723f15d..8c5ad6df9 100644 --- a/apps/api/src/repositories/daoMetricsDayBucket/index.unit.test.ts +++ b/apps/api/src/repositories/daoMetricsDayBucket/index.unit.test.ts @@ -10,6 +10,7 @@ import { DaoMetricsDayBucketRepository } from "./index"; type MetricInsert = typeof daoMetricsDayBucket.$inferInsert; const createMetric = (overrides: Partial = {}): MetricInsert => ({ + id: "test-id", date: 1700000000n, daoId: "UNI", tokenId: "uni", diff --git a/apps/api/src/repositories/delegations/delegators.unit.test.ts b/apps/api/src/repositories/delegations/delegators.unit.test.ts index 7407888e3..20b933214 100644 --- a/apps/api/src/repositories/delegations/delegators.unit.test.ts +++ b/apps/api/src/repositories/delegations/delegators.unit.test.ts @@ -23,26 +23,34 @@ let txCounter = 0; const createAccountBalance = ( overrides: Partial = {}, -): AccountBalanceInsert => ({ - accountId: DELEGATOR_A, - tokenId: `token-${txCounter++}`, - balance: 1000000000000000000n, - delegate: DELEGATE, - ...overrides, -}); +): AccountBalanceInsert => { + const n = txCounter++; + return { + id: `ab-${n}`, + accountId: DELEGATOR_A, + tokenId: `token-${n}`, + balance: 1000000000000000000n, + delegate: DELEGATE, + ...overrides, + }; +}; const createDelegation = ( overrides: Partial = {}, -): DelegationInsert => ({ - transactionHash: 
`0x${(txCounter++).toString(16).padStart(64, "0")}`, - daoId: "uni", - delegateAccountId: DELEGATE, - delegatorAccountId: DELEGATOR_A, - delegatedValue: 0n, - timestamp: 1700000000n, - logIndex: 0, - ...overrides, -}); +): DelegationInsert => { + const n = txCounter++; + return { + id: `d-${n}`, + transactionHash: `0x${n.toString(16).padStart(64, "0")}`, + daoId: "uni", + delegateAccountId: DELEGATE, + delegatorAccountId: DELEGATOR_A, + delegatedValue: 0n, + timestamp: 1700000000n, + logIndex: 0, + ...overrides, + }; +}; const defaultSort = ( overrides: Partial = {}, diff --git a/apps/api/src/repositories/delegations/general.unit.test.ts b/apps/api/src/repositories/delegations/general.unit.test.ts index 38c4e11d9..b78b02161 100644 --- a/apps/api/src/repositories/delegations/general.unit.test.ts +++ b/apps/api/src/repositories/delegations/general.unit.test.ts @@ -18,6 +18,7 @@ const delegatedAccount: Address = "0x2222222222222222222222222222222222222222"; const createDelegation = ( overrides: Partial = {}, ): DelegationInsert => ({ + id: "test-id", transactionHash: "0xdefault", daoId: "uni", delegateAccountId: accountAddress, @@ -32,6 +33,7 @@ const createDelegation = ( const createAccountBalance = ( overrides: Partial = {}, ): AccountBalanceInsert => ({ + id: "test-id", accountId: accountAddress, tokenId: "uni", balance: 1000n, diff --git a/apps/api/src/repositories/delegations/historical.unit.test.ts b/apps/api/src/repositories/delegations/historical.unit.test.ts index 0f3422c0e..c68ad336b 100644 --- a/apps/api/src/repositories/delegations/historical.unit.test.ts +++ b/apps/api/src/repositories/delegations/historical.unit.test.ts @@ -18,6 +18,7 @@ const delegate2: Address = "0x9999999999999999999999999999999999999999"; const createDelegation = ( overrides: Partial = {}, ): DelegationInsert => ({ + id: "test-id", transactionHash: "0xdefault", daoId: "uni", delegateAccountId: delegate1, diff --git a/apps/api/src/repositories/feed/feed.repository.unit.test.ts 
b/apps/api/src/repositories/feed/feed.repository.unit.test.ts index 6aa052e30..d94549bbb 100644 --- a/apps/api/src/repositories/feed/feed.repository.unit.test.ts +++ b/apps/api/src/repositories/feed/feed.repository.unit.test.ts @@ -38,6 +38,7 @@ const defaultThresholds = ( const createFeedEvent = ( overrides: Partial = {}, ): FeedEventInsert => ({ + id: "test-id", txHash: "0xabc123", logIndex: 0, type: "VOTE", diff --git a/apps/api/src/repositories/last-update/index.unit.test.ts b/apps/api/src/repositories/last-update/index.unit.test.ts index 447e3ea23..14510a1f1 100644 --- a/apps/api/src/repositories/last-update/index.unit.test.ts +++ b/apps/api/src/repositories/last-update/index.unit.test.ts @@ -11,6 +11,7 @@ import { LastUpdateRepositoryImpl } from "./index"; type MetricInsert = typeof daoMetricsDayBucket.$inferInsert; const createMetric = (overrides: Partial = {}): MetricInsert => ({ + id: "test-id", date: 1700000000n, daoId: "UNI", tokenId: "uni", diff --git a/apps/api/src/repositories/proposals-activity/index.unit.test.ts b/apps/api/src/repositories/proposals-activity/index.unit.test.ts index 392f0c69f..0f73e6bff 100644 --- a/apps/api/src/repositories/proposals-activity/index.unit.test.ts +++ b/apps/api/src/repositories/proposals-activity/index.unit.test.ts @@ -14,17 +14,21 @@ const OTHER_VOTER = getAddress("0x2222222222222222222222222222222222222222"); type VoteInsert = typeof votesOnchain.$inferInsert; type ProposalInsert = typeof proposalsOnchain.$inferInsert; -const createVote = (overrides: Partial = {}): VoteInsert => ({ - txHash: "0xvote1", - daoId: "UNI", - voterAccountId: VOTER, - proposalId: "proposal-1", - support: "1", - votingPower: 1000n, - reason: "", - timestamp: 1700000000n, - ...overrides, -}); +const createVote = (overrides: Partial = {}): VoteInsert => { + const txHash = overrides.txHash ?? 
"0xvote1"; + return { + id: txHash, + txHash, + daoId: "UNI", + voterAccountId: VOTER, + proposalId: "proposal-1", + support: "1", + votingPower: 1000n, + reason: "", + timestamp: 1700000000n, + ...overrides, + }; +}; const createProposal = ( overrides: Partial = {}, diff --git a/apps/api/src/repositories/token/nft.unit.test.ts b/apps/api/src/repositories/token/nft.unit.test.ts index 0745e9a7a..589474300 100644 --- a/apps/api/src/repositories/token/nft.unit.test.ts +++ b/apps/api/src/repositories/token/nft.unit.test.ts @@ -14,11 +14,15 @@ let counter = 0; const createTokenPrice = ( overrides: Partial = {}, -): TokenPriceInsert => ({ - price: 1000000000000000000n, - timestamp: BigInt(1700000000 + counter++), - ...overrides, -}); +): TokenPriceInsert => { + const n = counter++; + return { + id: `price-${n}`, + price: 1000000000000000000n, + timestamp: BigInt(1700000000 + n), + ...overrides, + }; +}; describe("NFTPriceRepository", () => { let client: PGlite; diff --git a/apps/api/src/repositories/transactions/index.unit.test.ts b/apps/api/src/repositories/transactions/index.unit.test.ts index adfb4f7f2..0cb4605db 100644 --- a/apps/api/src/repositories/transactions/index.unit.test.ts +++ b/apps/api/src/repositories/transactions/index.unit.test.ts @@ -22,17 +22,22 @@ let txCounter = 0; const createTransaction = ( overrides: Partial = {}, -): TransactionInsert => ({ - transactionHash: `0x${(txCounter++).toString(16).padStart(64, "0")}`, - fromAddress: ACCOUNT_A, - toAddress: ACCOUNT_B, - timestamp: 1700000000n, - ...overrides, -}); +): TransactionInsert => { + const n = txCounter++; + return { + id: `tx-${n}`, + transactionHash: `0x${n.toString(16).padStart(64, "0")}`, + fromAddress: ACCOUNT_A, + toAddress: ACCOUNT_B, + timestamp: 1700000000n, + ...overrides, + }; +}; const createTransfer = ( overrides: Partial = {}, ): TransferInsert => ({ + id: `tf-${txCounter}`, transactionHash: `0x${txCounter.toString(16).padStart(64, "0")}`, daoId: TEST_DAO, tokenId: "token-1", @@ 
-47,6 +52,7 @@ const createTransfer = ( const createDelegation = ( overrides: Partial = {}, ): DelegationInsert => ({ + id: `d-${txCounter}`, transactionHash: `0x${txCounter.toString(16).padStart(64, "0")}`, daoId: TEST_DAO, delegateAccountId: ACCOUNT_B, diff --git a/apps/api/src/repositories/transfers/index.unit.test.ts b/apps/api/src/repositories/transfers/index.unit.test.ts index b593d979d..4c53f318f 100644 --- a/apps/api/src/repositories/transfers/index.unit.test.ts +++ b/apps/api/src/repositories/transfers/index.unit.test.ts @@ -17,6 +17,7 @@ type TransferInsert = typeof transfer.$inferInsert; const createTransfer = ( overrides: Partial = {}, ): TransferInsert => ({ + id: "test-id", transactionHash: "0xabc", daoId: "UNI", tokenId: "uni", diff --git a/apps/api/src/repositories/treasury/treasury.repository.integration.test.ts b/apps/api/src/repositories/treasury/treasury.repository.integration.test.ts index 706e7112b..1e6fb9f7c 100644 --- a/apps/api/src/repositories/treasury/treasury.repository.integration.test.ts +++ b/apps/api/src/repositories/treasury/treasury.repository.integration.test.ts @@ -14,6 +14,7 @@ type DaoMetricInsert = typeof daoMetricsDayBucket.$inferInsert; const createMetricRow = ( overrides: Partial = {}, ): DaoMetricInsert => ({ + id: "test-id", date: 1600041600n, daoId: "ENS", tokenId: "ENS-token", diff --git a/apps/api/src/repositories/votes/onchainVotes.unit.test.ts b/apps/api/src/repositories/votes/onchainVotes.unit.test.ts index 6516af994..8077f9e40 100644 --- a/apps/api/src/repositories/votes/onchainVotes.unit.test.ts +++ b/apps/api/src/repositories/votes/onchainVotes.unit.test.ts @@ -50,21 +50,26 @@ const createProposal = ( const createVote = ( overrides: Partial = {}, -): VotesOnchainInsert => ({ - txHash: `0x${(txCounter++).toString(16).padStart(64, "0")}`, - daoId: TEST_DAO, - voterAccountId: VOTER_A, - proposalId: "proposal-1", - support: "1", - votingPower: 1000n, - reason: null, - timestamp: 1700000000n, - ...overrides, -}); +): 
VotesOnchainInsert => { + const n = txCounter++; + return { + id: `vote-${n}`, + txHash: `0x${n.toString(16).padStart(64, "0")}`, + daoId: TEST_DAO, + voterAccountId: VOTER_A, + proposalId: "proposal-1", + support: "1", + votingPower: 1000n, + reason: null, + timestamp: 1700000000n, + ...overrides, + }; +}; const createAccountPowerRow = ( overrides: Partial = {}, ): AccountPowerInsert => ({ + id: "test-id", accountId: VOTER_A, daoId: TEST_DAO, votingPower: 1000n, @@ -77,17 +82,21 @@ const createAccountPowerRow = ( const createHistoryRow = ( overrides: Partial = {}, -): VotingPowerHistoryInsert => ({ - transactionHash: `0x${(txCounter++).toString(16).padStart(64, "0")}`, - daoId: TEST_DAO, - accountId: VOTER_A, - votingPower: 1000n, - delta: 200n, - deltaMod: 200n, - timestamp: 1700000000n, - logIndex: 0, - ...overrides, -}); +): VotingPowerHistoryInsert => { + const n = txCounter++; + return { + id: `h-${n}`, + transactionHash: `0x${n.toString(16).padStart(64, "0")}`, + daoId: TEST_DAO, + accountId: VOTER_A, + votingPower: 1000n, + delta: 200n, + deltaMod: 200n, + timestamp: 1700000000n, + logIndex: 0, + ...overrides, + }; +}; describe("VotesRepository", () => { let client: PGlite; diff --git a/apps/api/src/repositories/voting-power/aave.ts b/apps/api/src/repositories/voting-power/aave.ts index e12405c42..33b184472 100644 --- a/apps/api/src/repositories/voting-power/aave.ts +++ b/apps/api/src/repositories/voting-power/aave.ts @@ -126,8 +126,7 @@ export class AAVEVotingPowerRepository { return result.map((row) => ({ ...row.VotingPowerHistory, delegations: - row.Transfer && - row.Transfer?.logIndex > (row.Delegation?.logIndex || 0) + row.Transfer && row.Transfer?.logIndex > (row.Delegation?.logIndex || 0) ? 
null : row.Delegation, transfers: diff --git a/apps/api/src/repositories/voting-power/aave.unit.test.ts b/apps/api/src/repositories/voting-power/aave.unit.test.ts index 3fcc21578..fc619b4b6 100644 --- a/apps/api/src/repositories/voting-power/aave.unit.test.ts +++ b/apps/api/src/repositories/voting-power/aave.unit.test.ts @@ -33,6 +33,7 @@ let txCounter = 0; const createAccountPowerRow = ( overrides: Partial = {}, ): AccountPowerInsert => ({ + id: "test-id", accountId: TEST_ACCOUNT_1, daoId: TEST_DAO, votingPower: 1000n, @@ -45,21 +46,26 @@ const createAccountPowerRow = ( const createHistoryRow = ( overrides: Partial = {}, -): VotingPowerHistoryInsert => ({ - transactionHash: `0x${(txCounter++).toString(16).padStart(64, "0")}`, - daoId: TEST_DAO, - accountId: TEST_ACCOUNT_1, - votingPower: 1000n, - delta: 200n, - deltaMod: 200n, - timestamp: 1700000000n, - logIndex: 10, - ...overrides, -}); +): VotingPowerHistoryInsert => { + const n = txCounter++; + return { + id: `h-${n}`, + transactionHash: `0x${n.toString(16).padStart(64, "0")}`, + daoId: TEST_DAO, + accountId: TEST_ACCOUNT_1, + votingPower: 1000n, + delta: 200n, + deltaMod: 200n, + timestamp: 1700000000n, + logIndex: 10, + ...overrides, + }; +}; const createDelegation = ( overrides: Partial = {}, ): DelegationInsert => ({ + id: `d-${txCounter}`, transactionHash: `0x${txCounter.toString(16).padStart(64, "0")}`, daoId: TEST_DAO, delegateAccountId: TEST_ACCOUNT_1, @@ -72,12 +78,16 @@ const createDelegation = ( const createAccountBalance = ( overrides: Partial = {}, -): AccountBalanceInsert => ({ - accountId: TEST_ACCOUNT_1, - tokenId: `token-${txCounter++}`, - balance: 500n, - ...overrides, -}); +): AccountBalanceInsert => { + const n = txCounter++; + return { + id: `ab-${n}`, + accountId: TEST_ACCOUNT_1, + tokenId: `token-${n}`, + balance: 500n, + ...overrides, + }; +}; describe("AAVEVotingPowerRepository", () => { let client: PGlite; diff --git a/apps/api/src/repositories/voting-power/general.ts 
b/apps/api/src/repositories/voting-power/general.ts index e78749078..42ea70f3b 100644 --- a/apps/api/src/repositories/voting-power/general.ts +++ b/apps/api/src/repositories/voting-power/general.ts @@ -126,8 +126,7 @@ export class VotingPowerRepository { return result.map((row) => ({ ...row.VotingPowerHistory, delegations: - row.Transfer && - row.Transfer?.logIndex > (row.Delegation?.logIndex || 0) + row.Transfer && row.Transfer?.logIndex > (row.Delegation?.logIndex || 0) ? null : row.Delegation, transfers: diff --git a/apps/api/src/repositories/voting-power/nouns.ts b/apps/api/src/repositories/voting-power/nouns.ts index 663c52f60..f1b97f8b9 100644 --- a/apps/api/src/repositories/voting-power/nouns.ts +++ b/apps/api/src/repositories/voting-power/nouns.ts @@ -102,8 +102,7 @@ export class NounsVotingPowerRepository { return result.map((row) => ({ ...row.VotingPowerHistory, delegations: - row.Transfer && - row.Transfer?.logIndex < (row.Delegation?.logIndex || 0) + row.Transfer && row.Transfer?.logIndex < (row.Delegation?.logIndex || 0) ? 
null : row.Delegation, transfers: diff --git a/apps/api/src/repositories/voting-power/nouns.unit.test.ts b/apps/api/src/repositories/voting-power/nouns.unit.test.ts index 7de46e71d..88abca8fc 100644 --- a/apps/api/src/repositories/voting-power/nouns.unit.test.ts +++ b/apps/api/src/repositories/voting-power/nouns.unit.test.ts @@ -21,21 +21,26 @@ let txCounter = 0; const createHistoryRow = ( overrides: Partial = {}, -): VotingPowerHistoryInsert => ({ - transactionHash: `0x${(txCounter++).toString(16).padStart(64, "0")}`, - daoId: TEST_DAO, - accountId: TEST_ACCOUNT, - votingPower: 1000n, - delta: 200n, - deltaMod: 200n, - timestamp: 1700000000n, - logIndex: 10, - ...overrides, -}); +): VotingPowerHistoryInsert => { + const n = txCounter++; + return { + id: `h-${n}`, + transactionHash: `0x${n.toString(16).padStart(64, "0")}`, + daoId: TEST_DAO, + accountId: TEST_ACCOUNT, + votingPower: 1000n, + delta: 200n, + deltaMod: 200n, + timestamp: 1700000000n, + logIndex: 10, + ...overrides, + }; +}; const createDelegation = ( overrides: Partial = {}, ): DelegationInsert => ({ + id: `d-${txCounter}`, transactionHash: `0x${txCounter.toString(16).padStart(64, "0")}`, daoId: TEST_DAO, delegateAccountId: TEST_ACCOUNT, @@ -49,6 +54,7 @@ const createDelegation = ( const createTransfer = ( overrides: Partial = {}, ): TransferInsert => ({ + id: `tf-${txCounter}`, transactionHash: `0x${txCounter.toString(16).padStart(64, "0")}`, daoId: TEST_DAO, tokenId: "token-1", diff --git a/apps/api/src/repositories/voting-power/voting-power.repository.unit.test.ts b/apps/api/src/repositories/voting-power/voting-power.repository.unit.test.ts index 69e8214c5..eb66d9855 100644 --- a/apps/api/src/repositories/voting-power/voting-power.repository.unit.test.ts +++ b/apps/api/src/repositories/voting-power/voting-power.repository.unit.test.ts @@ -30,6 +30,7 @@ const TEST_DAO = "test-dao"; const createAccountPowerRow = ( overrides: Partial = {}, ): AccountPowerInsert => ({ + id: "test-id", accountId: 
TEST_ACCOUNT_1, daoId: TEST_DAO, votingPower: 1000n, @@ -43,6 +44,7 @@ const createAccountPowerRow = ( const createHistoryRow = ( overrides: Partial = {}, ): VotingPowerHistoryInsert => ({ + id: "test-id", transactionHash: "0xtx1", daoId: TEST_DAO, accountId: TEST_ACCOUNT_1, @@ -58,20 +60,25 @@ let vpTxCounter = 0; const createDelegationRow = ( overrides: Partial = {}, -): DelegationInsert => ({ - transactionHash: `0x${(vpTxCounter++).toString(16).padStart(64, "0")}`, - daoId: TEST_DAO, - delegateAccountId: TEST_ACCOUNT_1, - delegatorAccountId: TEST_ACCOUNT_2, - delegatedValue: 0n, - timestamp: 1700000000n, - logIndex: 5, - ...overrides, -}); +): DelegationInsert => { + const n = vpTxCounter++; + return { + id: `d-${n}`, + transactionHash: `0x${n.toString(16).padStart(64, "0")}`, + daoId: TEST_DAO, + delegateAccountId: TEST_ACCOUNT_1, + delegatorAccountId: TEST_ACCOUNT_2, + delegatedValue: 0n, + timestamp: 1700000000n, + logIndex: 5, + ...overrides, + }; +}; const createTransferRow = ( overrides: Partial = {}, ): TransferInsert => ({ + id: `tf-${vpTxCounter}`, transactionHash: `0x${vpTxCounter.toString(16).padStart(64, "0")}`, daoId: TEST_DAO, tokenId: "token-1", diff --git a/apps/api/src/services/account-balance/historical.unit.test.ts b/apps/api/src/services/account-balance/historical.unit.test.ts index 8e9354c94..234b87fe5 100644 --- a/apps/api/src/services/account-balance/historical.unit.test.ts +++ b/apps/api/src/services/account-balance/historical.unit.test.ts @@ -8,6 +8,7 @@ const MOCK_ACCOUNT = getAddress("0xd8dA6BF26964aF9D7eEd9e03E53415D37aA96045"); const createMockHistoricalBalance = ( overrides: Partial = {}, ): DBHistoricalBalanceWithRelations => ({ + id: "test-id", transactionHash: "0xabc", daoId: "UNI", accountId: MOCK_ACCOUNT, @@ -17,6 +18,7 @@ const createMockHistoricalBalance = ( timestamp: 1700000000n, logIndex: 0, transfer: { + id: "test-id", transactionHash: "0xabc", daoId: "UNI", tokenId: "0x1f9840a85d5aF5bf1D1762F925BDADdC4201F984", diff 
--git a/apps/api/src/services/account-balance/variations.unit.test.ts b/apps/api/src/services/account-balance/variations.unit.test.ts index 878d8c677..6bc9dfa30 100644 --- a/apps/api/src/services/account-balance/variations.unit.test.ts +++ b/apps/api/src/services/account-balance/variations.unit.test.ts @@ -21,6 +21,7 @@ const createMockVariation = ( const createMockBalance = ( overrides: Partial = {}, ): DBAccountBalance => ({ + id: "test-id", accountId: ADDR_A, tokenId: MOCK_TOKEN, balance: 500n, diff --git a/apps/api/src/services/delegation-percentage/delegation-percentage.unit.test.ts b/apps/api/src/services/delegation-percentage/delegation-percentage.unit.test.ts index 0eec8797d..239fca587 100644 --- a/apps/api/src/services/delegation-percentage/delegation-percentage.unit.test.ts +++ b/apps/api/src/services/delegation-percentage/delegation-percentage.unit.test.ts @@ -18,6 +18,7 @@ import { const createMockRow = ( overrides: Partial = {}, ): DBTokenMetric => ({ + id: "test-id", date: 0n, daoId: "uniswap", tokenId: "uni", diff --git a/apps/api/src/services/delegations/current.unit.test.ts b/apps/api/src/services/delegations/current.unit.test.ts index 56230594c..7597d8ac3 100644 --- a/apps/api/src/services/delegations/current.unit.test.ts +++ b/apps/api/src/services/delegations/current.unit.test.ts @@ -8,6 +8,7 @@ import { DelegationsService } from "./current"; const createMockDelegation = ( overrides: Partial = {}, ): DBDelegation => ({ + id: "test-id", transactionHash: "0xabc123", daoId: "uni", delegateAccountId: "0x1234567890123456789012345678901234567890", diff --git a/apps/api/src/services/delegations/historical.unit.test.ts b/apps/api/src/services/delegations/historical.unit.test.ts index 92124812b..ac3a3f3c1 100644 --- a/apps/api/src/services/delegations/historical.unit.test.ts +++ b/apps/api/src/services/delegations/historical.unit.test.ts @@ -11,6 +11,7 @@ import { const createMockDelegation = ( overrides: Partial = {}, ): DBDelegation => ({ + id: 
"test-id", transactionHash: "0xabc123", daoId: "uni", delegateAccountId: "0x1234567890123456789012345678901234567890" as Address, diff --git a/apps/api/src/services/feed/feed.unit.test.ts b/apps/api/src/services/feed/feed.unit.test.ts index 19c7ebbf5..d83b2c087 100644 --- a/apps/api/src/services/feed/feed.unit.test.ts +++ b/apps/api/src/services/feed/feed.unit.test.ts @@ -11,6 +11,7 @@ import { FeedService } from "."; const createFeedEvent = ( overrides: Partial = {}, ): DBFeedEvent => ({ + id: "test-id", txHash: "0xabc123", logIndex: 0, type: "VOTE", diff --git a/apps/api/src/services/transactions/index.unit.test.ts b/apps/api/src/services/transactions/index.unit.test.ts index 6e9237eae..91af6e88e 100644 --- a/apps/api/src/services/transactions/index.unit.test.ts +++ b/apps/api/src/services/transactions/index.unit.test.ts @@ -16,6 +16,7 @@ function createStubRepo(items: DBTransaction[] = [], count = 0) { } const makeDBTransaction = (overrides = {}): DBTransaction => ({ + id: "test-id", transactionHash: "0xabc123", fromAddress: "0x1234567890123456789012345678901234567890", toAddress: "0x0987654321098765432109876543210987654321", diff --git a/apps/api/src/services/transfers/index.unit.test.ts b/apps/api/src/services/transfers/index.unit.test.ts index f597fe315..2e6282aed 100644 --- a/apps/api/src/services/transfers/index.unit.test.ts +++ b/apps/api/src/services/transfers/index.unit.test.ts @@ -11,6 +11,7 @@ function createStubRepo(items: DBTransfer[] = [], count = 0) { } const makeDBTransfer = (overrides = {}): DBTransfer => ({ + id: "test-id", transactionHash: "0xdeadbeef", daoId: "UNI", tokenId: "0x1f9840a85d5aF5bf1D1762F925BDADdC4201F984", diff --git a/apps/api/src/services/votes/onchainVotes.unit.test.ts b/apps/api/src/services/votes/onchainVotes.unit.test.ts index 3f344d37c..dd7ef7b95 100644 --- a/apps/api/src/services/votes/onchainVotes.unit.test.ts +++ b/apps/api/src/services/votes/onchainVotes.unit.test.ts @@ -17,6 +17,7 @@ type VoteWithTitle = DBVote & { 
const createMockDBVote = ( overrides: Partial = {}, ): VoteWithTitle => ({ + id: "test-id", voterAccountId: VOTER_A, txHash: TX_HASH, transactionHash: TX_HASH, diff --git a/apps/api/src/services/voting-power/aave.unit.test.ts b/apps/api/src/services/voting-power/aave.unit.test.ts index 64a829e1a..e2658177b 100644 --- a/apps/api/src/services/voting-power/aave.unit.test.ts +++ b/apps/api/src/services/voting-power/aave.unit.test.ts @@ -12,6 +12,7 @@ const MOCK_ADDRESS = getAddress("0xd8dA6BF26964aF9D7eEd9e03E53415D37aA96045"); const createMockHistoricalVP = ( overrides: Partial = {}, ): DBHistoricalVotingPowerWithRelations => ({ + id: "test-id", transactionHash: "0xabc", daoId: "AAVE", accountId: MOCK_ADDRESS, @@ -28,6 +29,7 @@ const createMockHistoricalVP = ( const createMockAccountPower = ( overrides: Partial = {}, ): DBAccountPowerWithVariation => ({ + id: "test-id", accountId: MOCK_ADDRESS, daoId: "AAVE", votingPower: 1000n, diff --git a/apps/api/src/services/voting-power/index.unit.test.ts b/apps/api/src/services/voting-power/index.unit.test.ts index 0ea5698d7..f6695adf2 100644 --- a/apps/api/src/services/voting-power/index.unit.test.ts +++ b/apps/api/src/services/voting-power/index.unit.test.ts @@ -38,6 +38,7 @@ function createStubVotingPowersRepo( const makeDBHistoricalVotingPower = ( overrides = {}, ): DBHistoricalVotingPowerWithRelations => ({ + id: "test-id", transactionHash: "0xabc", daoId: "UNI", accountId: "0x1111111111111111111111111111111111111111" as Address, @@ -252,6 +253,7 @@ describe("VotingPowerService (index)", () => { describe("getVotingPowersByAccountId", () => { it("returns the voting power for the given account", async () => { const repoResult: DBAccountPowerWithVariation = { + id: "test-id", accountId: addr1, daoId: "UNI", votingPower: 1000n, diff --git a/apps/api/src/services/voting-power/voting-power.unit.test.ts b/apps/api/src/services/voting-power/voting-power.unit.test.ts index 4a7a7c94d..0a12beafd 100644 --- 
a/apps/api/src/services/voting-power/voting-power.unit.test.ts +++ b/apps/api/src/services/voting-power/voting-power.unit.test.ts @@ -16,6 +16,7 @@ function createStubVariationRepo(variations: DBVotingPowerVariation[] = []) { } const makeDBAccountPower = (overrides = {}): DBAccountPower => ({ + id: "test-id", accountId: "0x1111111111111111111111111111111111111111" as Address, daoId: "UNI", votingPower: 1000n, From 6287b3d08accf6510635311dd658811d75f6f72a Mon Sep 17 00:00:00 2001 From: Pedro Binotto Date: Fri, 10 Apr 2026 13:51:10 -0300 Subject: [PATCH 13/17] fix: enum types --- .../controllers/feed/feed.integration.test.ts | 23 +++++++++-------- apps/api/src/database/schema.ts | 19 +++----------- .../feed/feed.repository.unit.test.ts | 23 +++++++++-------- apps/api/src/services/feed/feed.unit.test.ts | 25 ++++++++++--------- 4 files changed, 41 insertions(+), 49 deletions(-) diff --git a/apps/api/src/controllers/feed/feed.integration.test.ts b/apps/api/src/controllers/feed/feed.integration.test.ts index 02fd9527e..52eaef825 100644 --- a/apps/api/src/controllers/feed/feed.integration.test.ts +++ b/apps/api/src/controllers/feed/feed.integration.test.ts @@ -17,17 +17,18 @@ type FeedEventInsert = typeof feedEvent.$inferInsert; const nounsThresholds = getDaoRelevanceThreshold(DaoIdEnum.NOUNS); const createEvent = ( - overrides: Partial = {}, -): FeedEventInsert => ({ - id: "test-id", - txHash: "0xabc123def456abc1", - logIndex: 0, - type: "VOTE" as const, - value: nounsThresholds[FeedEventType.VOTE][FeedRelevance.MEDIUM], - timestamp: 1700000000, - metadata: null, - ...overrides, -}); + overrides: Partial & { type: string }> = {}, +): FeedEventInsert => + ({ + id: "test-id", + txHash: "0xabc123def456abc1", + logIndex: 0, + type: "VOTE", + value: nounsThresholds[FeedEventType.VOTE][FeedRelevance.MEDIUM], + timestamp: 1700000000, + metadata: null, + ...overrides, + }) as FeedEventInsert; let client: PGlite; let db: Drizzle; diff --git a/apps/api/src/database/schema.ts 
b/apps/api/src/database/schema.ts index 6ffe94b2a..d2666f211 100644 --- a/apps/api/src/database/schema.ts +++ b/apps/api/src/database/schema.ts @@ -1,8 +1,8 @@ import { relations } from "drizzle-orm"; -import { pgTable, index, bigint, pgEnum } from "drizzle-orm/pg-core"; +import { pgTable, index, bigint } from "drizzle-orm/pg-core"; import { Address, zeroAddress } from "viem"; -import { MetricTypesArray } from "@/lib/constants"; +import { FeedEventType } from "@/lib/constants"; export const token = pgTable("Token", (drizzle) => ({ id: drizzle.text().primaryKey(), @@ -197,8 +197,6 @@ export const votesOnchainRelations = relations(votesOnchain, ({ one }) => ({ }), })); -export const metricType = pgEnum("metricType", MetricTypesArray); - export const daoMetricsDayBucket = pgTable( "DaoMetricsDayBucket", (drizzle) => ({ @@ -206,7 +204,7 @@ export const daoMetricsDayBucket = pgTable( date: bigint({ mode: "bigint" }).notNull(), daoId: drizzle.text("daoId").notNull(), tokenId: drizzle.text("tokenId").notNull(), - metricType: metricType("metricType").notNull(), + metricType: drizzle.text("metricType").notNull(), open: bigint({ mode: "bigint" }).notNull(), close: bigint({ mode: "bigint" }).notNull(), low: bigint({ mode: "bigint" }).notNull(), @@ -236,22 +234,13 @@ export const tokenPrice = pgTable("TokenPrice", (_drizzle) => ({ timestamp: bigint({ mode: "bigint" }).notNull(), })); -export const evenTypeEnum = pgEnum("event_type", [ - "VOTE", - "PROPOSAL", - "DELEGATION", - "TRANSFER", - "DELEGATION_VOTES_CHANGED", - "PROPOSAL_EXTENDED", -]); - export const feedEvent = pgTable( "FeedEvent", (drizzle) => ({ id: drizzle.text().primaryKey(), txHash: drizzle.text("txHash").notNull(), logIndex: drizzle.integer("logIndex").notNull(), - type: evenTypeEnum("type").notNull(), + type: drizzle.text("type").$type().notNull(), value: bigint({ mode: "bigint" }).notNull().default(0n), timestamp: bigint({ mode: "number" }).notNull(), metadata: drizzle.json().$type>(), diff --git 
a/apps/api/src/repositories/feed/feed.repository.unit.test.ts b/apps/api/src/repositories/feed/feed.repository.unit.test.ts index d94549bbb..dd4d7eaeb 100644 --- a/apps/api/src/repositories/feed/feed.repository.unit.test.ts +++ b/apps/api/src/repositories/feed/feed.repository.unit.test.ts @@ -36,17 +36,18 @@ const defaultThresholds = ( }); const createFeedEvent = ( - overrides: Partial = {}, -): FeedEventInsert => ({ - id: "test-id", - txHash: "0xabc123", - logIndex: 0, - type: "VOTE", - value: 1000n, - timestamp: 1700000000, - metadata: null, - ...overrides, -}); + overrides: Partial & { type: string }> = {}, +): FeedEventInsert => + ({ + id: "test-id", + txHash: "0xabc123", + logIndex: 0, + type: "VOTE", + value: 1000n, + timestamp: 1700000000, + metadata: null, + ...overrides, + }) as FeedEventInsert; describe("FeedRepository", () => { let client: PGlite; diff --git a/apps/api/src/services/feed/feed.unit.test.ts b/apps/api/src/services/feed/feed.unit.test.ts index d83b2c087..25a8f88b4 100644 --- a/apps/api/src/services/feed/feed.unit.test.ts +++ b/apps/api/src/services/feed/feed.unit.test.ts @@ -9,17 +9,18 @@ import { DBFeedEvent, FeedRequest } from "@/mappers"; import { FeedService } from "."; const createFeedEvent = ( - overrides: Partial = {}, -): DBFeedEvent => ({ - id: "test-id", - txHash: "0xabc123", - logIndex: 0, - type: "VOTE", - value: parseEther("100000"), - timestamp: 1700000000, - metadata: null, - ...overrides, -}); + overrides: Partial & { type: string }> = {}, +): DBFeedEvent => + ({ + id: "test-id", + txHash: "0xabc123", + logIndex: 0, + type: "VOTE", + value: parseEther("100000"), + timestamp: 1700000000, + metadata: null, + ...overrides, + }) as DBFeedEvent; const createRequest = (overrides: Partial = {}): FeedRequest => ({ skip: 0, @@ -38,7 +39,7 @@ class SimpleFeedRepository { valueThresholds: Partial>, ) { const filtered = this.items.filter((e) => { - if (e.type === "DELEGATION_VOTES_CHANGED") return false; + if ((e.type as string) === 
"DELEGATION_VOTES_CHANGED") return false; const threshold = valueThresholds[e.type]; return threshold === undefined || e.value >= threshold; }); From f246df79ca3f3ef7aeeb11a86ab33d15ec7c93b0 Mon Sep 17 00:00:00 2001 From: Pedro Binotto Date: Fri, 10 Apr 2026 14:44:53 -0300 Subject: [PATCH 14/17] fix: remove snake_case casing from drizzle client HyperIndex uses PascalCase table names and camelCase column names. The casing: "snake_case" option was overriding the explicit column name strings in the schema, causing all table names to be lowercased (e.g. "ProposalOnchain" -> "proposal_onchain"). Co-Authored-By: Claude Sonnet 4.6 --- apps/api/cmd/aave.ts | 2 +- apps/api/cmd/index.ts | 3 +-- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/apps/api/cmd/aave.ts b/apps/api/cmd/aave.ts index 99df3bd1d..dd7d5889d 100644 --- a/apps/api/cmd/aave.ts +++ b/apps/api/cmd/aave.ts @@ -105,7 +105,7 @@ if (!daoClient) { throw new Error(`Client not found for DAO ${env.DAO_ID}`); } -const pgClient = drizzle(env.DATABASE_URL, { schema, casing: "snake_case" }); +const pgClient = drizzle(env.DATABASE_URL, { schema }); health(app, pgClient); diff --git a/apps/api/cmd/index.ts b/apps/api/cmd/index.ts index 2593e4682..a07c4f272 100644 --- a/apps/api/cmd/index.ts +++ b/apps/api/cmd/index.ts @@ -174,7 +174,7 @@ if (!daoClient) { throw new Error(`Client not found for DAO ${env.DAO_ID}`); } -const pgClient = drizzle(env.DATABASE_URL, { schema, casing: "snake_case" }); +const pgClient = drizzle(env.DATABASE_URL, { schema }); health(app, pgClient); @@ -351,7 +351,6 @@ tokenMetrics(app, tokenMetricsService); if (daoClient.supportOffchainData()) { const pgUnifiedClient = drizzle(env.DATABASE_URL, { schema: { ...schema, ...offchainSchema }, - casing: "snake_case", }); const offchainProposalsRepo = wrapWithTracing( From e891269621d9cc828194faa693c601cd9bc02ce8 Mon Sep 17 00:00:00 2001 From: Pedro Binotto Date: Fri, 10 Apr 2026 14:45:12 -0300 Subject: [PATCH 15/17] fix: env --- 
apps/api/src/env.ts | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/apps/api/src/env.ts b/apps/api/src/env.ts index 547eb0959..3f4179673 100644 --- a/apps/api/src/env.ts +++ b/apps/api/src/env.ts @@ -7,9 +7,7 @@ dotenv.config(); const envSchema = z.object({ RPC_URL: z.string(), - DATABASE_URL: z - .string() - .transform((val) => `${val}?options=-c%20search_path%3Danticapture`), + DATABASE_URL: z.string(), DAO_ID: z.enum(DaoIdEnum), CHAIN_ID: z.coerce.number(), From d41613e3f2ff2a1c035e1336cac5f431eb927d19 Mon Sep 17 00:00:00 2001 From: Pedro Binotto Date: Tue, 14 Apr 2026 16:07:36 -0300 Subject: [PATCH 16/17] fix: pin envio to 2.31.1 to avoid reserved keyword breaking change --- apps/hypersync-indexer/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apps/hypersync-indexer/package.json b/apps/hypersync-indexer/package.json index 0ee592b5d..c5744eb8d 100644 --- a/apps/hypersync-indexer/package.json +++ b/apps/hypersync-indexer/package.json @@ -17,7 +17,7 @@ "devDependencies": { "@types/node": "^20.16.5", "dotenv": "^16.5.0", - "envio": "^2.32.12", + "envio": "2.31.1", "eslint": "^9", "eslint-config-prettier": "^9.1.0", "eslint-plugin-prettier": "^5.2.1", From 5de7a9ff56379cab7d7527f2c4cc0820142e5555 Mon Sep 17 00:00:00 2001 From: Pedro Binotto Date: Tue, 14 Apr 2026 16:11:56 -0300 Subject: [PATCH 17/17] fix: ignore generated code typecheck --- apps/hypersync-indexer/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apps/hypersync-indexer/package.json b/apps/hypersync-indexer/package.json index c5744eb8d..305bc05b5 100644 --- a/apps/hypersync-indexer/package.json +++ b/apps/hypersync-indexer/package.json @@ -8,7 +8,7 @@ "codegen": "envio codegen", "lint": "eslint .", "lint:fix": "eslint . 
--fix", - "typecheck": "tsc --noEmit", + "typecheck": "[ -d generated ] && tsc --noEmit || echo 'Skipping typecheck: run envio codegen first'", "clean": "rm -rf node_modules generated *.tsbuildinfo" }, "dependencies": {