From eea49aa638388084acab34f87eb6b9439d135ae9 Mon Sep 17 00:00:00 2001 From: incrypto32 Date: Wed, 21 Jan 2026 17:42:15 +0400 Subject: [PATCH 1/2] Migrate to alloy from rust-web3 --- Cargo.lock | 1404 +++++++++++++++-- Cargo.toml | 4 +- chain/ethereum/Cargo.toml | 1 + chain/ethereum/build.rs | 1 + chain/ethereum/proto/ethereum.proto | 701 +++++++- chain/ethereum/src/adapter.rs | 271 ++-- chain/ethereum/src/call_helper.rs | 141 ++ chain/ethereum/src/chain.rs | 402 ++++- chain/ethereum/src/codec.rs | 960 ++++++++--- chain/ethereum/src/data_source.rs | 236 +-- chain/ethereum/src/ethereum_adapter.rs | 1310 ++++++++------- chain/ethereum/src/ingestor.rs | 12 +- chain/ethereum/src/lib.rs | 1 + .../src/protobuf/sf.ethereum.r#type.v2.rs | 780 ++++++++- chain/ethereum/src/runtime/abi.rs | 233 +-- chain/ethereum/src/runtime/runtime_adapter.rs | 76 +- chain/ethereum/src/tests.rs | 128 +- chain/ethereum/src/transport.rs | 180 +-- chain/ethereum/src/trigger.rs | 242 ++- chain/near/src/codec.rs | 13 +- chain/near/src/trigger.rs | 8 +- graph/Cargo.toml | 8 +- graph/src/abi/event_ext.rs | 169 ++ graph/src/abi/function_ext.rs | 303 ++++ graph/src/abi/mod.rs | 20 + graph/src/abi/param.rs | 7 + graph/src/abi/value_ext.rs | 277 ++++ graph/src/blockchain/mock.rs | 8 +- graph/src/blockchain/mod.rs | 16 +- graph/src/blockchain/types.rs | 156 +- graph/src/cheap_clone.rs | 2 +- graph/src/components/ethereum/mod.rs | 5 +- graph/src/components/ethereum/types.rs | 211 ++- graph/src/components/store/mod.rs | 3 +- graph/src/components/store/traits.rs | 10 +- .../subgraph/proof_of_indexing/mod.rs | 22 +- .../subgraph/proof_of_indexing/online.rs | 4 +- graph/src/components/transaction_receipt.rs | 41 +- graph/src/data/graphql/values.rs | 13 +- graph/src/data/store/ethereum.rs | 22 +- graph/src/data/store/scalar/bigint.rs | 28 +- graph/src/data/store/scalar/bytes.rs | 14 +- graph/src/data/subgraph/mod.rs | 2 +- graph/src/data_source/common.rs | 526 ++++-- graph/src/lib.rs | 6 +- graph/src/runtime/mod.rs | 2 +- graph/src/util/mod.rs | 3 + graph/src/util/test_utils.rs | 57 + graphql/src/store/resolver.rs | 7 +- node/src/manager/commands/chain.rs | 16 +- node/src/manager/commands/check_blocks.rs | 33 +- runtime/test/src/common.rs | 8 +- runtime/test/src/test.rs | 9 +- runtime/test/src/test/abi.rs | 42 +- runtime/wasm/Cargo.toml | 1 - runtime/wasm/src/asc_abi/class.rs | 31 +- runtime/wasm/src/host_exports.rs | 41 +- runtime/wasm/src/module/context.rs | 2 +- runtime/wasm/src/module/instance.rs | 2 +- runtime/wasm/src/to_from/external.rs | 167 +- server/index-node/src/resolver.rs | 4 +- store/postgres/src/chain_store.rs | 59 +- store/postgres/src/deployment.rs | 16 +- store/postgres/src/deployment_store.rs | 4 +- store/postgres/src/detail.rs | 5 +- store/postgres/src/store.rs | 4 +- store/postgres/src/subgraph_store.rs | 12 +- store/postgres/src/transaction_receipt.rs | 20 +- store/test-store/Cargo.toml | 1 + store/test-store/src/block_store.rs | 41 +- store/test-store/src/store.rs | 11 +- .../tests/chain/ethereum/manifest.rs | 12 +- store/test-store/tests/graph/entity_cache.rs | 4 +- store/test-store/tests/postgres/chain_head.rs | 22 +- store/test-store/tests/postgres/relational.rs | 11 +- .../tests/postgres/relational_bytes.rs | 13 +- store/test-store/tests/postgres/store.rs | 42 +- store/test-store/tests/postgres/writable.rs | 4 +- tests/Cargo.toml | 4 + tests/src/contract.rs | 21 +- tests/src/fixture/ethereum.rs | 116 +- tests/src/fixture/mod.rs | 5 +- tests/tests/integration_tests.rs | 2 +- tests/tests/runner_tests.rs | 9 
+- 84 files changed, 7082 insertions(+), 2758 deletions(-) create mode 100644 chain/ethereum/src/call_helper.rs create mode 100644 graph/src/abi/event_ext.rs create mode 100644 graph/src/abi/function_ext.rs create mode 100644 graph/src/abi/mod.rs create mode 100644 graph/src/abi/param.rs create mode 100644 graph/src/abi/value_ext.rs create mode 100644 graph/src/util/test_utils.rs diff --git a/Cargo.lock b/Cargo.lock index f1f46a4d264..96f76218119 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -72,12 +72,37 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cf23ee5a0d40c75ade22bf33f117058461fc30a95e84d48b01c845c28f4ea7c5" dependencies = [ "alloy-consensus", + "alloy-contract", "alloy-core", "alloy-eips", + "alloy-genesis", + "alloy-json-rpc", + "alloy-network", + "alloy-provider", + "alloy-pubsub", + "alloy-rpc-client", + "alloy-rpc-types", "alloy-serde", + "alloy-signer", + "alloy-signer-local", + "alloy-transport", + "alloy-transport-http", + "alloy-transport-ipc", + "alloy-transport-ws", "alloy-trie", ] +[[package]] +name = "alloy-chains" +version = "0.2.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef3a72a2247c34a8545ee99e562b1b9b69168e5000567257ae51e91b4e6b1193" +dependencies = [ + "alloy-primitives", + "num_enum", + "strum", +] + [[package]] name = "alloy-consensus" version = "1.3.0" @@ -90,7 +115,9 @@ dependencies = [ "alloy-serde", "alloy-trie", "alloy-tx-macros", + "arbitrary", "auto_impl", + "borsh", "c-kzg", "derive_more 2.0.1", "either", @@ -100,6 +127,45 @@ dependencies = [ "secp256k1 0.30.0", "serde", "serde_json", + "serde_with", + "thiserror 2.0.17", +] + +[[package]] +name = "alloy-consensus-any" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07001b1693af794c7526aab400b42e38075f986ef8fef78841e5ebc745473e56" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-primitives", + "alloy-rlp", + "alloy-serde", + "arbitrary", + "serde", +] + +[[package]] +name = "alloy-contract" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3ef1b07c3ff5bf4fab5b8e6c46190cd40b2f2fd2cd72b5b02527a38125d0bff4" +dependencies = [ + "alloy-consensus", + "alloy-dyn-abi", + "alloy-json-abi", + "alloy-network", + "alloy-network-primitives", + "alloy-primitives", + "alloy-provider", + "alloy-pubsub", + "alloy-rpc-types-eth", + "alloy-sol-types", + "alloy-transport", + "futures 0.3.31", + "futures-util", + "serde_json", "thiserror 2.0.17", ] @@ -108,10 +174,30 @@ name = "alloy-core" version = "1.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2a651e1d9e50e6d0a78bd23cd08facb70459a94501c4036c7799a093e569a310" +dependencies = [ + "alloy-dyn-abi", + "alloy-json-abi", + "alloy-primitives", + "alloy-rlp", + "alloy-sol-types", +] + +[[package]] +name = "alloy-dyn-abi" +version = "1.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0d48a9101f4a67c22fae57489f1ddf3057b8ab4a368d8eac3be088b6e9d9c9d9" dependencies = [ "alloy-json-abi", "alloy-primitives", + "alloy-sol-type-parser", "alloy-sol-types", + "arbitrary", + "itoa", + "proptest", + "serde", + "serde_json", + "winnow 0.7.13", ] [[package]] @@ -122,7 +208,9 @@ checksum = "741bdd7499908b3aa0b159bba11e71c8cddd009a2c2eb7a06e825f1ec87900a5" dependencies = [ "alloy-primitives", "alloy-rlp", + "arbitrary", "crc", + "rand 0.8.5", "serde", "thiserror 2.0.17", ] @@ -135,6 +223,9 @@ checksum = 
"9441120fa82df73e8959ae0e4ab8ade03de2aaae61be313fbf5746277847ce25" dependencies = [ "alloy-primitives", "alloy-rlp", + "arbitrary", + "borsh", + "rand 0.8.5", "serde", ] @@ -146,6 +237,10 @@ checksum = "2919c5a56a1007492da313e7a3b6d45ef5edc5d33416fdec63c0d7a2702a0d20" dependencies = [ "alloy-primitives", "alloy-rlp", + "arbitrary", + "borsh", + "k256", + "rand 0.8.5", "serde", "thiserror 2.0.17", ] @@ -162,7 +257,9 @@ dependencies = [ "alloy-primitives", "alloy-rlp", "alloy-serde", + "arbitrary", "auto_impl", + "borsh", "c-kzg", "derive_more 2.0.1", "either", @@ -172,6 +269,21 @@ dependencies = [ "thiserror 2.0.17", ] +[[package]] +name = "alloy-genesis" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "64ba7afffa225272cf50c62ff04ac574adc7bfa73af2370db556340f26fcff5c" +dependencies = [ + "alloy-eips", + "alloy-primitives", + "alloy-serde", + "alloy-trie", + "borsh", + "serde", + "serde_with", +] + [[package]] name = "alloy-json-abi" version = "1.5.1" @@ -184,6 +296,60 @@ dependencies = [ "serde_json", ] +[[package]] +name = "alloy-json-rpc" +version = "1.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b626409c98ba43aaaa558361bca21440c88fd30df7542c7484b9c7a1489cdb" +dependencies = [ + "alloy-primitives", + "alloy-sol-types", + "http 1.4.0", + "serde", + "serde_json", + "thiserror 2.0.17", + "tracing", +] + +[[package]] +name = "alloy-network" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "364a5eaa598437d7a57bcbcb4b7fcb0518e192cf809a19b09b2b5cf73b9ba1cd" +dependencies = [ + "alloy-consensus", + "alloy-consensus-any", + "alloy-eips", + "alloy-json-rpc", + "alloy-network-primitives", + "alloy-primitives", + "alloy-rpc-types-any", + "alloy-rpc-types-eth", + "alloy-serde", + "alloy-signer", + "alloy-sol-types", + "async-trait", + "auto_impl", + "derive_more 2.0.1", + "futures-utils-wasm", + "serde", + "serde_json", + "thiserror 2.0.17", +] + +[[package]] +name = "alloy-network-primitives" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "21af5255bd276e528ee625d97033884916e879a1c6edcd5b70a043bd440c0710" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-primitives", + "alloy-serde", + "serde", +] + [[package]] name = "alloy-primitives" version = "1.5.1" @@ -191,20 +357,97 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7db950a29746be9e2f2c6288c8bd7a6202a81f999ce109a2933d2379970ec0fa" dependencies = [ "alloy-rlp", + "arbitrary", "bytes", "cfg-if 1.0.0", "const-hex", "derive_more 2.0.1", + "foldhash 0.2.0", "hashbrown 0.16.1", "indexmap 2.11.4", "itoa", + "k256", + "keccak-asm", "paste", + "proptest", + "proptest-derive 0.6.0", "rand 0.9.2", + "rapidhash", "ruint", + "rustc-hash 2.1.1", "serde", + "sha3", "tiny-keccak 2.0.2", ] +[[package]] +name = "alloy-provider" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5cc919fe241f9dd28c4c7f7dcff9e66e550c280bafe3545e1019622e1239db38" +dependencies = [ + "alloy-chains", + "alloy-consensus", + "alloy-eips", + "alloy-json-rpc", + "alloy-network", + "alloy-network-primitives", + "alloy-primitives", + "alloy-pubsub", + "alloy-rpc-client", + "alloy-rpc-types-anvil", + "alloy-rpc-types-debug", + "alloy-rpc-types-eth", + "alloy-rpc-types-trace", + "alloy-rpc-types-txpool", + "alloy-signer", + "alloy-sol-types", + "alloy-transport", + "alloy-transport-http", + "alloy-transport-ipc", + 
"alloy-transport-ws", + "async-stream", + "async-trait", + "auto_impl", + "dashmap", + "either", + "futures 0.3.31", + "futures-utils-wasm", + "lru", + "parking_lot", + "pin-project", + "reqwest", + "serde", + "serde_json", + "thiserror 2.0.17", + "tokio", + "tracing", + "url", + "wasmtimer", +] + +[[package]] +name = "alloy-pubsub" +version = "1.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94813abbd7baa30c700ea02e7f92319dbcb03bff77aeea92a3a9af7ba19c5c70" +dependencies = [ + "alloy-json-rpc", + "alloy-primitives", + "alloy-transport", + "auto_impl", + "bimap", + "futures 0.3.31", + "parking_lot", + "serde", + "serde_json", + "tokio", + "tokio-stream", + "tower 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)", + "tracing", + "wasmtimer", +] + [[package]] name = "alloy-rlp" version = "0.3.12" @@ -227,6 +470,150 @@ dependencies = [ "syn 2.0.114", ] +[[package]] +name = "alloy-rpc-client" +version = "1.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff01723afc25ec4c5b04de399155bef7b6a96dfde2475492b1b7b4e7a4f46445" +dependencies = [ + "alloy-json-rpc", + "alloy-primitives", + "alloy-pubsub", + "alloy-transport", + "alloy-transport-http", + "alloy-transport-ipc", + "alloy-transport-ws", + "futures 0.3.31", + "pin-project", + "reqwest", + "serde", + "serde_json", + "tokio", + "tokio-stream", + "tower 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)", + "tracing", + "url", + "wasmtimer", +] + +[[package]] +name = "alloy-rpc-types" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97b3000edc72a300048cf461df94bfa29fc5d7760ddd88ca7d56ea6fc8b28729" +dependencies = [ + "alloy-primitives", + "alloy-rpc-types-anvil", + "alloy-rpc-types-debug", + "alloy-rpc-types-engine", + "alloy-rpc-types-eth", + "alloy-rpc-types-trace", + "alloy-rpc-types-txpool", + "alloy-serde", + "serde", +] + +[[package]] +name = "alloy-rpc-types-anvil" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1207e852f30297d6918f91df3e76f758fa7b519ea1e49fbd7d961ce796663f9" +dependencies = [ + "alloy-primitives", + "alloy-rpc-types-eth", + "alloy-serde", + "serde", +] + +[[package]] +name = "alloy-rpc-types-any" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ebc96cf29095c10a183fb7106a097fe12ca8dd46733895582da255407f54b29" +dependencies = [ + "alloy-consensus-any", + "alloy-rpc-types-eth", + "alloy-serde", +] + +[[package]] +name = "alloy-rpc-types-debug" +version = "1.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bab1ebed118b701c497e6541d2d11dfa6f3c6ae31a3c52999daa802fcdcc16b7" +dependencies = [ + "alloy-primitives", + "derive_more 2.0.1", + "serde", + "serde_with", +] + +[[package]] +name = "alloy-rpc-types-engine" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f35af673cc14e89813ab33671d79b6e73fe38788c5f3a8ec3a75476b58225f53" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-primitives", + "alloy-rlp", + "alloy-serde", + "arbitrary", + "derive_more 2.0.1", + "rand 0.8.5", + "serde", + "strum", +] + +[[package]] +name = "alloy-rpc-types-eth" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9cc3f354a5079480acca0a6533d1d3838177a03ea494ef0ae8d1679efea88274" +dependencies = [ + "alloy-consensus", + "alloy-consensus-any", + "alloy-eips", + 
"alloy-network-primitives", + "alloy-primitives", + "alloy-rlp", + "alloy-serde", + "alloy-sol-types", + "arbitrary", + "itertools 0.14.0", + "serde", + "serde_json", + "serde_with", + "thiserror 2.0.17", +] + +[[package]] +name = "alloy-rpc-types-trace" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d782d80221dfaa5a2f8a7bf277370bdec10e4e8119f5a60d2e2b1adb2e806ca" +dependencies = [ + "alloy-primitives", + "alloy-rpc-types-eth", + "alloy-serde", + "serde", + "serde_json", + "thiserror 2.0.17", +] + +[[package]] +name = "alloy-rpc-types-txpool" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a3076c226bb4365f9c3ac0cd4082ba86208aaa1485cbf664383a90aba7c36b26" +dependencies = [ + "alloy-primitives", + "alloy-rpc-types-eth", + "alloy-serde", + "serde", +] + [[package]] name = "alloy-serde" version = "1.3.0" @@ -234,10 +621,42 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a438ce4cd49ec4bc213868c1fe94f2fe103d4c3f22f6a42073db974f9c0962da" dependencies = [ "alloy-primitives", + "arbitrary", "serde", "serde_json", ] +[[package]] +name = "alloy-signer" +version = "1.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fb10ccd49d0248df51063fce6b716f68a315dd912d55b32178c883fd48b4021d" +dependencies = [ + "alloy-primitives", + "async-trait", + "auto_impl", + "either", + "elliptic-curve", + "k256", + "thiserror 2.0.17", +] + +[[package]] +name = "alloy-signer-local" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "69c260e78b9c104c444f8a202f283d5e8c6637e6fa52a83f649ad6aaa0b91fd0" +dependencies = [ + "alloy-consensus", + "alloy-network", + "alloy-primitives", + "alloy-signer", + "async-trait", + "k256", + "rand 0.8.5", + "thiserror 2.0.17", +] + [[package]] name = "alloy-sol-macro" version = "1.5.1" @@ -290,135 +709,407 @@ dependencies = [ ] [[package]] -name = "alloy-sol-type-parser" -version = "1.5.2" +name = "alloy-sol-type-parser" +version = "1.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af67a0b0dcebe14244fc92002cd8d96ecbf65db4639d479f5fcd5805755a4c27" +dependencies = [ + "serde", + "winnow 0.7.13", +] + +[[package]] +name = "alloy-sol-types" +version = "1.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc1038284171df8bfd48befc0c7b78f667a7e2be162f45f07bd1c378078ebe58" +dependencies = [ + "alloy-json-abi", + "alloy-primitives", + "alloy-sol-macro", + "serde", +] + +[[package]] +name = "alloy-transport" +version = "1.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f50a9516736d22dd834cc2240e5bf264f338667cc1d9e514b55ec5a78b987ca" +dependencies = [ + "alloy-json-rpc", + "auto_impl", + "base64 0.22.1", + "derive_more 2.0.1", + "futures 0.3.31", + "futures-utils-wasm", + "parking_lot", + "serde", + "serde_json", + "thiserror 2.0.17", + "tokio", + "tower 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)", + "tracing", + "url", + "wasmtimer", +] + +[[package]] +name = "alloy-transport-http" +version = "1.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0a18b541a6197cf9a084481498a766fdf32fefda0c35ea6096df7d511025e9f1" +dependencies = [ + "alloy-json-rpc", + "alloy-transport", + "reqwest", + "serde_json", + "tower 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)", + "tracing", + "url", +] + +[[package]] +name = "alloy-transport-ipc" 
+version = "1.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8075911680ebc537578cacf9453464fd394822a0f68614884a9c63f9fbaf5e89" +dependencies = [ + "alloy-json-rpc", + "alloy-pubsub", + "alloy-transport", + "bytes", + "futures 0.3.31", + "interprocess", + "pin-project", + "serde", + "serde_json", + "tokio", + "tokio-util 0.7.18", + "tracing", +] + +[[package]] +name = "alloy-transport-ws" +version = "1.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "921d37a57e2975e5215f7dd0f28873ed5407c7af630d4831a4b5c737de4b0b8b" +dependencies = [ + "alloy-pubsub", + "alloy-transport", + "futures 0.3.31", + "http 1.4.0", + "serde_json", + "tokio", + "tokio-tungstenite 0.26.2", + "tracing", + "ws_stream_wasm", +] + +[[package]] +name = "alloy-trie" +version = "0.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "428aa0f0e0658ff091f8f667c406e034b431cb10abd39de4f507520968acc499" +dependencies = [ + "alloy-primitives", + "alloy-rlp", + "arbitrary", + "arrayvec 0.7.4", + "derive_arbitrary", + "derive_more 2.0.1", + "nybbles", + "proptest", + "proptest-derive 0.5.1", + "serde", + "smallvec", + "tracing", +] + +[[package]] +name = "alloy-tx-macros" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "99dac443033e83b14f68fac56e8c27e76421f1253729574197ceccd06598f3ef" +dependencies = [ + "darling 0.21.3", + "proc-macro2", + "quote", + "syn 2.0.114", +] + +[[package]] +name = "android_system_properties" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" +dependencies = [ + "libc", +] + +[[package]] +name = "anstream" +version = "0.6.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "418c75fa768af9c03be99d17643f93f79bbba589895012a80e3452a19ddda15b" +dependencies = [ + "anstyle", + "anstyle-parse", + "anstyle-query", + "anstyle-wincon", + "colorchoice", + "is_terminal_polyfill", + "utf8parse", +] + +[[package]] +name = "anstyle" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "038dfcf04a5feb68e9c60b21c9625a54c2c0616e79b72b0fd87075a056ae1d1b" + +[[package]] +name = "anstyle-parse" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c03a11a9034d92058ceb6ee011ce58af4a9bf61491aa7e1e59ecd24bd40d22d4" +dependencies = [ + "utf8parse", +] + +[[package]] +name = "anstyle-query" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ad186efb764318d35165f1758e7dcef3b10628e26d41a44bc5550652e6804391" +dependencies = [ + "windows-sys 0.52.0", +] + +[[package]] +name = "anstyle-wincon" +version = "3.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "61a38449feb7068f52bb06c12759005cf459ee52bb4adc1d5a7c4322d716fb19" +dependencies = [ + "anstyle", + "windows-sys 0.52.0", +] + +[[package]] +name = "anyhow" +version = "1.0.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a23eb6b1614318a8071c9b2521f36b424b2c83db5eb3a0fead4a6c0809af6e61" + +[[package]] +name = "arbitrary" +version = "1.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3d036a3c4ab069c7b410a2ce876bd74808d2d0888a82667669f8e783a898bf1" +dependencies = [ + "derive_arbitrary", +] + +[[package]] +name = "arc-swap" +version = "1.7.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "69f7f8c3906b62b754cd5326047894316021dcfe5a194c8ea52bdd94934a3457" + +[[package]] +name = "arcstr" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "03918c3dbd7701a85c6b9887732e2921175f26c350b4563841d0958c21d57e6d" + +[[package]] +name = "ark-ff" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6b3235cc41ee7a12aaaf2c575a2ad7b46713a8a50bda2fc3b003a04845c05dd6" +dependencies = [ + "ark-ff-asm 0.3.0", + "ark-ff-macros 0.3.0", + "ark-serialize 0.3.0", + "ark-std 0.3.0", + "derivative", + "num-bigint 0.4.6", + "num-traits", + "paste", + "rustc_version 0.3.3", + "zeroize", +] + +[[package]] +name = "ark-ff" +version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af67a0b0dcebe14244fc92002cd8d96ecbf65db4639d479f5fcd5805755a4c27" +checksum = "ec847af850f44ad29048935519032c33da8aa03340876d351dfab5660d2966ba" dependencies = [ - "serde", - "winnow 0.7.13", + "ark-ff-asm 0.4.2", + "ark-ff-macros 0.4.2", + "ark-serialize 0.4.2", + "ark-std 0.4.0", + "derivative", + "digest 0.10.7", + "itertools 0.10.5", + "num-bigint 0.4.6", + "num-traits", + "paste", + "rustc_version 0.4.0", + "zeroize", ] [[package]] -name = "alloy-sol-types" -version = "1.5.1" +name = "ark-ff" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc1038284171df8bfd48befc0c7b78f667a7e2be162f45f07bd1c378078ebe58" +checksum = "a177aba0ed1e0fbb62aa9f6d0502e9b46dad8c2eab04c14258a1212d2557ea70" dependencies = [ - "alloy-json-abi", - "alloy-primitives", - "alloy-sol-macro", + "ark-ff-asm 0.5.0", + "ark-ff-macros 0.5.0", + "ark-serialize 0.5.0", + "ark-std 0.5.0", + "arrayvec 0.7.4", + "digest 0.10.7", + "educe", + "itertools 0.13.0", + "num-bigint 0.4.6", + "num-traits", + "paste", + "zeroize", ] [[package]] -name = "alloy-trie" -version = "0.9.3" +name = "ark-ff-asm" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "428aa0f0e0658ff091f8f667c406e034b431cb10abd39de4f507520968acc499" +checksum = "db02d390bf6643fb404d3d22d31aee1c4bc4459600aef9113833d17e786c6e44" dependencies = [ - "alloy-primitives", - "alloy-rlp", - "arrayvec 0.7.4", - "derive_more 2.0.1", - "nybbles", - "serde", - "smallvec", - "tracing", + "quote", + "syn 1.0.109", ] [[package]] -name = "alloy-tx-macros" -version = "1.3.0" +name = "ark-ff-asm" +version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "99dac443033e83b14f68fac56e8c27e76421f1253729574197ceccd06598f3ef" +checksum = "3ed4aa4fe255d0bc6d79373f7e31d2ea147bcf486cba1be5ba7ea85abdb92348" dependencies = [ - "darling 0.21.3", - "proc-macro2", "quote", - "syn 2.0.114", + "syn 1.0.109", ] [[package]] -name = "android_system_properties" -version = "0.1.5" +name = "ark-ff-asm" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" +checksum = "62945a2f7e6de02a31fe400aa489f0e0f5b2502e69f95f853adb82a96c7a6b60" dependencies = [ - "libc", + "quote", + "syn 2.0.114", ] [[package]] -name = "anstream" -version = "0.6.14" +name = "ark-ff-macros" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "418c75fa768af9c03be99d17643f93f79bbba589895012a80e3452a19ddda15b" +checksum = "db2fd794a08ccb318058009eefdf15bcaaaaf6f8161eb3345f907222bac38b20" dependencies = 
[ - "anstyle", - "anstyle-parse", - "anstyle-query", - "anstyle-wincon", - "colorchoice", - "is_terminal_polyfill", - "utf8parse", + "num-bigint 0.4.6", + "num-traits", + "quote", + "syn 1.0.109", ] [[package]] -name = "anstyle" -version = "1.0.7" +name = "ark-ff-macros" +version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "038dfcf04a5feb68e9c60b21c9625a54c2c0616e79b72b0fd87075a056ae1d1b" +checksum = "7abe79b0e4288889c4574159ab790824d0033b9fdcb2a112a3182fac2e514565" +dependencies = [ + "num-bigint 0.4.6", + "num-traits", + "proc-macro2", + "quote", + "syn 1.0.109", +] [[package]] -name = "anstyle-parse" -version = "0.2.4" +name = "ark-ff-macros" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c03a11a9034d92058ceb6ee011ce58af4a9bf61491aa7e1e59ecd24bd40d22d4" +checksum = "09be120733ee33f7693ceaa202ca41accd5653b779563608f1234f78ae07c4b3" dependencies = [ - "utf8parse", + "num-bigint 0.4.6", + "num-traits", + "proc-macro2", + "quote", + "syn 2.0.114", ] [[package]] -name = "anstyle-query" -version = "1.1.0" +name = "ark-serialize" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ad186efb764318d35165f1758e7dcef3b10628e26d41a44bc5550652e6804391" +checksum = "1d6c2b318ee6e10f8c2853e73a83adc0ccb88995aa978d8a3408d492ab2ee671" dependencies = [ - "windows-sys 0.52.0", + "ark-std 0.3.0", + "digest 0.9.0", ] [[package]] -name = "anstyle-wincon" -version = "3.0.3" +name = "ark-serialize" +version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "61a38449feb7068f52bb06c12759005cf459ee52bb4adc1d5a7c4322d716fb19" +checksum = "adb7b85a02b83d2f22f89bd5cac66c9c89474240cb6207cb1efc16d098e822a5" dependencies = [ - "anstyle", - "windows-sys 0.52.0", + "ark-std 0.4.0", + "digest 0.10.7", + "num-bigint 0.4.6", ] [[package]] -name = "anyhow" -version = "1.0.100" +name = "ark-serialize" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a23eb6b1614318a8071c9b2521f36b424b2c83db5eb3a0fead4a6c0809af6e61" +checksum = "3f4d068aaf107ebcd7dfb52bc748f8030e0fc930ac8e360146ca54c1203088f7" +dependencies = [ + "ark-std 0.5.0", + "arrayvec 0.7.4", + "digest 0.10.7", + "num-bigint 0.4.6", +] [[package]] -name = "arbitrary" -version = "1.4.1" +name = "ark-std" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dde20b3d026af13f561bdd0f15edf01fc734f0dafcedbaf42bba506a9517f223" +checksum = "1df2c09229cbc5a028b1d70e00fdb2acee28b1055dfb5ca73eea49c5a25c4e7c" +dependencies = [ + "num-traits", + "rand 0.8.5", +] [[package]] -name = "arc-swap" -version = "1.7.1" +name = "ark-std" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "69f7f8c3906b62b754cd5326047894316021dcfe5a194c8ea52bdd94934a3457" +checksum = "94893f1e0c6eeab764ade8dc4c0db24caf4fe7cbbaafc0eba0a9030f447b5185" +dependencies = [ + "num-traits", + "rand 0.8.5", +] [[package]] -name = "arcstr" -version = "1.2.0" +name = "ark-std" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "03918c3dbd7701a85c6b9887732e2921175f26c350b4563841d0958c21d57e6d" +checksum = "246a225cc6131e9ee4f24619af0f19d67761fff15d7ccc22e42b80846e69449a" +dependencies = [ + "num-traits", + "rand 0.8.5", +] [[package]] name = "arrayref" @@ -827,6 +1518,17 @@ dependencies = [ "syn 2.0.114", ] +[[package]] +name = "async_io_stream" +version = "0.3.3" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6d7b9decdf35d8908a7e3ef02f64c5e9b1695e230154c0e8de3969142d9b94c" +dependencies = [ + "futures 0.3.31", + "pharos", + "rustc_version 0.4.0", +] + [[package]] name = "asynk-strim" version = "0.1.5" @@ -942,7 +1644,7 @@ dependencies = [ "sha1", "sync_wrapper 1.0.1", "tokio", - "tokio-tungstenite", + "tokio-tungstenite 0.28.0", "tower 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)", "tower-layer 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", "tower-service 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1080,6 +1782,27 @@ dependencies = [ "num-traits", ] +[[package]] +name = "bimap" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "230c5f1ca6a325a32553f8640d31ac9b49f2411e901e427570154868b46da4f7" + +[[package]] +name = "bit-set" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08807e080ed7f9d5433fa9b275196cfc35414f66a0c79d864dc51a0d825231a3" +dependencies = [ + "bit-vec", +] + +[[package]] +name = "bit-vec" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e764a1d40d510daf35e07be9eb06e75770908c27d411ee6c92109c9840eaaf7" + [[package]] name = "bitcoin-io" version = "0.1.4" @@ -1178,6 +1901,29 @@ dependencies = [ "zeroize", ] +[[package]] +name = "borsh" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d1da5ab77c1437701eeff7c88d968729e7766172279eab0676857b3d63af7a6f" +dependencies = [ + "borsh-derive", + "cfg_aliases", +] + +[[package]] +name = "borsh-derive" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0686c856aa6aac0c4498f936d7d6a02df690f614c03e4d906d1018062b5c5e2c" +dependencies = [ + "once_cell", + "proc-macro-crate", + "proc-macro2", + "quote", + "syn 2.0.114", +] + [[package]] name = "bs58" version = "0.4.0" @@ -1239,6 +1985,7 @@ version = "2.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e00bf4b112b07b505472dbefd19e37e53307e2bfed5a79e0cc161d58ccd0e687" dependencies = [ + "arbitrary", "blst", "cc", "glob", @@ -1272,6 +2019,12 @@ version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" +[[package]] +name = "cfg_aliases" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724" + [[package]] name = "chrono" version = "0.4.42" @@ -1553,7 +2306,7 @@ dependencies = [ "log", "pulley-interpreter", "regalloc2", - "rustc-hash 2.0.0", + "rustc-hash 2.1.1", "serde", "smallvec", "target-lexicon", @@ -1877,6 +2630,20 @@ dependencies = [ "syn 2.0.114", ] +[[package]] +name = "dashmap" +version = "6.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5041cc499144891f3790297212f32a74fb938e5136a14943f338ef9e0ae276cf" +dependencies = [ + "cfg-if 1.0.0", + "crossbeam-utils", + "hashbrown 0.14.5", + "lock_api", + "once_cell", + "parking_lot_core", +] + [[package]] name = "data-encoding" version = "2.6.0" @@ -1970,6 +2737,17 @@ dependencies = [ "syn 1.0.109", ] +[[package]] +name = "derive_arbitrary" +version = "1.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e567bd82dcff979e4b03460c307b3cdc9e96fde3d73bed1496d2bc75d9dd62a" +dependencies = 
[ + "proc-macro2", + "quote", + "syn 2.0.114", +] + [[package]] name = "derive_builder" version = "0.20.2" @@ -2010,7 +2788,7 @@ dependencies = [ "convert_case 0.4.0", "proc-macro2", "quote", - "rustc_version", + "rustc_version 0.4.0", "syn 2.0.114", ] @@ -2222,6 +3000,12 @@ dependencies = [ "syn 2.0.114", ] +[[package]] +name = "doctest-file" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aac81fa3e28d21450aa4d2ac065992ba96a1d7303efbce51a95f4fd175b67562" + [[package]] name = "downcast-rs" version = "2.0.2" @@ -2260,6 +3044,19 @@ dependencies = [ "rfc6979", "serdect", "signature", + "spki", +] + +[[package]] +name = "educe" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d7bc049e1bd8cdeb31b68bbd586a9464ecf9f3944af3958a7a9d0f8b9799417" +dependencies = [ + "enum-ordinalize", + "proc-macro2", + "quote", + "syn 2.0.114", ] [[package]] @@ -2318,6 +3115,26 @@ dependencies = [ "cfg-if 1.0.0", ] +[[package]] +name = "enum-ordinalize" +version = "4.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4a1091a7bb1f8f2c4b28f1fe2cef4980ca2d410a3d727d67ecc3178c9b0800f0" +dependencies = [ + "enum-ordinalize-derive", +] + +[[package]] +name = "enum-ordinalize-derive" +version = "4.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ca9601fb2d62598ee17836250842873a413586e5d7ed88b356e38ddbb0ec631" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.114", +] + [[package]] name = "env_filter" version = "0.1.0" @@ -2410,7 +3227,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "11da94e443c60508eb62cf256243a64da87304c2802ac2528847f79d750007ef" dependencies = [ "crunchy", - "fixed-hash", + "fixed-hash 0.7.0", "impl-rlp", "impl-serde", "tiny-keccak 2.0.2", @@ -2423,10 +3240,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b2827b94c556145446fcce834ca86b7abf0c39a805883fe20e72c5bfdb5a0dc6" dependencies = [ "ethbloom", - "fixed-hash", + "fixed-hash 0.7.0", "impl-rlp", "impl-serde", - "primitive-types", + "primitive-types 0.11.1", "uint 0.9.5", ] @@ -2457,6 +3274,28 @@ version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9fc0510504f03c51ada170672ac806f1f105a88aa97a5281117e1ddc3368e51a" +[[package]] +name = "fastrlp" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "139834ddba373bbdd213dffe02c8d110508dcf1726c2be27e8d1f7d7e1856418" +dependencies = [ + "arrayvec 0.7.4", + "auto_impl", + "bytes", +] + +[[package]] +name = "fastrlp" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce8dba4714ef14b8274c371879b175aa55b16b30f269663f19d576f380018dc4" +dependencies = [ + "arrayvec 0.7.4", + "auto_impl", + "bytes", +] + [[package]] name = "ff" version = "0.13.1" @@ -2497,6 +3336,18 @@ dependencies = [ "static_assertions", ] +[[package]] +name = "fixed-hash" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "835c052cb0c08c1acf6ffd71c022172e18723949c8282f2b9f27efbc51e64534" +dependencies = [ + "byteorder", + "rand 0.8.5", + "rustc-hex", + "static_assertions", +] + [[package]] name = "fixedbitset" version = "0.5.7" @@ -2510,7 +3361,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "35f6839d7b3b98adde531effaf34f0c2badc6f4735d26fe74709d8e513a96ef3" dependencies = [ "bitflags 2.9.0", - 
"rustc_version", + "rustc_version 0.4.0", ] [[package]] @@ -2682,6 +3533,12 @@ dependencies = [ "slab", ] +[[package]] +name = "futures-utils-wasm" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42012b0f064e01aa58b545fe3727f90f7dd4020f4a3ea735b50344965f5a57e9" + [[package]] name = "fxhash" version = "0.2.1" @@ -2859,7 +3716,7 @@ dependencies = [ "hyper 1.8.1", "hyper-util", "indoc", - "itertools", + "itertools 0.14.0", "lazy-regex", "lazy_static", "lru_time_cache", @@ -2879,7 +3736,7 @@ dependencies = [ "redis", "regex", "reqwest", - "semver", + "semver 1.0.27", "serde", "serde_derive", "serde_json", @@ -2907,7 +3764,6 @@ dependencies = [ "tonic-build", "url", "wasmparser 0.118.2", - "web3", "wiremock", ] @@ -2933,17 +3789,18 @@ dependencies = [ "graph-runtime-derive", "graph-runtime-wasm", "hex", - "itertools", + "itertools 0.14.0", "jsonrpc-core", "prost", "prost-types", - "semver", + "semver 1.0.27", "serde", "thiserror 2.0.17", "tiny-keccak 1.5.0", "tokio", "tokio-stream", "tonic-build", + "tower 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2979,7 +3836,7 @@ dependencies = [ "graph-chain-near", "graph-runtime-wasm", "indoc", - "itertools", + "itertools 0.14.0", "parking_lot", "prometheus", "serde_yaml", @@ -3032,7 +3889,7 @@ dependencies = [ "graph-store-postgres", "graphman", "graphman-server", - "itertools", + "itertools 0.14.0", "json-structural-diff", "lazy_static", "prometheus", @@ -3061,7 +3918,7 @@ dependencies = [ "graph-chain-ethereum", "graph-runtime-wasm", "rand 0.9.2", - "semver", + "semver 1.0.27", "test-store", "wasmtime", ] @@ -3073,13 +3930,12 @@ dependencies = [ "anyhow", "async-trait", "bs58 0.4.0", - "ethabi", "graph", "graph-runtime-derive", "hex", "never", "parity-wasm", - "semver", + "semver 1.0.27", "serde_yaml", "wasm-instrument", "wasmtime", @@ -3149,7 +4005,7 @@ dependencies = [ "graphman-store", "graphql-parser", "hex", - "itertools", + "itertools 0.14.0", "lazy_static", "lru_time_cache", "openssl", @@ -3190,6 +4046,7 @@ dependencies = [ "tokio", "tokio-stream", "tokio-util 0.7.18", + "web3", ] [[package]] @@ -3212,7 +4069,7 @@ dependencies = [ "graph", "graph-store-postgres", "graphman-store", - "itertools", + "itertools 0.14.0", "thiserror 2.0.17", "tokio", ] @@ -3360,12 +4217,20 @@ version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" +[[package]] +name = "hashbrown" +version = "0.14.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" + [[package]] name = "hashbrown" version = "0.15.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bf151400ff0baff5465007dd2f3e717f3fe502074ca563069ce3a6629d07b289" dependencies = [ + "allocator-api2", + "equivalent", "foldhash 0.1.5", "serde", ] @@ -3617,6 +4482,7 @@ dependencies = [ "tokio", "tokio-rustls", "tower-service 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", + "webpki-roots 0.26.11", ] [[package]] @@ -3927,6 +4793,7 @@ version = "2.11.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4b0f83760fb341a774ed326568e19f5a863af4a952def8c39f9ab92fd95b88e5" dependencies = [ + "arbitrary", "equivalent", "hashbrown 0.16.1", "serde", @@ -3962,6 +4829,21 @@ dependencies = [ "libc", ] +[[package]] +name = "interprocess" +version = "2.2.3" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "d941b405bd2322993887859a8ee6ac9134945a24ec5ec763a8a962fc64dfec2d" +dependencies = [ + "doctest-file", + "futures-core", + "libc", + "recvmsg", + "tokio", + "widestring", + "windows-sys 0.52.0", +] + [[package]] name = "io-uring" version = "0.7.10" @@ -4006,6 +4888,24 @@ version = "1.70.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f8478577c03552c21db0e2724ffb8986a5ce7af88107e6be5d2ee6e158c12800" +[[package]] +name = "itertools" +version = "0.10.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" +dependencies = [ + "either", +] + +[[package]] +name = "itertools" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "413ee7dfc52ee1a4949ceeb7dbc8a33f2d6c088194d9f922fb8318faf1f01186" +dependencies = [ + "either", +] + [[package]] name = "itertools" version = "0.14.0" @@ -4191,6 +5091,7 @@ dependencies = [ "cfg-if 1.0.0", "ecdsa", "elliptic-curve", + "once_cell", "serdect", "sha2", ] @@ -4204,6 +5105,16 @@ dependencies = [ "cpufeatures", ] +[[package]] +name = "keccak-asm" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "505d1856a39b200489082f90d897c3f07c455563880bc5952e38eabf731c83b6" +dependencies = [ + "digest 0.10.7", + "sha3-asm", +] + [[package]] name = "kqueue" version = "1.1.1" @@ -4377,6 +5288,15 @@ version = "0.4.22" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24" +[[package]] +name = "lru" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "227748d55f2f0ab4735d87fd623798cb6b664512fe979705f829c9f81c934465" +dependencies = [ + "hashbrown 0.15.2", +] + [[package]] name = "lru_time_cache" version = "0.11.11" @@ -4718,13 +5638,37 @@ dependencies = [ "libc", ] +[[package]] +name = "num_enum" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b1207a7e20ad57b847bbddc6776b968420d38292bbfe2089accff5e19e82454c" +dependencies = [ + "num_enum_derive", + "rustversion", +] + +[[package]] +name = "num_enum_derive" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff32365de1b6743cb203b710788263c44a03de03802daf96092f2da4fe6ba4d7" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.114", +] + [[package]] name = "nybbles" version = "0.4.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7b5676b5c379cf5b03da1df2b3061c4a4e2aa691086a56ac923e08c143f53f59" dependencies = [ + "alloy-rlp", + "arbitrary", "cfg-if 1.0.0", + "proptest", "ruint", "serde", "smallvec", @@ -4758,7 +5702,7 @@ dependencies = [ "http-body-util", "humantime", "hyper 1.8.1", - "itertools", + "itertools 0.14.0", "parking_lot", "percent-encoding", "quick-xml", @@ -4995,6 +5939,16 @@ dependencies = [ "url", ] +[[package]] +name = "pharos" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e9567389417feee6ce15dd6527a8a1ecac205ef62c2932bcf3d9f6fc5b78b414" +dependencies = [ + "futures 0.3.31", + "rustc_version 0.4.0", +] + [[package]] name = "phf" version = "0.13.1" @@ -5204,13 +6158,24 @@ version = "0.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"e28720988bff275df1f51b171e1b2a18c30d194c4d2b61defdacecd625a5d94a" dependencies = [ - "fixed-hash", + "fixed-hash 0.7.0", "impl-codec", "impl-rlp", "impl-serde", "uint 0.9.5", ] +[[package]] +name = "primitive-types" +version = "0.12.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b34d9fd68ae0b74a41b21c03c2f62847aa0ffea044eee893b4c140b37e244e2" +dependencies = [ + "fixed-hash 0.8.0", + "impl-codec", + "uint 0.9.5", +] + [[package]] name = "priority-queue" version = "2.7.0" @@ -5296,14 +6261,41 @@ version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bee689443a2bd0a16ab0348b52ee43e3b2d1b1f931c8aa5c9f8de4c86fbe8c40" dependencies = [ + "bit-set", + "bit-vec", "bitflags 2.9.0", "num-traits", "rand 0.9.2", "rand_chacha 0.9.0", "rand_xorshift", + "regex-syntax", + "rusty-fork", + "tempfile", "unarray", ] +[[package]] +name = "proptest-derive" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ee1c9ac207483d5e7db4940700de86a9aae46ef90c48b57f99fe7edb8345e49" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.114", +] + +[[package]] +name = "proptest-derive" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "095a99f75c69734802359b682be8daaf8980296731f6470434ea2c652af1dd30" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.114", +] + [[package]] name = "prost" version = "0.13.5" @@ -5321,7 +6313,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "be769465445e8c1474e9c5dac2018218498557af32d9ed057325ec9a41ae81bf" dependencies = [ "heck 0.4.1", - "itertools", + "itertools 0.14.0", "log", "multimap", "once_cell", @@ -5341,7 +6333,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8a56d757972c98b346a9b766e3f02746cde6dd1cd1d1d563472929fdd74bec4d" dependencies = [ "anyhow", - "itertools", + "itertools 0.14.0", "proc-macro2", "quote", "syn 2.0.114", @@ -5424,6 +6416,12 @@ dependencies = [ "syn 2.0.114", ] +[[package]] +name = "quick-error" +version = "1.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0" + [[package]] name = "quick-xml" version = "0.38.2" @@ -5460,7 +6458,7 @@ dependencies = [ "bytes", "rand 0.8.5", "ring", - "rustc-hash 2.0.0", + "rustc-hash 2.1.1", "rustls", "slab", "thiserror 1.0.61", @@ -5578,6 +6576,15 @@ dependencies = [ "rand_core 0.9.3", ] +[[package]] +name = "rapidhash" +version = "4.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d8b5b858a440a0bc02625b62dd95131b9201aa9f69f411195dd4a7cfb1de3d7" +dependencies = [ + "rustversion", +] + [[package]] name = "rayon" version = "1.10.0" @@ -5618,6 +6625,12 @@ dependencies = [ "syn 2.0.114", ] +[[package]] +name = "recvmsg" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3edd4d5d42c92f0a659926464d4cce56b562761267ecf0f469d85b7de384175" + [[package]] name = "redis" version = "1.0.2" @@ -5684,7 +6697,7 @@ dependencies = [ "bumpalo", "hashbrown 0.15.2", "log", - "rustc-hash 2.0.0", + "rustc-hash 2.1.1", "smallvec", ] @@ -5764,6 +6777,7 @@ dependencies = [ "wasm-bindgen-futures", "wasm-streams", "web-sys", + "webpki-roots 1.0.5", ] [[package]] @@ -5807,9 +6821,22 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c141e807189ad38a07276942c6623032d3753c8859c146104ac2e4d68865945a" dependencies = 
[ "alloy-rlp", + "arbitrary", + "ark-ff 0.3.0", + "ark-ff 0.4.2", + "ark-ff 0.5.0", + "bytes", + "fastrlp 0.3.1", + "fastrlp 0.4.0", + "num-bigint 0.4.6", + "num-integer", + "num-traits", + "parity-scale-codec", + "primitive-types 0.12.2", "proptest", "rand 0.8.5", "rand 0.9.2", + "rlp", "ruint-macro", "serde_core", "valuable", @@ -5836,9 +6863,9 @@ checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" [[package]] name = "rustc-hash" -version = "2.0.0" +version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "583034fd73374156e66797ed8e5b0d5690409c9226b22d87cb7f19821c05d152" +checksum = "357703d41365b4b27c590e3ed91eabb1b663f07c4c084095e60cbed4362dff0d" [[package]] name = "rustc-hex" @@ -5846,13 +6873,22 @@ version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3e75f6a532d0fd9f7f13144f392b6ad56a32696bfcd9c78f797f16bbb6f072d6" +[[package]] +name = "rustc_version" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0dfe2087c51c460008730de8b57e6a320782fbfb312e1f4d520e6c6fae155ee" +dependencies = [ + "semver 0.11.0", +] + [[package]] name = "rustc_version" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366" dependencies = [ - "semver", + "semver 1.0.27", ] [[package]] @@ -5954,6 +6990,18 @@ version = "1.0.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "955d28af4278de8121b7ebeb796b6a45735dc01436d898801014aced2773a3d6" +[[package]] +name = "rusty-fork" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc6bf79ff24e648f6da1f8d1f011e9cac26491b619e6b9280f2b47f1774e6ee2" +dependencies = [ + "fnv", + "quick-error", + "tempfile", + "wait-timeout", +] + [[package]] name = "ryu" version = "1.0.18" @@ -6092,6 +7140,15 @@ dependencies = [ "libc", ] +[[package]] +name = "semver" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f301af10236f6df4160f7c3f04eec6dbc70ace82d23326abad5edee88801c6b6" +dependencies = [ + "semver-parser", +] + [[package]] name = "semver" version = "1.0.27" @@ -6102,6 +7159,21 @@ dependencies = [ "serde_core", ] +[[package]] +name = "semver-parser" +version = "0.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9900206b54a3527fdc7b8a938bffd94a568bac4f4aa8113b209df75a09c0dec2" +dependencies = [ + "pest", +] + +[[package]] +name = "send_wrapper" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cd0b0ec5f1c1ca621c432a25813d8d60c88abe6d3e08a3eb9cf37d97a0fe3d73" + [[package]] name = "serde" version = "1.0.226" @@ -6306,6 +7378,16 @@ dependencies = [ "keccak", ] +[[package]] +name = "sha3-asm" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c28efc5e327c837aa837c59eae585fc250715ef939ac32881bcc11677cd02d46" +dependencies = [ + "cc", + "cfg-if 1.0.0", +] + [[package]] name = "shellexpand" version = "3.1.1" @@ -6441,6 +7523,7 @@ version = "1.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" dependencies = [ + "arbitrary", "serde", ] @@ -6800,6 +7883,7 @@ dependencies = [ "lazy_static", "pretty_assertions", "prost-types", + "serde_json", "tokio", ] @@ -7062,6 +8146,22 @@ dependencies = [ 
"tokio-stream", ] +[[package]] +name = "tokio-tungstenite" +version = "0.26.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a9daff607c6d2bf6c16fd681ccb7eecc83e4e2cdc1ca067ffaadfca5de7f084" +dependencies = [ + "futures-util", + "log", + "rustls", + "rustls-pki-types", + "tokio", + "tokio-rustls", + "tungstenite 0.26.2", + "webpki-roots 0.26.11", +] + [[package]] name = "tokio-tungstenite" version = "0.28.0" @@ -7071,7 +8171,7 @@ dependencies = [ "futures-util", "log", "tokio", - "tungstenite", + "tungstenite 0.28.0", ] [[package]] @@ -7263,9 +8363,13 @@ checksum = "d039ad9159c98b70ecfd540b2573b97f7f52c3e8d9f8ad57a24b916a536975f9" dependencies = [ "futures-core", "futures-util", + "hdrhistogram", + "indexmap 2.11.4", "pin-project-lite", + "slab", "sync_wrapper 1.0.1", "tokio", + "tokio-util 0.7.18", "tower-layer 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", "tower-service 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", "tracing", @@ -7401,6 +8505,25 @@ version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" +[[package]] +name = "tungstenite" +version = "0.26.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4793cb5e56680ecbb1d843515b23b6de9a75eb04b66643e256a396d43be33c13" +dependencies = [ + "bytes", + "data-encoding", + "http 1.4.0", + "httparse", + "log", + "rand 0.9.2", + "rustls", + "rustls-pki-types", + "sha1", + "thiserror 2.0.17", + "utf-8", +] + [[package]] name = "tungstenite" version = "0.28.0" @@ -7598,6 +8721,15 @@ version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" +[[package]] +name = "wait-timeout" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09ac3b126d3914f9849036f826e054cbabdc8519970b8998ddaf3b5bd3c65f11" +dependencies = [ + "libc", +] + [[package]] name = "walkdir" version = "2.5.0" @@ -7757,7 +8889,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "77f1154f1ab868e2a01d9834a805faca7bf8b50d041b4ca714d005d0dab1c50c" dependencies = [ "indexmap 2.11.4", - "semver", + "semver 1.0.27", ] [[package]] @@ -7769,7 +8901,7 @@ dependencies = [ "bitflags 2.9.0", "hashbrown 0.15.2", "indexmap 2.11.4", - "semver", + "semver 1.0.27", "serde", ] @@ -7781,7 +8913,7 @@ checksum = "47b807c72e1bac69382b3a6fb3dbe8ea4c0ed87ff5629b8685ae6b9a611028fe" dependencies = [ "bitflags 2.9.0", "indexmap 2.11.4", - "semver", + "semver 1.0.27", ] [[package]] @@ -7824,7 +8956,7 @@ dependencies = [ "pulley-interpreter", "rayon", "rustix 1.0.7", - "semver", + "semver 1.0.27", "serde", "serde_derive", "serde_json", @@ -7867,7 +8999,7 @@ dependencies = [ "object", "postcard", "rustc-demangle", - "semver", + "semver 1.0.27", "serde", "serde_derive", "smallvec", @@ -7942,7 +9074,7 @@ dependencies = [ "cranelift-frontend", "cranelift-native", "gimli 0.31.1", - "itertools", + "itertools 0.14.0", "log", "object", "pulley-interpreter", @@ -8063,6 +9195,20 @@ dependencies = [ "wit-parser", ] +[[package]] +name = "wasmtimer" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1c598d6b99ea013e35844697fc4670d08339d5cda15588f193c6beedd12f644b" +dependencies = [ + "futures 0.3.31", + "js-sys", + "parking_lot", + "pin-utils", + "slab", + "wasm-bindgen", +] + [[package]] name = "wast" 
version = "244.0.0" @@ -8152,6 +9298,24 @@ dependencies = [ "url", ] +[[package]] +name = "webpki-roots" +version = "0.26.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "521bc38abb08001b01866da9f51eb7c5d647a19260e00054a8c7fd5f9e57f7a9" +dependencies = [ + "webpki-roots 1.0.5", +] + +[[package]] +name = "webpki-roots" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "12bed680863276c63889429bfd6cab3b99943659923822de1c8a39c49e4d722c" +dependencies = [ + "rustls-pki-types", +] + [[package]] name = "which" version = "4.4.2" @@ -8175,6 +9339,12 @@ dependencies = [ "web-sys", ] +[[package]] +name = "widestring" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72069c3113ab32ab29e5584db3c6ec55d416895e60715417b5b883a357c3e471" + [[package]] name = "winapi" version = "0.3.9" @@ -8521,6 +9691,9 @@ name = "winnow" version = "0.7.13" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "21a0236b59786fed61e2a80582dd500fe61f18b5dca67a4a067d0bc9039339cf" +dependencies = [ + "memchr", +] [[package]] name = "wiremock" @@ -8564,7 +9737,7 @@ dependencies = [ "id-arena", "indexmap 2.11.4", "log", - "semver", + "semver 1.0.27", "serde", "serde_derive", "serde_json", @@ -8584,6 +9757,25 @@ version = "0.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1e9df38ee2d2c3c5948ea468a8406ff0db0b29ae1ffde1bcf20ef305bcc95c51" +[[package]] +name = "ws_stream_wasm" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c173014acad22e83f16403ee360115b38846fe754e735c5d9d3803fe70c6abc" +dependencies = [ + "async_io_stream", + "futures 0.3.31", + "js-sys", + "log", + "pharos", + "rustc_version 0.4.0", + "send_wrapper", + "thiserror 2.0.17", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", +] + [[package]] name = "wyz" version = "0.5.1" diff --git a/Cargo.toml b/Cargo.toml index d09e9affb53..4322323741f 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -35,6 +35,8 @@ repository = "https://github.com/graphprotocol/graph-node" license = "MIT OR Apache-2.0" [workspace.dependencies] +alloy = { version = "1.0.33", features = ["dyn-abi", "json-abi", "full", "arbitrary", "json-rpc", "serde"] } +alloy-rpc-types = "1.0.33" anyhow = "1.0" async-graphql = { version = "7.1.0", features = ["chrono"] } async-graphql-axum = "7.0.17" @@ -94,6 +96,7 @@ tokio-retry = "0.3.0" tonic = { version = "0.12.3", features = ["tls-roots", "gzip"] } tonic-build = { version = "0.12.3", features = ["prost"] } tower-http = { version = "0.6.8", features = ["cors"] } +tower = { version = "0.5.1", features = ["full"] } wasmparser = "0.118.1" wasmtime = { version = "35.0.0", features = ["async"] } rand = { version = "0.9.2", features = ["os_rng"] } @@ -101,7 +104,6 @@ prometheus = "0.14.0" # Dependencies related to Amp subgraphs ahash = "0.8.11" -alloy = { version = "1.0.12", default-features = false, features = ["json-abi", "serde"] } arrow = { version = "=55.0.0" } arrow-flight = { version = "=55.0.0", features = ["flight-sql-experimental"] } futures = "0.3.31" diff --git a/chain/ethereum/Cargo.toml b/chain/ethereum/Cargo.toml index 17eafbeaaa0..2639cfdff12 100644 --- a/chain/ethereum/Cargo.toml +++ b/chain/ethereum/Cargo.toml @@ -18,6 +18,7 @@ semver = "1.0.27" thiserror = { workspace = true } tokio = { workspace = true } tokio-stream = { workspace = true } +tower = { workspace = true } itertools = "0.14.0" diff --git 
a/chain/ethereum/build.rs b/chain/ethereum/build.rs index 227a50914a6..cb2257bc845 100644 --- a/chain/ethereum/build.rs +++ b/chain/ethereum/build.rs @@ -3,6 +3,7 @@ fn main() { tonic_build::configure() .out_dir("src/protobuf") + .protoc_arg("--experimental_allow_proto3_optional") .compile_protos(&["proto/ethereum.proto"], &["proto"]) .expect("Failed to compile Firehose Ethereum proto(s)"); } diff --git a/chain/ethereum/proto/ethereum.proto b/chain/ethereum/proto/ethereum.proto index 42adbd0ffa6..50c10f921f0 100644 --- a/chain/ethereum/proto/ethereum.proto +++ b/chain/ethereum/proto/ethereum.proto @@ -2,15 +2,62 @@ syntax = "proto3"; package sf.ethereum.type.v2; -option go_package = "github.com/streamingfast/sf-ethereum/types/pb/sf/ethereum/type/v2;pbeth"; +option go_package = "github.com/streamingfast/firehose-ethereum/types/pb/sf/ethereum/type/v2;pbeth"; import "google/protobuf/timestamp.proto"; +// Block is the representation of the tracing of a block in the Ethereum +// blockchain. A block is a collection of [TransactionTrace] that are grouped +// together and processed as an atomic unit. Each [TransactionTrace] is composed +// of a series of [Call] (a.k.a internal transactions) and there is also at +// least one call per transaction a.k.a the root call which essentially has the +// same parameters as the transaction itself (e.g. `from`, `to`, `gas`, `value`, +// etc.). +// +// The exact tracing method used to build the block must be checked against +// the [DetailLevel] field. There are two levels of detail available, `BASE` and +// `EXTENDED`. The `BASE` level has been extracted using archive node RPC calls +// and will contain only the block header, transaction receipts and event logs. +// Refer to the Firehose service provider to know which blocks are offered on +// each network. +// +// The `EXTENDED` level has been extracted using the Firehose tracer and all +// fields are available in this Protobuf. +// +// The Ethereum block model is used across many chains, which means that it +// can happen that certain fields are not available in one chain but are available +// in another. Each field should be documented when necessary if it's available +// on a subset of chains. +// +// One major concept to grasp about the Block is the concept of 'ordinal'. The +// ordinal is a number that is used to globally order every element of execution +// that happened throughout the processing of the block like +// [TransactionTrace], [Call], [Log], [BalanceChange], [StateChange], etc. +// Elements that have a start and end interval, [Transaction] and [Call], will +// have two ordinals: `begin_ordinal` and `end_ordinal`. Elements that are +// executed as "point in time" [Log], [BalanceChange], [StateChange], etc. will +// have only one ordinal named `ordinal`. If you take all of the messages in the +// Block that have an 'ordinal' field, put them in an array, and sort them +// against the `ordinal` field, you will get the exact order of execution of +// each element in the block. +// +// All the 'ordinal' fields in a block are globally unique for the given block; +// it is **not** a chain-wide global ordering. Furthermore, caution must be taken +// with reverted elements due to execution failure. For anything attached to a +// [Call] that has a `state_reverted` field set to `true`, the `ordinal` field +// is not reliable and should not be used to order the element against other +// elements in the block as those elements might have 0 as the ordinal. Only +// successful calls have a reliable `ordinal` field.
message Block { - int32 ver = 1; + // Hash is the block's hash. bytes hash = 2; + // Number is the block's height at which this block was mined. uint64 number = 3; + // Size is the size in bytes of the RLP encoding of the block according to Ethereum + // rules. uint64 size = 4; + // Header contain's the block's header information like its parent hash, the merkel root hash + // and all other information the form a block. BlockHeader header = 5; // Uncles represents block produced with a valid solution but were not actually chosen @@ -20,46 +67,79 @@ message Block { // field will actually be always empty. repeated BlockHeader uncles = 6; + // TransactionTraces hold the execute trace of all the transactions that were executed + // in this block. In in there that you will find most of the Ethereum data model. + // + // They are ordered by the order of execution of the transaction in the block. repeated TransactionTrace transaction_traces = 10; + + // BalanceChanges here is the array of ETH transfer that happened at the block level + // outside of the normal transaction flow of a block. The best example of this is mining + // reward for the block mined, the transfer of ETH to the miner happens outside the normal + // transaction flow of the chain and is recorded as a `BalanceChange` here since we cannot + // attached it to any transaction. + // + // Only available in DetailLevel: EXTENDED repeated BalanceChange balance_changes = 11; - repeated CodeChange code_changes = 20; - reserved 40; // bool filtering_applied = 40 [deprecated = true]; - reserved 41; // string filtering_include_filter_expr = 41 [deprecated = true]; - reserved 42; // string filtering_exclude_filter_expr = 42 [deprecated = true]; -} + enum DetailLevel{ + DETAILLEVEL_EXTENDED = 0; + // DETAILLEVEL_TRACE = 1; // TBD + DETAILLEVEL_BASE = 2; + } -// HeaderOnlyBlock is used to optimally unpack the [Block] structure (note the -// corresponding message number for the `header` field) while consuming less -// memory, when only the `header` is desired. -// -// WARN: this is a client-side optimization pattern and should be moved in the -// consuming code. -message HeaderOnlyBlock { - BlockHeader header = 5; -} + // DetailLevel affects the data available in this block. + // + // ## DetailLevel_EXTENDED + // + // Describes the most complete block, with traces, balance changes, storage + // changes. It is extracted during the execution of the block. + // + // ## DetailLevel_BASE + // + // Describes a block that contains only the block header, transaction receipts + // and event logs: everything that can be extracted using the base JSON-RPC + // interface + // (https://ethereum.org/en/developers/docs/apis/json-rpc/#json-rpc-methods) + // Furthermore, the eth_getTransactionReceipt call has been avoided because it + // brings only minimal improvements at the cost of requiring an archive node + // or a full node with complete transaction index. + DetailLevel detail_level = 12; + + // CodeChanges here is the array of smart code change that happened that happened at the block level + // outside of the normal transaction flow of a block. Some Ethereum's fork like BSC and Polygon + // has some capabilities to upgrade internal smart contracts used usually to track the validator + // list. + // + // On hard fork, some procedure runs to upgrade the smart contract code to a new version. In those + // network, a `CodeChange` for each modified smart contract on upgrade would be present here. 
Note + // that this happen rarely, so the vast majority of block will have an empty list here. + // + // Only available in DetailLevel: EXTENDED + repeated CodeChange code_changes = 20; -// BlockWithRefs is a lightweight block, with traces and transactions -// purged from the `block` within, and only. It is used in transports -// to pass block data around. -message BlockWithRefs { - string id = 1; - Block block = 2; - TransactionRefs transaction_trace_refs = 3; - bool irreversible = 4; -} + // System calls are introduced in Cancun, along with blobs. They are executed outside of transactions but affect the state. + // + // Only available in DetailLevel: EXTENDED + repeated Call system_calls = 21; -message TransactionRefs { - repeated bytes hashes = 1; -} + // Withdrawals represents the list of validator balance withdrawals processed in this block. + // Introduced in the Shanghai hard fork (EIP-4895). + // + // This field has been added because Geth blocks include withdrawals after Shanghai fork, + // but our previous Firehose model didn't capture this data. Currently experimental - + // NOT ready for production use yet as we validate the tracing implementation. + // + // Only available when Shanghai fork is active on the chain. + repeated Withdrawal withdrawals = 22; -message UnclesHeaders { - repeated BlockHeader uncles = 1; -} + reserved 40; // bool filtering_applied = 40 [deprecated = true]; + reserved 41; // string filtering_include_filter_expr = 41 [deprecated = true]; + reserved 42; // string filtering_exclude_filter_expr = 42 [deprecated = true]; -message BlockRef { - bytes hash = 1; - uint64 number = 2; + // Ver represents that data model version of the block, it is used internally by Firehose on Ethereum + // as a validation that we are reading the correct version. + int32 ver = 1; } message BlockHeader { @@ -84,13 +164,10 @@ message BlockHeader { // consensus algorithm, this field will actually be constant and set to `0x00`. BigInt difficulty = 8; - // TotalDifficulty is the sum of all previous blocks difficulty including this block difficulty. + // TotalDifficulty used to be the sum of all previous blocks difficulty including this block difficulty. // - // If the Block containing this `BlockHeader` has been produced using the Proof of Stake - // consensus algorithm, this field will actually be constant and set to the terminal total difficulty - // that was required to transition to Proof of Stake algorithm, which varies per network. It is set to - // 58 750 000 000 000 000 000 000 on Ethereum Mainnet and to 10 790 000 on Ethereum Testnet Goerli. - BigInt total_difficulty = 17; + // It has been deprecated in geth v1.15.0 but was already removed from the JSON-RPC interface for a while + BigInt total_difficulty = 17 [deprecated = true]; uint64 number = 9; uint64 gas_limit = 10; @@ -134,19 +211,84 @@ message BlockHeader { // extra_data, // mix_hash, // nonce, - // base_fee_per_gas + // base_fee_per_gas (to be included only if London fork is active) + // withdrawals_root (to be included only if Shangai fork is active) + // blob_gas_used (to be included only if Cancun fork is active) + // excess_blob_gas (to be included only if Cancun fork is active) + // parent_beacon_root (to be included only if Cancun fork is active) + // requests_hash (to be included only if Prague fork is active) // ])) // bytes hash = 16; // Base fee per gas according to EIP-1559 (e.g. London Fork) rules, only set if London is present/active on the chain. 
BigInt base_fee_per_gas = 18; + + // Withdrawals root hash according to EIP-4895 (e.g. Shangai Fork) rules, only set if Shangai is present/active on the chain. + // + // Only available in DetailLevel: EXTENDED + bytes withdrawals_root = 19; + + // TxDependency is list of transaction indexes that are dependent on each other in the block + // header. This is metadata only that was used by the internal Polygon parallel execution engine. + // + // This field was available in a few versions on Polygon Mainnet and Polygon Mumbai chains. It was actually + // removed and is not populated anymore. It's now embedded in the `extraData` field, refer to Polygon source + // code to determine how to extract it if you need it. + // + // Only available in DetailLevel: EXTENDED + Uint64NestedArray tx_dependency = 20; + + // BlobGasUsed was added by EIP-4844 and is ignored in legacy headers. + optional uint64 blob_gas_used = 22; + + // ExcessBlobGas was added by EIP-4844 and is ignored in legacy headers. + optional uint64 excess_blob_gas = 23; + + // ParentBeaconRoot was added by EIP-4788 and is ignored in legacy headers. + bytes parent_beacon_root = 24; + + // RequestsHash was added by EIP-7685 and is ignored in legacy headers. + bytes requests_hash = 25; +} + +message Uint64NestedArray { + repeated Uint64Array val = 1; +} + +message Uint64Array { + repeated uint64 val = 1; } message BigInt { bytes bytes = 1; } +// TransactionTrace is full trace of execution of the transaction when the +// it actually executed on chain. +// +// It contains all the transaction details like `from`, `to`, `gas`, etc. +// as well as all the internal calls that were made during the transaction. +// +// The `calls` vector contains Call objects which have balance changes, events +// storage changes, etc. +// +// If ordering is important between elements, almost each message like `Log`, +// `Call`, `StorageChange`, etc. have an ordinal field that is represents "execution" +// order of the said element against all other elements in this block. +// +// Due to how the call tree works doing "naively", looping through all calls then +// through a Call's element like `logs` while not yielding the elements in the order +// they were executed on chain. A log in call could have been done before or after +// another in another call depending on the actual call tree. +// +// The `calls` are ordered by creation order and the call tree can be re-computing +// using fields found in `Call` object (parent/child relationship). +// +// Another important thing to note is that even if a transaction succeed, some calls +// within it could have been reverted internally, if this is important to you, you must +// check the field `state_reverted` on the `Call` to determine if it was fully committed +// to the chain or not. message TransactionTrace { // consensus bytes to = 1; @@ -192,19 +334,47 @@ message TransactionTrace { // All transactions that ever existed prior Berlin fork before EIP-2718 was implemented. TRX_TYPE_LEGACY = 0; - // Field that specifies an access list of contract/storage_keys that is going to be used + // Transaction that specicy an access list of contract/storage_keys that is going to be used // in this transaction. // // Added in Berlin fork (EIP-2930). 
TRX_TYPE_ACCESS_LIST = 1; - // Transaction that specifies an access list just like TRX_TYPE_ACCESS_LIST but in addition defines the + // Transaction that specifis an access list just like TRX_TYPE_ACCESS_LIST but in addition defines the // max base gas gee and max priority gas fee to pay for this transaction. Transaction's of those type are // executed against EIP-1559 rules which dictates a dynamic gas cost based on the congestion of the network. TRX_TYPE_DYNAMIC_FEE = 2; + + // Transaction which contain a large amount of data that cannot be accessed by EVM execution, but whose commitment + // can be accessed. The format is intended to be fully compatible with the format that will be used in full sharding. + // + // Transaction that defines an access list just like TRX_TYPE_ACCESS_LIST and enables dynamic fee just like + // TRX_TYPE_DYNAMIC_FEE but in addition defines the fields 'max_fee_per_data_gas' of type 'uint256' and the fields + // 'blob_versioned_hashes' which represents a list of hash outputs from 'kzg_to_versioned_hash'. + // + // Activated in Cancun fork (EIP-4844) + TRX_TYPE_BLOB = 3; + + // Transaction that sets code to an EOA (Externally Owned Accounts) + // + // Activated in Prague (EIP-7702) + TRX_TYPE_SET_CODE = 4; + + // Arbitrum-specific transactions + TRX_TYPE_ARBITRUM_DEPOSIT = 100; + TRX_TYPE_ARBITRUM_UNSIGNED = 101; + TRX_TYPE_ARBITRUM_CONTRACT = 102; + TRX_TYPE_ARBITRUM_RETRY = 104; + TRX_TYPE_ARBITRUM_SUBMIT_RETRYABLE = 105; + TRX_TYPE_ARBITRUM_INTERNAL = 106; + TRX_TYPE_ARBITRUM_LEGACY = 120; + + // OPTIMISM-specific transactions + TRX_TYPE_OPTIMISM_DEPOSIT = 126; + } - // AcccessList represents the storage access this transaction has agreed to do in which case those storage + // AccessList represents the storage access this transaction has agreed to do in which case those storage // access cost less gas unit per access. // // This will is populated only if `TransactionTrace.Type == TRX_TYPE_ACCESS_LIST || TRX_TYPE_DYNAMIC_FEE` which @@ -215,6 +385,8 @@ message TransactionTrace { // // This will is populated only if `TransactionTrace.Type == TRX_TYPE_DYNAMIC_FEE` which is possible only // if London fork is active on the chain. + // + // Only available in DetailLevel: EXTENDED BigInt max_fee_per_gas = 11; // MaxPriorityFeePerGas is priority fee per gas the user to pay in extra to the miner on top of the block's @@ -222,22 +394,106 @@ message TransactionTrace { // // This will is populated only if `TransactionTrace.Type == TRX_TYPE_DYNAMIC_FEE` which is possible only // if London fork is active on the chain. + // + // Only available in DetailLevel: EXTENDED BigInt max_priority_fee_per_gas = 13; // meta uint32 index = 20; bytes hash = 21; bytes from = 22; + + // Only available in DetailLevel: EXTENDED + // Known Issues + // - Version 3: + // Field not populated. It will be empty. + // + // Fixed in `Version 4`, see https://docs.substreams.dev/reference-material/chains-and-endpoints/ethereum-data-model for information about block versions. bytes return_data = 23; + + // Only available in DetailLevel: EXTENDED bytes public_key = 24; + + // The block's global ordinal when the transaction started executing, refer to + // [Block] documentation for further information about ordinals and total ordering. uint64 begin_ordinal = 25; + + // The block's global ordinal when the transaction finished executing, refer to + // [Block] documentation for further information about ordinals and total ordering. 
uint64 end_ordinal = 26; + // TransactionTraceStatus is the status of the transaction execution and will let you know if the transaction + // was successful or not. + // + // ## Explanation relevant only for blocks with `DetailLevel: EXTENDED` + // + // A successful transaction has been recorded to the blockchain's state for calls in it that were successful. + // This means it's possible only a subset of the calls were properly recorded, refer to [calls[].state_reverted] field + // to determine which calls were reverted. + // + // A quirks of the Ethereum protocol is that a transaction `FAILED` or `REVERTED` still affects the blockchain's + // state for **some** of the state changes. Indeed, in those cases, the transactions fees are still paid to the miner + // which means there is a balance change for the transaction's emitter (e.g. `from`) to pay the gas fees, an optional + // balance change for gas refunded to the transaction's emitter (e.g. `from`) and a balance change for the miner who + // received the transaction fees. There is also a nonce change for the transaction's emitter (e.g. `from`). + // + // This means that to properly record the state changes for a transaction, you need to conditionally procees the + // transaction's status. + // + // For a `SUCCEEDED` transaction, you iterate over the `calls` array and record the state changes for each call for + // which `state_reverted == false` (if a transaction succeeded, the call at #0 will always `state_reverted == false` + // because it aligns with the transaction). + // + // For a `FAILED` or `REVERTED` transaction, you iterate over the root call (e.g. at #0, will always exist) for + // balance changes you process those where `reason` is either `REASON_GAS_BUY`, `REASON_GAS_REFUND` or + // `REASON_REWARD_TRANSACTION_FEE` and for nonce change, still on the root call, you pick the nonce change which the + // smallest ordinal (if more than one). TransactionTraceStatus status = 30; + TransactionReceipt receipt = 31; + + // Only available in DetailLevel: EXTENDED repeated Call calls = 32; -} + // BlobGas is the amount of gas the transaction is going to pay for the blobs, this is a computed value + // equivalent to `self.blob_gas_fee_cap * len(self.blob_hashes)` and provided in the model for convenience. + // + // This is specified by https://eips.ethereum.org/EIPS/eip-4844 + // + // This will is populated only if `TransactionTrace.Type == TRX_TYPE_BLOB` which is possible only + // if Cancun fork is active on the chain. + optional uint64 blob_gas = 33; + + // BlobGasFeeCap is the maximum fee per data gas the user is willing to pay for the data gas used. + // + // This is specified by https://eips.ethereum.org/EIPS/eip-4844 + // + // This will is populated only if `TransactionTrace.Type == TRX_TYPE_BLOB` which is possible only + // if Cancun fork is active on the chain. + optional BigInt blob_gas_fee_cap = 34; + + // BlobHashes field represents a list of hash outputs from 'kzg_to_versioned_hash' which + // essentially is a version byte + the sha256 hash of the blob commitment (e.g. + // `BLOB_COMMITMENT_VERSION_KZG + sha256(commitment)[1:]`. + // + // This is specified by https://eips.ethereum.org/EIPS/eip-4844 + // + // This will is populated only if `TransactionTrace.Type == TRX_TYPE_BLOB` which is possible only + // if Cancun fork is active on the chain. 
+ repeated bytes blob_hashes = 35; + + // SetCodeAuthorizations represents the authorizations of a transaction to set code to an EOA (Externally Owned Accounts) + // as defined in EIP-7702. The list will contain all the authorizations as they were specified in the + // transaction itself regardless of their validity. If you need to determined if a given authorization was + // correctly applied on chain's state, refer to [SetCodeAuthorization.discarded] field that records + // if the authorization was discarded or not by the chain due to invalidity. + // + // This is specified by https://eips.ethereum.org/EIPS/eip-7702 + // + // This will is populated only if `TransactionTrace.Type == TRX_TYPE_SET_CODE` which is possible only + // if Prague fork is active on the chain. + repeated SetCodeAuthorization set_code_authorizations = 36; +} // AccessTuple represents a list of storage keys for a given contract's address and is used // for AccessList construction. @@ -246,10 +502,64 @@ message AccessTuple { repeated bytes storage_keys = 2; } -// TransactionTraceWithBlockRef -message TransactionTraceWithBlockRef { - TransactionTrace trace = 1; - BlockRef block_ref = 2; +// SetCodeAuthorization represents the authorization of a transaction to set code of an EOA (Externally Owned Account) +// as defined in EIP-7702. +// +// The 'authority' field is the address that is authorizing the delegation mechanism. The 'authority' value is computed +// from the signature contained in the message using the computation +// `authority = ecrecover(keccak(MAGIC || rlp([chain_id, address, nonce])), y_parity, r, s)` +// where `MAGIC` is `0x5`, `||` is the bytes concatenation operator, `ecrecover` is the Ethereum signature recovery +// and `y_parity` is the recovery ID value denoted `v` in the message below. Checking the go-ethereum implementation +// at https://github.com/ethereum/go-ethereum/blob/v1.15.0/core/types/tx_setcode.go#L117 might prove easier to "read". +// +// We do extract the 'authority' value from the signature in the message and store it in the 'authority' field for +// convenience so you don't need to perform the computation yourself. +message SetCodeAuthorization { + // Discarded determines if this authorization was skipped due to being invalid. As EIP-7702 states, + // if the authorization is invalid (invalid signature, nonce mismatch, etc.) it must be simply + // discarded and the transaction is processed as if the authorization was not present in the + // authorization list. + // + // This boolean records if the authorization was discarded or not by the chain due to invalidity. + bool discarded = 1; + + // ChainID is the chain ID of the chain where the transaction was executed, used + // to recover the authority from the signature. + bytes chain_id = 2; + + // Address contains the address this account is delegating to. This address usually + // contain code that this account essentially "delegates" to. + // + // Note: This was missing when EIP-7702 was first activated on Holesky, Sepolia, BSC Chapel, + // BSC Mainnet and Arbitrum Sepolia but was ready for Ethereum Mainnet hard fork. We will backfill + // those missing values in the near future at which point we will remove this note. + bytes address = 8; + + // Nonce is the nonce of the account that is authorizing delegation mechanism, EIP-7702 rules + // states that nonce should be verified using this rule: + // + // - Verify the nonce of authority is equal to nonce. 
In case authority does not exist in the trie, + // verify that nonce is equal to 0. + // + // Read SetCodeAuthorization to know how to recover the `authority` value. + uint64 nonce = 3; + + // V is the recovery ID value for the signature Y point. While it's defined as a + // `uint32`, it's actually bounded by a `uint8` data type withing the Ethereum protocol. + uint32 v = 4; + + // R is the signature's X point on the elliptic curve (32 bytes). + bytes r = 5; + + // S is the signature's Y point on the elliptic curve (32 bytes). + bytes s = 6; + + // Authority is the address of the account that is authorizing delegation mechanism, it + // is computed from the signature contained in the message and stored for convenience. + // + // If the authority cannot be recovered from the signature, this field will be empty and + // the `discarded` field will be set to `true`. + optional bytes authority = 7; } enum TransactionTraceStatus { @@ -263,9 +573,7 @@ message TransactionReceipt { // State root is an intermediate state_root hash, computed in-between transactions to make // **sure** you could build a proof and point to state in the middle of a block. Geth client // uses `PostState + root + PostStateOrStatus`` while Parity used `status_code, root...`` this piles - // hardforks, see (read the EIPs first): - // - https://github.com/eoscanada/go-ethereum-private/blob/deep-mind/core/types/receipt.go#L147 - // - https://github.com/eoscanada/go-ethereum-private/blob/deep-mind/core/types/receipt.go#L50-L86 + // hard forks, see (read the EIPs first): // - https://github.com/ethereum/EIPs/blob/master/EIPS/eip-658.md // // Moreover, the notion of `Outcome`` in parity, which segregates the two concepts, which are @@ -277,6 +585,23 @@ message TransactionReceipt { uint64 cumulative_gas_used = 2; bytes logs_bloom = 3; repeated Log logs = 4; + + // BlobGasUsed is the amount of blob gas that has been used within this transaction. At time + // of writing, this is equal to `self.blob_gas_fee_cap * len(self.blob_hashes)`. + // + // This is specified by https://eips.ethereum.org/EIPS/eip-4844 + // + // This will is populated only if `TransactionTrace.Type == TRX_TYPE_BLOB` which is possible only + // if Cancun fork is active on the chain. + optional uint64 blob_gas_used = 5; + + // BlobGasPrice is the amount to pay per blob item in the transaction. + // + // This is specified by https://eips.ethereum.org/EIPS/eip-4844 + // + // This will is populated only if `TransactionTrace.Type == TRX_TYPE_BLOB` which is possible only + // if Cancun fork is active on the chain. + optional BigInt blob_gas_price = 6; } message Log { @@ -285,8 +610,10 @@ message Log { bytes data = 3; // Index is the index of the log relative to the transaction. This index - // is always populated regardless of the state reversion of the call + // is always populated regardless of the state reversion of the the call // that emitted this log. + // + // Only available in DetailLevel: EXTENDED uint32 index = 4; // BlockIndex represents the index of the log relative to the Block. @@ -294,7 +621,7 @@ message Log { // An **important** notice is that this field will be 0 when the call // that emitted the log has been reverted by the chain. // - // Currently, there are two locations where a Log can be obtained: + // Currently, there is two locations where a Log can be obtained: // - block.transaction_traces[].receipt.logs[] // - block.transaction_traces[].calls[].logs[] // @@ -306,6 +633,8 @@ message Log { // the `blockIndex` value will always be 0. 
uint32 blockIndex = 6; + // The block's global ordinal when the log was recorded, refer to [Block] + // documentation for further information about ordinals and total ordering. uint64 ordinal = 7; } @@ -316,25 +645,76 @@ message Call { CallType call_type = 4; bytes caller = 5; bytes address = 6; + + // AddressDelegatesTo contains the address from which the actual code to execute will be loaded + // as defined per EIP-7702 rules. If the Call's address value resolves to a code + // that delegates to another address, this field will be populated with the address + // that the call is delegated to. It will be empty in all other situations. + // + // Assumes that a 'SetCode' transaction set address `0xA` to delegates to address `0xB`, + // then when a call is made to `0xA`, the Call object would have: + // + // - caller = + // - address = 0xA + // - address_delegates_to = 0xB + // + // Again, it's important to emphasize that this field relates to EIP-7702, if the call is + // a DELEGATE or CALLCODE type, this field will not be populated and will remain empty. + // + // It will be populated only if EIP-7702 is active on the chain (Prague fork) and if the + // 'address' of the call was pointing to another address at time of execution. + optional bytes address_delegates_to = 34; + BigInt value = 7; uint64 gas_limit = 8; uint64 gas_consumed = 9; bytes return_data = 13; + + // Known Issues + // - Version 3: + // When call is `CREATE` or `CREATE2`, this field is not populated. A couple of suggestions: + // 1. You can get the contract's code in the `code_changes` field. + // 2. In the root `CREATE` call, you can directly use the `TransactionTrace`'s input field. + // + // Fixed in `Version 4`, see https://docs.substreams.dev/reference-material/chains-and-endpoints/ethereum-data-model for information about block versions. bytes input = 14; + + // Indicates whether the call executed code. + // + // Known Issues + // - Version 3: + // This may be incorrectly set to `false` for accounts with code handling native value transfers, + // as well as for certain precompiles with no input. + // The value is initially set based on `call.type != CREATE && len(call.input) > 0` + // and later adjusted if the tracer detects an account without code. + // + // Fixed in `Version 4`, see https://docs.substreams.dev/reference-material/chains-and-endpoints/ethereum-data-model for information about block versions. bool executed_code = 15; bool suicide = 16; /* hex representation of the hash -> preimage */ map keccak_preimages = 20; + + // Known Issues + // - Version 3: + // The data might be not be in order. + // + // Fixed in `Version 4`, see https://docs.substreams.dev/reference-material/chains-and-endpoints/ethereum-data-model for information about block versions. repeated StorageChange storage_changes = 21; repeated BalanceChange balance_changes = 22; repeated NonceChange nonce_changes = 24; repeated Log logs = 25; repeated CodeChange code_changes = 26; - // Deprecated: repeated bytes created_accounts reserved 27; + // Known Issues + // - Version 3: + // Some gas changes are not correctly tracked: + // 1. Gas refunded due to data returned to the chain (occurs at the end of a transaction, before buyback). + // 2. Initial gas allocation (0 -> GasLimit) at the start of a call. + // 3. Final gas deduction (LeftOver -> 0) at the end of a call (if applicable). + // Fixed in `Version 4`, see https://docs.substreams.dev/reference-material/chains-and-endpoints/ethereum-data-model for information about block versions. 
repeated GasChange gas_changes = 28; // Deprecated: repeated GasEvent gas_events @@ -375,13 +755,44 @@ message Call { // ``` // // In the transaction above, while Call #2 and Call #3 would have the - // status `EXECUTED` + // status `EXECUTED`. + // + // If you check all calls and check only `state_reverted` flag, you might be missing + // some balance changes and nonce changes. This is because when a full transaction fails + // in ethereum (e.g. `calls.all(x.state_reverted == true)`), there is still the transaction + // fee that are recorded to the chain. + // + // Refer to [TransactionTrace#status] field for more details about the handling you must + // perform. bool state_reverted = 30; + // Known Issues + // - Version 3: + // 1. The block's global ordinal when the call started executing, refer to + // [Block] documentation for further information about ordinals and total ordering. + // 2. The transaction root call `begin_ordial` is always `0` (also in the GENESIS block), which can cause issues + // when sorting by this field. To ensure proper execution order, set it as follows: + // `trx.Calls[0].BeginOrdinal = trx.BeginOrdinal`. + // + // Fixed in `Version 4`, see https://docs.substreams.dev/reference-material/chains-and-endpoints/ethereum-data-model for information about block versions. uint64 begin_ordinal = 31; + + // Known Issues + // - Version 3: + // 1. The block's global ordinal when the call finished executing, refer to + // [Block] documentation for further information about ordinals and total ordering. + // 2. The root call of the GENESIS block is always `0`. To fix it, you can set it as follows: + // `rx.Calls[0].EndOrdinal = max.Uint64`. + // + // Fixed in `Version 4`, see https://docs.substreams.dev/reference-material/chains-and-endpoints/ethereum-data-model for information about block versions. uint64 end_ordinal = 32; - repeated AccountCreation account_creations = 33; + // Known Issues + // - Version 4: + // AccountCreations are NOT SUPPORTED anymore. DO NOT rely on them. + repeated AccountCreation account_creations = 33 [deprecated = true]; + + // The identifier 34 is taken by 'address_delegates_to' field above. reserved 50; // repeated ERC20BalanceChange erc20_balance_changes = 50 [deprecated = true]; reserved 51; // repeated ERC20TransferEvent erc20_transfer_events = 51 [deprecated = true]; @@ -403,20 +814,42 @@ message StorageChange { bytes old_value = 3; bytes new_value = 4; + // The block's global ordinal when the storage change was recorded, refer to [Block] + // documentation for further information about ordinals and total ordering. uint64 ordinal = 5; } message BalanceChange { + // Address is the address of the account that has changed balance. bytes address = 1; + + // OldValue is the balance of the address before the change. This value + // can be **nil/null/None** if there was no previous balance for the address. + // It is safe in those case(s) to consider the balance as being 0. + // + // If you consume this from a Substreams, you can safely use: + // + // ```ignore + // let old_value = old_value.unwrap_or_default(); + // ``` BigInt old_value = 2; - BigInt new_value = 3; - Reason reason = 4; - // Obtain all balance change reasons under deep mind repository: + // NewValue is the balance of the address after the change. This value + // can be **nil/null/None** if there was no previous balance for the address + // after the change. It is safe in those case(s) to consider the balance as being + // 0. 
+ // + // If you consume this from a Substreams, you can safely use: // - // ```shell - // ack -ho 'BalanceChangeReason\(".*"\)' | grep -Eo '".*"' | sort | uniq + // ```ignore + // let new_value = new_value.unwrap_or_default(); // ``` + BigInt new_value = 3; + + // Reason is the reason why the balance has changed. This is useful to determine + // why the balance has changed and what is the context of the change. + Reason reason = 4; + enum Reason { REASON_UNKNOWN = 0; REASON_REWARD_MINE_UNCLE = 1; @@ -435,8 +868,20 @@ message BalanceChange { REASON_CALL_BALANCE_OVERRIDE = 12; // Used on chain(s) where some Ether burning happens REASON_BURN = 15; + REASON_WITHDRAWAL = 16; + + // Rewards for Blob processing on BNB chain added in Tycho hard-fork, refers + // to BNB documentation to check the timestamp at which it was activated. + REASON_REWARD_BLOB_FEE = 17; + + // This reason is used only on Optimism chain. + REASON_INCREASE_MINT = 18; + // This reason is used only on Optimism chain. + REASON_REVERT = 19; } + // The block's global ordinal when the balance change was recorded, refer to [Block] + // documentation for further information about ordinals and total ordering. uint64 ordinal = 5; } @@ -444,11 +889,17 @@ message NonceChange { bytes address = 1; uint64 old_value = 2; uint64 new_value = 3; + + // The block's global ordinal when the nonce change was recorded, refer to [Block] + // documentation for further information about ordinals and total ordering. uint64 ordinal = 4; } message AccountCreation { bytes account = 1; + + // The block's global ordinal when the account creation was recorded, refer to [Block] + // documentation for further information about ordinals and total ordering. uint64 ordinal = 2; } @@ -459,6 +910,8 @@ message CodeChange { bytes new_hash = 4; bytes new_code = 5; + // The block's global ordinal when the code change was recorded, refer to [Block] + // documentation for further information about ordinals and total ordering. uint64 ordinal = 6; } @@ -466,43 +919,155 @@ message CodeChange { // The gas is computed per actual op codes. Doing them completely might prove // overwhelming in most cases. // -// Hence, we only index some of them, those that are costly like all the calls +// Hence, we only index some of them, those that are costy like all the calls // one, log events, return data, etc. 
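The null-handling and `Reason` guidance in the `BalanceChange` comments above can likewise be illustrated with a short Rust sketch. The types here are simplified stand-ins: the generated code wraps balances in the `BigInt` message (big-endian bytes) and exposes them as optional fields, which is approximated below with `Option<i128>`, and only a few of the `Reason` variants from the enum above are mirrored:

```rust
// Simplified stand-ins for illustration; a real consumer would use the
// prost-generated BalanceChange and BigInt types.
#[derive(Clone, Copy, PartialEq, Eq)]
enum Reason {
    RewardMineUncle,
    Burn,
    Withdrawal,
}

struct BalanceChange {
    old_value: Option<i128>,
    new_value: Option<i128>,
    reason: Reason,
}

/// Treat a missing old or new value as zero, as the comments above recommend,
/// and sum the balance deltas recorded for one particular reason.
fn total_delta_for(changes: &[BalanceChange], reason: Reason) -> i128 {
    changes
        .iter()
        .filter(|c| c.reason == reason)
        .map(|c| c.new_value.unwrap_or_default() - c.old_value.unwrap_or_default())
        .sum()
}
```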
message GasChange { uint64 old_value = 1; uint64 new_value = 2; Reason reason = 3; - // Obtain all gas change reasons under deep mind repository: - // - // ```shell - // ack -ho 'GasChangeReason\(".*"\)' | grep -Eo '".*"' | sort | uniq - // ``` enum Reason { REASON_UNKNOWN = 0; + // REASON_CALL is the amount of gas that will be charged for a 'CALL' opcode executed by the EVM REASON_CALL = 1; + // REASON_CALL_CODE is the amount of gas that will be charged for a 'CALLCODE' opcode executed by the EVM REASON_CALL_CODE = 2; + // REASON_CALL_DATA_COPY is the amount of gas that will be charged for a 'CALLDATACOPY' opcode executed by the EVM REASON_CALL_DATA_COPY = 3; + // REASON_CODE_COPY is the amount of gas that will be charged for a 'CALLDATACOPY' opcode executed by the EVM REASON_CODE_COPY = 4; + // REASON_CODE_STORAGE is the amount of gas that will be charged for code storage REASON_CODE_STORAGE = 5; + // REASON_CONTRACT_CREATION is the amount of gas that will be charged for a 'CREATE' opcode executed by the EVM and for the gas + // burned for a CREATE, today controlled by EIP150 rules REASON_CONTRACT_CREATION = 6; + // REASON_CONTRACT_CREATION2 is the amount of gas that will be charged for a 'CREATE2' opcode executed by the EVM and for the gas + // burned for a CREATE2, today controlled by EIP150 rules REASON_CONTRACT_CREATION2 = 7; + // REASON_DELEGATE_CALL is the amount of gas that will be charged for a 'DELEGATECALL' opcode executed by the EVM REASON_DELEGATE_CALL = 8; + // REASON_EVENT_LOG is the amount of gas that will be charged for a 'LOG' opcode executed by the EVM REASON_EVENT_LOG = 9; + // REASON_EXT_CODE_COPY is the amount of gas that will be charged for a 'LOG' opcode executed by the EVM REASON_EXT_CODE_COPY = 10; + // REASON_FAILED_EXECUTION is the burning of the remaining gas when the execution failed without a revert REASON_FAILED_EXECUTION = 11; + // REASON_INTRINSIC_GAS is the amount of gas that will be charged for the intrinsic cost of the transaction, there is + // always exactly one of those per transaction REASON_INTRINSIC_GAS = 12; + // GasChangePrecompiledContract is the amount of gas that will be charged for a precompiled contract execution REASON_PRECOMPILED_CONTRACT = 13; + // REASON_REFUND_AFTER_EXECUTION is the amount of gas that will be refunded to the caller after the execution of the call, + // if there is left over at the end of execution REASON_REFUND_AFTER_EXECUTION = 14; + // REASON_RETURN is the amount of gas that will be charged for a 'RETURN' opcode executed by the EVM REASON_RETURN = 15; + // REASON_RETURN_DATA_COPY is the amount of gas that will be charged for a 'RETURNDATACOPY' opcode executed by the EVM REASON_RETURN_DATA_COPY = 16; + // REASON_REVERT is the amount of gas that will be charged for a 'REVERT' opcode executed by the EVM REASON_REVERT = 17; + // REASON_SELF_DESTRUCT is the amount of gas that will be charged for a 'SELFDESTRUCT' opcode executed by the EVM REASON_SELF_DESTRUCT = 18; + // REASON_STATIC_CALL is the amount of gas that will be charged for a 'STATICALL' opcode executed by the EVM REASON_STATIC_CALL = 19; + // REASON_STATE_COLD_ACCESS is the amount of gas that will be charged for a cold storage access as controlled by EIP2929 rules + // // Added in Berlin fork (Geth 1.10+) REASON_STATE_COLD_ACCESS = 20; + + // REASON_TX_INITIAL_BALANCE is the initial balance for the call which will be equal to the gasLimit of the call + // + // Added as new tracing reason in Geth, available only on some chains + REASON_TX_INITIAL_BALANCE = 21; 
+ // REASON_TX_REFUNDS is the sum of all refunds which happened during the tx execution (e.g. storage slot being cleared) + // this generates an increase in gas. There is only one such gas change per transaction. + // + // Added as new tracing reason in Geth, available only on some chains + REASON_TX_REFUNDS = 22; + // REASON_TX_LEFT_OVER_RETURNED is the amount of gas left over at the end of transaction's execution that will be returned + // to the chain. This change will always be a negative change as we "drain" left over gas towards 0. If there was no gas + // left at the end of execution, no such even will be emitted. The returned gas's value in Wei is returned to caller. + // There is at most one of such gas change per transaction. + // + // Added as new tracing reason in Geth, available only on some chains + REASON_TX_LEFT_OVER_RETURNED = 23; + + // REASON_CALL_INITIAL_BALANCE is the initial balance for the call which will be equal to the gasLimit of the call. There is only + // one such gas change per call. + // + // Added as new tracing reason in Geth, available only on some chains + REASON_CALL_INITIAL_BALANCE = 24; + // REASON_CALL_LEFT_OVER_RETURNED is the amount of gas left over that will be returned to the caller, this change will always + // be a negative change as we "drain" left over gas towards 0. If there was no gas left at the end of execution, no such even + // will be emitted. + REASON_CALL_LEFT_OVER_RETURNED = 25; + + // REASON_WITNESS_CONTRACT_INIT flags the event of adding to the witness during the contract creation initialization step. + REASON_WITNESS_CONTRACT_INIT = 26; + + // REASON_WITNESS_CONTRACT_CREATION flags the event of adding to the witness during the contract creation finalization step. + REASON_WITNESS_CONTRACT_CREATION = 27; + // REASON_WITNESS_CODE_CHUNK flags the event of adding one or more contract code chunks to the witness. + REASON_WITNESS_CODE_CHUNK = 28; + // REASON_WITNESS_CONTRACT_COLLISION_CHECK flags the event of adding to the witness when checking for contract address collision. + REASON_WITNESS_CONTRACT_COLLISION_CHECK = 29; + // REASON_TX_DATA_FLOOR is the amount of extra gas the transaction has to pay to reach the minimum gas requirement for the + // transaction data. This change will always be a negative change. + REASON_TX_DATA_FLOOR = 30; } + // The block's global ordinal when the gas change was recorded, refer to [Block] + // documentation for further information about ordinals and total ordering. uint64 ordinal = 4; } + +// HeaderOnlyBlock is used to optimally unpack the [Block] structure (note the +// corresponding message number for the `header` field) while consuming less +// memory, when only the `header` is desired. +// +// WARN: this is a client-side optimization pattern and should be moved in the +// consuming code. +message HeaderOnlyBlock { + BlockHeader header = 5; +} + +// BlockWithRefs is a lightweight block, with traces and transactions +// purged from the `block` within, and only. It is used in transports +// to pass block data around. +message BlockWithRefs { + string id = 1; + Block block = 2; + TransactionRefs transaction_trace_refs = 3; + bool irreversible = 4; +} + +message TransactionTraceWithBlockRef { + TransactionTrace trace = 1; + BlockRef block_ref = 2; +} + +message TransactionRefs { + repeated bytes hashes = 1; +} + +message BlockRef { + bytes hash = 1; + uint64 number = 2; +} + +// Withdrawal represents a validator withdrawal from the beacon chain to the EVM. 
+// Introduced in EIP-4895 (Shanghai hard fork). +message Withdrawal { + // Index is the monotonically increasing identifier of the withdrawal + uint64 index = 1; + + // ValidatorIndex is the index of the validator that is withdrawing + uint64 validator_index = 2; + + // Address is the Ethereum address receiving the withdrawn funds + bytes address = 3; + + // Amount is the value of the withdrawal in gwei (1 gwei = 1e9 wei) + uint64 amount = 4; +} \ No newline at end of file diff --git a/chain/ethereum/src/adapter.rs b/chain/ethereum/src/adapter.rs index 93bb2908894..d161462b933 100644 --- a/chain/ethereum/src/adapter.rs +++ b/chain/ethereum/src/adapter.rs @@ -1,16 +1,17 @@ use anyhow::Error; use async_trait::async_trait; -use ethabi::{Error as ABIError, ParamType, Token}; +use graph::abi; use graph::blockchain::ChainIdentifier; +use graph::components::ethereum::AnyBlock; use graph::components::subgraph::MappingError; use graph::data::store::ethereum::call; use graph::data_source::common::ContractCall; use graph::firehose::CallToFilter; use graph::firehose::CombinedFilter; use graph::firehose::LogFilter; -use graph::prelude::web3::types::Bytes; -use graph::prelude::web3::types::H160; -use graph::prelude::web3::types::U256; +use graph::prelude::alloy::primitives::{Address, B256}; +use graph::prelude::alloy::rpc::types::Log; +use graph::prelude::alloy::transports::{RpcError, TransportErrorKind}; use itertools::Itertools; use prost::Message; use prost_types::Any; @@ -19,7 +20,6 @@ use std::collections::{HashMap, HashSet}; use std::fmt; use thiserror::Error; use tiny_keccak::keccak256; -use web3::types::{Address, Log, H256}; use graph::prelude::*; use graph::{ @@ -28,6 +28,8 @@ use graph::{ petgraph::{self, graphmap::GraphMap}, }; +use graph::blockchain::BlockPtr; + const COMBINED_FILTER_TYPE_URL: &str = "type.googleapis.com/sf.ethereum.transform.v1.CombinedFilter"; @@ -35,7 +37,7 @@ use crate::capabilities::NodeCapabilities; use crate::data_source::{BlockHandlerFilter, DataSource}; use crate::{Chain, Mapping, ENV_VARS}; -pub type EventSignature = H256; +pub type EventSignature = B256; pub type FunctionSelector = [u8; 4]; /// `EventSignatureWithTopics` is used to match events with @@ -44,19 +46,19 @@ pub type FunctionSelector = [u8; 4]; #[derive(Clone, Debug, PartialEq, Eq, Hash)] pub struct EventSignatureWithTopics { pub address: Option
<Address>, - pub signature: H256, - pub topic1: Option<Vec<H256>>, - pub topic2: Option<Vec<H256>>, - pub topic3: Option<Vec<H256>>, + pub signature: B256, + pub topic1: Option<Vec<B256>>, + pub topic2: Option<Vec<B256>>, + pub topic3: Option<Vec<B256>>, } impl EventSignatureWithTopics { pub fn new( address: Option<Address>
, - signature: H256, - topic1: Option>, - topic2: Option>, - topic3: Option>, + signature: B256, + topic1: Option>, + topic2: Option>, + topic3: Option>, ) -> Self { EventSignatureWithTopics { address, @@ -71,7 +73,7 @@ impl EventSignatureWithTopics { /// If self.address is None, it's considered a wildcard match. /// Otherwise, it must match the provided address. /// It must also match the topics if they are Some - pub fn matches(&self, address: Option<&H160>, sig: H256, topics: &[H256]) -> bool { + pub fn matches(&self, address: Option<&Address>, sig: B256, topics: &[B256]) -> bool { // If self.address is None, it's considered a wildcard match. Otherwise, it must match the provided address. let address_matches = match self.address { Some(ref self_addr) => address == Some(self_addr), @@ -98,22 +100,21 @@ impl EventSignatureWithTopics { #[derive(Error, Debug)] pub enum EthereumRpcError { #[error("call error: {0}")] - Web3Error(web3::Error), + AlloyError(RpcError), #[error("ethereum node took too long to perform call")] Timeout, } #[derive(Error, Debug)] pub enum ContractCallError { - #[error("ABI error: {0}")] - ABIError(#[from] ABIError), - /// `Token` is not of expected `ParamType` - #[error("type mismatch, token {0:?} is not of kind {1:?}")] - TypeError(Token, ParamType), - #[error("error encoding input call data: {0}")] - EncodingError(ethabi::Error), + #[error("ABI error: {0:#}")] + ABIError(anyhow::Error), + #[error("type mismatch, decoded value {0:?} is not of kind {1:?}")] + TypeError(abi::DynSolValue, abi::DynSolType), + #[error("error encoding input call data: {0:#}")] + EncodingError(anyhow::Error), #[error("call error: {0}")] - Web3Error(web3::Error), + AlloyError(RpcError), #[error("ethereum node took too long to perform call")] Timeout, #[error("internal error: {0}")] @@ -126,7 +127,7 @@ impl From for MappingError { // Any error reported by the Ethereum node could be due to the block no longer being on // the main chain. This is very unespecific but we don't want to risk failing a // subgraph due to a transient error such as a reorg. - ContractCallError::Web3Error(e) => MappingError::PossibleReorg(anyhow::anyhow!( + ContractCallError::AlloyError(e) => MappingError::PossibleReorg(anyhow::anyhow!( "Ethereum node returned an error for an eth_call: {e}" )), // Also retry on timeouts. @@ -148,13 +149,34 @@ enum LogFilterNode { #[derive(Clone, Debug)] pub struct EthGetLogsFilter { pub contracts: Vec
, - pub event_signatures: Vec, - pub topic1: Option>, - pub topic2: Option>, - pub topic3: Option>, + pub event_signatures: Vec, + pub topic1: Option>, + pub topic2: Option>, + pub topic3: Option>, } impl EthGetLogsFilter { + /// Convert to alloy Filter for the given block range + pub fn to_alloy_filter(&self, from: BlockNumber, to: BlockNumber) -> alloy::rpc::types::Filter { + let mut filter_builder = alloy::rpc::types::Filter::new() + .from_block(alloy::rpc::types::BlockNumberOrTag::Number(from as u64)) + .to_block(alloy::rpc::types::BlockNumberOrTag::Number(to as u64)) + .address(self.contracts.clone()) + .event_signature(self.event_signatures.clone()); + + if let Some(ref topic1) = self.topic1 { + filter_builder = filter_builder.topic1(topic1.clone()); + } + if let Some(ref topic2) = self.topic2 { + filter_builder = filter_builder.topic2(topic2.clone()); + } + if let Some(ref topic3) = self.topic3 { + filter_builder = filter_builder.topic3(topic3.clone()); + } + + filter_builder + } + fn from_contract(address: Address) -> Self { EthGetLogsFilter { contracts: vec![address], @@ -165,7 +187,7 @@ impl EthGetLogsFilter { } } - fn from_event(event: EventSignature) -> Self { + fn from_event(event: B256) -> Self { EthGetLogsFilter { contracts: vec![], event_signatures: vec![event], @@ -205,7 +227,7 @@ impl fmt::Display for EthGetLogsFilter { }; // Helper to format topics as strings - let format_topics = |topics: &Option>| -> String { + let format_topics = |topics: &Option>| -> String { topics.as_ref().map_or_else( || "None".to_string(), |ts| { @@ -351,11 +373,11 @@ impl From for Vec { }| LogFilter { addresses: contracts .iter() - .map(|addr| addr.to_fixed_bytes().to_vec()) + .map(|addr| addr.to_vec()) .collect_vec(), event_signatures: event_signatures .iter() - .map(|sig| sig.to_fixed_bytes().to_vec()) + .map(|sig| sig.to_vec()) .collect_vec(), }, ) @@ -367,14 +389,14 @@ impl EthereumLogFilter { /// Check if this filter matches the specified `Log`. pub fn matches(&self, log: &Log) -> bool { // First topic should be event sig - match log.topics.first() { + match log.topics().first() { None => false, Some(sig) => { // The `Log` matches the filter either if the filter contains // a (contract address, event signature) pair that matches the // `Log`, or if the filter contains wildcard event that matches. - let contract = LogFilterNode::Contract(log.address); + let contract = LogFilterNode::Contract(log.address()); let event = LogFilterNode::Event(*sig); self.contracts_and_events_graph .all_edges() @@ -383,7 +405,7 @@ impl EthereumLogFilter { || self .events_with_topic_filters .iter() - .any(|(e, _)| e.matches(Some(&log.address), *sig, &log.topics)) + .any(|(e, _)| e.matches(Some(&log.address()), *sig, log.topics())) } } } @@ -391,9 +413,9 @@ impl EthereumLogFilter { /// Similar to [`matches`], checks if a transaction receipt is required for this log filter. pub fn requires_transaction_receipt( &self, - event_signature: &H256, + event_signature: &B256, contract_address: Option<&Address>, - topics: &[H256], + topics: &[B256], ) -> bool { // Check for wildcard events first. 
if self.wildcard_events.get(event_signature) == Some(&true) { @@ -629,7 +651,7 @@ impl From for Vec { let mut filters: Vec = contract_addresses_function_signatures .into_iter() .map(|(addr, (_, sigs))| CallToFilter { - addresses: vec![addr.to_fixed_bytes().to_vec()], + addresses: vec![addr.to_vec()], signatures: sigs.into_iter().map(|x| x.to_vec()).collect_vec(), }) .collect(); @@ -816,7 +838,7 @@ impl From for Vec { .sorted() .dedup_by(|x, y| x == y) .map(|addr| CallToFilter { - addresses: vec![addr.to_fixed_bytes().to_vec()], + addresses: vec![addr.to_vec()], signatures: vec![], }) .collect_vec() @@ -1077,20 +1099,8 @@ pub trait EthereumAdapter: Send + Sync + 'static { /// connected to. async fn net_identifiers(&self) -> Result; - /// Get the latest block, including full transactions. - async fn latest_block(&self, logger: &Logger) -> Result; - /// Get the latest block, with only the header and transaction hashes. - async fn latest_block_header( - &self, - logger: &Logger, - ) -> Result, bc::IngestorError>; - - async fn load_block( - &self, - logger: &Logger, - block_hash: H256, - ) -> Result; + async fn latest_block_ptr(&self, logger: &Logger) -> Result; /// Load Ethereum blocks in bulk, returning results as they come back as a Stream. /// May use the `chain_store` as a cache. @@ -1098,44 +1108,29 @@ pub trait EthereumAdapter: Send + Sync + 'static { &self, logger: Logger, chain_store: Arc, - block_hashes: HashSet, + block_hashes: HashSet, ) -> Result>, Error>; /// Find a block by its hash. async fn block_by_hash( &self, logger: &Logger, - block_hash: H256, - ) -> Result, Error>; + block_hash: B256, + ) -> Result, Error>; async fn block_by_number( &self, logger: &Logger, block_number: BlockNumber, - ) -> Result, Error>; + ) -> Result, Error>; /// Load full information for the specified `block` (in particular, transaction receipts). async fn load_full_block( &self, logger: &Logger, - block: LightEthereumBlock, + block: AnyBlock, ) -> Result; - /// Find a block by its number, according to the Ethereum node. - /// - /// Careful: don't use this function without considering race conditions. - /// Chain reorgs could happen at any time, and could affect the answer received. - /// Generally, it is only safe to use this function with blocks that have received enough - /// confirmations to guarantee no further reorgs, **and** where the Ethereum node is aware of - /// those confirmations. - /// If the Ethereum node is far behind in processing blocks, even old blocks can be subject to - /// reorgs. - async fn block_hash_by_block_number( - &self, - logger: &Logger, - block_number: BlockNumber, - ) -> Result, Error>; - /// Finds the hash and number of the lowest non-null block with height greater than or equal to /// the given number. /// @@ -1155,7 +1150,7 @@ pub trait EthereumAdapter: Send + Sync + 'static { logger: &Logger, call: &ContractCall, cache: Arc, - ) -> Result<(Option>, call::Source), ContractCallError>; + ) -> Result<(Option>, call::Source), ContractCallError>; /// Make multiple contract calls in a single batch. The returned `Vec` /// has results in the same order as the calls in `calls` on input. 
The @@ -1165,22 +1160,22 @@ pub trait EthereumAdapter: Send + Sync + 'static { logger: &Logger, calls: &[&ContractCall], cache: Arc, - ) -> Result>, call::Source)>, ContractCallError>; + ) -> Result>, call::Source)>, ContractCallError>; async fn get_balance( &self, logger: &Logger, - address: H160, + address: Address, block_ptr: BlockPtr, - ) -> Result; + ) -> Result; // Returns the compiled bytecode of a smart contract async fn get_code( &self, logger: &Logger, - address: H160, + address: Address, block_ptr: BlockPtr, - ) -> Result; + ) -> Result; } #[cfg(test)] @@ -1194,9 +1189,7 @@ mod tests { use graph::blockchain::TriggerFilter as _; use graph::firehose::{CallToFilter, CombinedFilter, LogFilter, MultiLogFilter}; use graph::petgraph::graphmap::GraphMap; - use graph::prelude::ethabi::ethereum_types::H256; - use graph::prelude::web3::types::Address; - use graph::prelude::web3::types::Bytes; + use graph::prelude::alloy::primitives::{Address, Bytes, B256, U256}; use graph::prelude::EthereumCall; use hex::ToHex; use itertools::Itertools; @@ -1223,7 +1216,7 @@ mod tests { .map(|addr| { format!( "0x{}", - H256::from_str(addr) + B256::from_str(addr) .expect("unable to parse addr") .encode_hex::() ) @@ -1234,16 +1227,11 @@ mod tests { let sigs = event_sigs .iter() - .map(|addr| { - H256::from_str(addr) - .expect("unable to parse addr") - .to_fixed_bytes() - .to_vec() - }) + .map(|addr| B256::from_str(addr).expect("unable to parse addr").to_vec()) .collect_vec(); let filter = LogFilter { - addresses: vec![address.to_fixed_bytes().to_vec()], + addresses: vec![address.to_vec()], event_signatures: sigs, }; // This base64 was provided by Streamingfast as a binding example of the expected encoded for the @@ -1272,7 +1260,6 @@ mod tests { let filter = LogFilter { addresses: vec![Address::from_str(hex_addr) .expect("failed to parse address") - .to_fixed_bytes() .to_vec()], event_signatures: vec![fs.to_vec()], }; @@ -1287,8 +1274,7 @@ mod tests { #[test] fn ethereum_trigger_filter_to_firehose() { - let address = Address::from_low_u64_be; - let sig = H256::from_low_u64_le; + let sig = |value: u64| B256::from(U256::from(value)); let mut filter = TriggerFilter { log: EthereumLogFilter { contracts_and_events_graph: GraphMap::new(), @@ -1318,27 +1304,27 @@ mod tests { let expected_call_filters = vec![ CallToFilter { - addresses: vec![address(0).to_fixed_bytes().to_vec()], + addresses: vec![address(0).to_vec()], signatures: vec![[0u8; 4].to_vec()], }, CallToFilter { - addresses: vec![address(1).to_fixed_bytes().to_vec()], + addresses: vec![address(1).to_vec()], signatures: vec![[1u8; 4].to_vec()], }, CallToFilter { - addresses: vec![address(2).to_fixed_bytes().to_vec()], + addresses: vec![address(2).to_vec()], signatures: vec![], }, CallToFilter { - addresses: vec![address(1000).to_fixed_bytes().to_vec()], + addresses: vec![address(1000).to_vec()], signatures: vec![], }, CallToFilter { - addresses: vec![address(2000).to_fixed_bytes().to_vec()], + addresses: vec![address(2000).to_vec()], signatures: vec![], }, CallToFilter { - addresses: vec![address(3000).to_fixed_bytes().to_vec()], + addresses: vec![address(3000).to_vec()], signatures: vec![], }, ]; @@ -1361,15 +1347,12 @@ mod tests { let expected_log_filters = vec![ LogFilter { - addresses: vec![address(10).to_fixed_bytes().to_vec()], - event_signatures: vec![sig(101).to_fixed_bytes().to_vec()], + addresses: vec![address(10).to_vec()], + event_signatures: vec![sig(101).to_vec()], }, LogFilter { - addresses: vec![ - address(10).to_fixed_bytes().to_vec(), - 
address(20).to_fixed_bytes().to_vec(), - ], - event_signatures: vec![sig(100).to_fixed_bytes().to_vec()], + addresses: vec![address(10).to_vec(), address(20).to_vec()], + event_signatures: vec![sig(100).to_vec()], }, ]; @@ -1413,8 +1396,8 @@ mod tests { #[test] fn ethereum_trigger_filter_to_firehose_every_block_plus_logfilter() { - let address = Address::from_low_u64_be; - let sig = H256::from_low_u64_le; + let address = |value: u64| Address::left_padding_from(&value.to_le_bytes()); + let sig = |value: u64| B256::left_padding_from(&value.to_le_bytes()); let mut filter = TriggerFilter { log: EthereumLogFilter { contracts_and_events_graph: GraphMap::new(), @@ -1439,8 +1422,8 @@ mod tests { ); let expected_log_filters = vec![LogFilter { - addresses: vec![address(10).to_fixed_bytes().to_vec()], - event_signatures: vec![sig(101).to_fixed_bytes().to_vec()], + addresses: vec![address(10).to_vec()], + event_signatures: vec![sig(101).to_vec()], }]; let firehose_filter = filter.clone().to_firehose_filter(); @@ -1717,51 +1700,36 @@ mod tests { fn extending_ethereum_call_filter() { let mut base = EthereumCallFilter { contract_addresses_function_signatures: HashMap::from_iter(vec![ - ( - Address::from_low_u64_be(0), - (0, HashSet::from_iter(vec![[0u8; 4]])), - ), - ( - Address::from_low_u64_be(1), - (1, HashSet::from_iter(vec![[1u8; 4]])), - ), + (address(0), (0, HashSet::from_iter(vec![[0u8; 4]]))), + (address(1), (1, HashSet::from_iter(vec![[1u8; 4]]))), ]), wildcard_signatures: HashSet::new(), }; let extension = EthereumCallFilter { contract_addresses_function_signatures: HashMap::from_iter(vec![ - ( - Address::from_low_u64_be(0), - (2, HashSet::from_iter(vec![[2u8; 4]])), - ), - ( - Address::from_low_u64_be(3), - (3, HashSet::from_iter(vec![[3u8; 4]])), - ), + (address(0), (2, HashSet::from_iter(vec![[2u8; 4]]))), + (address(3), (3, HashSet::from_iter(vec![[3u8; 4]]))), ]), wildcard_signatures: HashSet::new(), }; base.extend(extension); assert_eq!( - base.contract_addresses_function_signatures - .get(&Address::from_low_u64_be(0)), + base.contract_addresses_function_signatures.get(&address(0)), Some(&(0, HashSet::from_iter(vec![[0u8; 4], [2u8; 4]]))) ); assert_eq!( - base.contract_addresses_function_signatures - .get(&Address::from_low_u64_be(3)), + base.contract_addresses_function_signatures.get(&address(3)), Some(&(3, HashSet::from_iter(vec![[3u8; 4]]))) ); assert_eq!( - base.contract_addresses_function_signatures - .get(&Address::from_low_u64_be(1)), + base.contract_addresses_function_signatures.get(&address(1)), Some(&(1, HashSet::from_iter(vec![[1u8; 4]]))) ); } - fn address(id: u64) -> Address { - Address::from_low_u64_be(id) + fn address(value: u64) -> Address { + Address::left_padding_from(&value.to_be_bytes()) } fn bytes(value: Vec) -> Bytes { @@ -1777,10 +1745,10 @@ fn complete_log_filter() { // Test a few combinations of complete graphs. for i in [1, 2] { - let events: BTreeSet<_> = (0..i).map(H256::from_low_u64_le).collect(); + let events: BTreeSet<_> = (0..i).map(|n| B256::from([n as u8; 32])).collect(); for j in [1, 1000, 2000, 3000] { - let contracts: BTreeSet<_> = (0..j).map(Address::from_low_u64_le).collect(); + let contracts: BTreeSet<_> = (0..j).map(|n| Address::from([n as u8; 20])).collect(); // Construct the complete bipartite graph with i events and j contracts. 
let mut contracts_and_events_graph = GraphMap::new(); @@ -1832,9 +1800,9 @@ fn complete_log_filter() { #[test] fn test_call_filter_first_signature_not_lost() { use crate::adapter::{EthereumCallFilter, FunctionSelector}; - use graph::prelude::web3::types::Address; + use alloy::primitives::Address; - let addr = Address::from_low_u64_be(1); + let addr = Address::left_padding_from(&1u64.to_be_bytes()); let sig1: FunctionSelector = [0xaa, 0xbb, 0xcc, 0xdd]; let sig2: FunctionSelector = [0x11, 0x22, 0x33, 0x44]; @@ -1853,16 +1821,19 @@ fn test_call_filter_first_signature_not_lost() { #[test] fn log_filter_require_transacion_receipt_method() { + let address = |value: u64| Address::left_padding_from(&value.to_be_bytes()); + let b256 = |value: u64| B256::left_padding_from(&value.to_be_bytes()); + // test data - let event_signature_a = H256::zero(); - let event_signature_b = H256::from_low_u64_be(1); - let event_signature_c = H256::from_low_u64_be(2); - let contract_a = Address::from_low_u64_be(3); - let contract_b = Address::from_low_u64_be(4); - let contract_c = Address::from_low_u64_be(5); - - let wildcard_event_with_receipt = H256::from_low_u64_be(6); - let wildcard_event_without_receipt = H256::from_low_u64_be(7); + let event_signature_a = b256(0); + let event_signature_b = b256(1); + let event_signature_c = b256(2); + let contract_a = address(3); + let contract_b = address(4); + let contract_c = address(5); + + let wildcard_event_with_receipt = b256(6); + let wildcard_event_without_receipt = b256(7); let wildcard_events = [ (wildcard_event_with_receipt, true), (wildcard_event_without_receipt, false), @@ -1872,8 +1843,8 @@ fn log_filter_require_transacion_receipt_method() { let events_with_topic_filters = HashMap::new(); // TODO(krishna): Test events with topic filters - let alien_event_signature = H256::from_low_u64_be(8); // those will not be inserted in the graph - let alien_contract_address = Address::from_low_u64_be(9); + let alien_event_signature = b256(8); // those will not be inserted in the graph + let alien_contract_address = address(9); // test graph nodes let event_a_node = LogFilterNode::Event(event_signature_a); @@ -1918,7 +1889,7 @@ fn log_filter_require_transacion_receipt_method() { events_with_topic_filters, }; - let empty_vec: Vec = vec![]; + let empty_vec: Vec = vec![]; // connected contracts and events graph assert!(filter.requires_transaction_receipt(&event_signature_a, Some(&contract_a), &empty_vec)); diff --git a/chain/ethereum/src/call_helper.rs b/chain/ethereum/src/call_helper.rs new file mode 100644 index 00000000000..5fc6ed80fd0 --- /dev/null +++ b/chain/ethereum/src/call_helper.rs @@ -0,0 +1,141 @@ +use crate::{ContractCallError, ENV_VARS}; +use graph::{ + abi, + data::store::ethereum::call, + prelude::{ + alloy::transports::{RpcError, TransportErrorKind}, + serde_json, Logger, + }, + slog::info, +}; + +// ------------------------------------------------------------------ +// Constants and helper utilities used across eth_call handling +// ------------------------------------------------------------------ + +// Try to check if the call was reverted. The JSON-RPC response for reverts is +// not standardized, so we have ad-hoc checks for each Ethereum client. + +// 0xfe is the "designated bad instruction" of the EVM, and Solidity uses it for +// asserts. +const PARITY_BAD_INSTRUCTION_FE: &str = "Bad instruction fe"; + +// 0xfd is REVERT, but on some contracts, and only on older blocks, +// this happens. Makes sense to consider it a revert as well. 
+const PARITY_BAD_INSTRUCTION_FD: &str = "Bad instruction fd"; + +const PARITY_BAD_JUMP_PREFIX: &str = "Bad jump"; +const PARITY_STACK_LIMIT_PREFIX: &str = "Out of stack"; + +// See f0af4ab0-6b7c-4b68-9141-5b79346a5f61. +const PARITY_OUT_OF_GAS: &str = "Out of gas"; + +// Also covers Nethermind reverts +const PARITY_VM_EXECUTION_ERROR: i64 = -32015; +const PARITY_REVERT_PREFIX: &str = "revert"; + +const XDAI_REVERT: &str = "revert"; + +// Deterministic Geth execution errors. We might need to expand this as +// subgraphs come across other errors. See +// https://github.com/ethereum/go-ethereum/blob/cd57d5cd38ef692de8fbedaa56598b4e9fbfbabc/core/vm/errors.go +const GETH_EXECUTION_ERRORS: &[&str] = &[ + // The "revert" substring covers a few known error messages, including: + // Hardhat: "error: transaction reverted", + // Ganache and Moonbeam: "vm exception while processing transaction: revert", + // Geth: "execution reverted" + // And others. + "revert", + "invalid jump destination", + "invalid opcode", + // Ethereum says 1024 is the stack sizes limit, so this is deterministic. + "stack limit reached 1024", + // See f0af4ab0-6b7c-4b68-9141-5b79346a5f61 for why the gas limit is considered deterministic. + "out of gas", + "stack underflow", +]; + +/// Helper that checks if a geth style RPC error message corresponds to a revert. +fn is_geth_revert_message(message: &str) -> bool { + let env_geth_call_errors = ENV_VARS.geth_eth_call_errors.iter(); + let mut execution_errors = GETH_EXECUTION_ERRORS + .iter() + .copied() + .chain(env_geth_call_errors.map(|s| s.as_str())); + execution_errors.any(|e| message.to_lowercase().contains(e)) +} + +/// Decode a Solidity revert(reason) payload, returning the reason string when possible. +fn as_solidity_revert_reason(bytes: &[u8]) -> Option { + let selector = &tiny_keccak::keccak256(b"Error(string)")[..4]; + if bytes.len() >= 4 && &bytes[..4] == selector { + abi::DynSolType::String + .abi_decode(&bytes[4..]) + .ok() + .and_then(|val| val.clone().as_str().map(ToOwned::to_owned)) + } else { + None + } +} + +/// Interpret the error returned by `eth_call`, distinguishing genuine failures from +/// EVM reverts. Returns `Ok(Null)` for reverts or a proper error otherwise. +pub fn interpret_eth_call_error( + logger: &Logger, + err: RpcError, +) -> Result { + fn reverted(logger: &Logger, reason: &str) -> Result { + info!(logger, "Contract call reverted"; "reason" => reason); + Ok(call::Retval::Null) + } + + if let RpcError::ErrorResp(rpc_error) = &err { + if is_geth_revert_message(&rpc_error.message) { + return reverted(logger, &rpc_error.message); + } + } + + if let RpcError::ErrorResp(rpc_error) = &err { + let code = rpc_error.code; + let data: Option = rpc_error + .data + .as_ref() + .and_then(|d| serde_json::from_str(d.get()).ok()); + + if code == PARITY_VM_EXECUTION_ERROR { + if let Some(data) = data { + if is_parity_revert(&data) { + return reverted(logger, &parity_revert_reason(&data)); + } + } + } + } + + Err(ContractCallError::AlloyError(err)) +} + +fn is_parity_revert(data: &str) -> bool { + data.to_lowercase().starts_with(PARITY_REVERT_PREFIX) + || data.starts_with(PARITY_BAD_JUMP_PREFIX) + || data.starts_with(PARITY_STACK_LIMIT_PREFIX) + || data == PARITY_BAD_INSTRUCTION_FE + || data == PARITY_BAD_INSTRUCTION_FD + || data == PARITY_OUT_OF_GAS + || data == XDAI_REVERT +} + +/// Checks if the given data corresponds to a Parity / Nethermind style EVM +/// revert and, if so, tries to extract a human-readable revert reason. 
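// ---------------------------------------------------------------------------
// Illustrative sketch, not part of the patch: the byte layout that
// `as_solidity_revert_reason` above decodes. A Solidity `revert("...")`
// returns the 4-byte selector of `Error(string)` (0x08c379a0) followed by the
// ABI encoding of the string: a 32-byte offset word (normally 0x20), a 32-byte
// length word, then the UTF-8 bytes padded to a multiple of 32. The hand-rolled
// decoder below (`revert_reason_sketch`, a hypothetical helper) only exists to
// show that layout; the real code uses `abi::DynSolType::String.abi_decode`.
fn revert_reason_sketch(payload: &[u8]) -> Option<String> {
    // keccak256("Error(string)")[..4]
    const ERROR_STRING_SELECTOR: [u8; 4] = [0x08, 0xc3, 0x79, 0xa0];
    if payload.len() < 4 + 32 + 32 || payload[..4] != ERROR_STRING_SELECTOR {
        return None;
    }
    let body = &payload[4..];
    // Head word: offset of the dynamic string within `body`.
    let offset = u64::from_be_bytes(body[24..32].try_into().ok()?) as usize;
    // At `offset`: a 32-byte length word, then the string bytes themselves.
    let len_word = body.get(offset + 24..offset + 32)?;
    let len = u64::from_be_bytes(len_word.try_into().ok()?) as usize;
    let data = body.get(offset + 32..offset + 32 + len)?;
    String::from_utf8(data.to_vec()).ok()
}
// ---------------------------------------------------------------------------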
Returns `Some` +/// with the reason when the error is identified as a revert, otherwise `None`. +fn parity_revert_reason(data: &str) -> String { + if data == PARITY_BAD_INSTRUCTION_FE { + return PARITY_BAD_INSTRUCTION_FE.to_owned(); + } + + // Otherwise try to decode a Solidity revert reason payload. + let payload = data.trim_start_matches(PARITY_REVERT_PREFIX); + hex::decode(payload) + .ok() + .and_then(|decoded| as_solidity_revert_reason(&decoded)) + .unwrap_or_else(|| "no reason".to_owned()) +} diff --git a/chain/ethereum/src/chain.rs b/chain/ethereum/src/chain.rs index 3efbd0c8a2e..964f5ca4df8 100644 --- a/chain/ethereum/src/chain.rs +++ b/chain/ethereum/src/chain.rs @@ -10,7 +10,7 @@ use graph::blockchain::{ use graph::components::network_provider::ChainName; use graph::components::store::{DeploymentCursorTracker, SourceableStore}; use graph::data::subgraph::UnifiedMappingApiVersion; -use graph::firehose::{FirehoseEndpoint, ForkStep}; +use graph::firehose::{FirehoseEndpoint, FirehoseEndpoints, ForkStep}; use graph::futures03::TryStreamExt; use graph::prelude::{ retry, BlockHash, ComponentLoggerConfig, ElasticComponentLoggerConfig, EthereumBlock, @@ -341,6 +341,38 @@ impl std::fmt::Debug for Chain { } } +/// Walk back from a block pointer by following parent pointers. +/// This is the core logic used as a fallback when the cache doesn't have ancestor block. +/// +async fn walk_back_ancestor( + start_ptr: BlockPtr, + offset: BlockNumber, + root: Option, + mut parent_getter: F, +) -> Result, E> +where + F: FnMut(BlockPtr) -> Fut, + Fut: std::future::Future, E>>, +{ + let mut current_ptr = start_ptr; + + for _ in 0..offset { + match parent_getter(current_ptr.clone()).await? { + Some(parent) => { + if let Some(root_hash) = &root { + if parent.hash == *root_hash { + break; + } + } + current_ptr = parent; + } + None => return Ok(None), + } + } + + Ok(Some(current_ptr)) +} + impl Chain { /// Creates a new Ethereum [`Chain`]. pub fn new( @@ -646,16 +678,6 @@ impl BlockFinality { } } -impl<'a> From<&'a BlockFinality> for BlockPtr { - fn from(block: &'a BlockFinality) -> BlockPtr { - match block { - BlockFinality::Final(b) => BlockPtr::from(&**b), - BlockFinality::NonFinal(b) => BlockPtr::from(&b.ethereum_block), - BlockFinality::Ptr(b) => BlockPtr::new(b.hash.clone(), b.number), - } - } -} - impl Block for BlockFinality { fn ptr(&self) -> BlockPtr { match self { @@ -711,11 +733,11 @@ impl Block for BlockFinality { fn timestamp(&self) -> BlockTime { match self { BlockFinality::Final(block) => { - let ts = i64::try_from(block.timestamp.as_u64()).unwrap(); + let ts = i64::try_from(block.timestamp_u64()).unwrap(); BlockTime::since_epoch(ts, 0) } BlockFinality::NonFinal(block) => { - let ts = i64::try_from(block.ethereum_block.block.timestamp.as_u64()).unwrap(); + let ts = i64::try_from(block.ethereum_block.block.timestamp_u64()).unwrap(); BlockTime::since_epoch(ts, 0) } BlockFinality::Ptr(block) => block.timestamp, @@ -1026,26 +1048,108 @@ impl TriggersAdapterTrait for TriggersAdapter { } } + // Find an ancestor block at the specified offset from the given block pointer. + // Primarily used for reorg detection to verify if the indexed position remains + // on the main chain. + // + // Parameters: + // - ptr: Starting block pointer from which to walk backwards (typically the chain head) + // - offset: Number of blocks to traverse backwards (0 returns ptr, 1 returns parent, etc.) + // - root: Optional block hash that serves as a boundary for traversal. 
This is ESSENTIAL + // for chains with skipped blocks (e.g., Filecoin EVM) where block numbers are not + // consecutive. When provided, traversal stops upon reaching the child of root, + // ensuring correct ancestor relationships even with gaps in block numbers. + // + // The function attempts to use the database cache first for performance, + // with RPC fallback implemented to handle cases where the cache is unavailable. async fn ancestor_block( &self, ptr: BlockPtr, offset: BlockNumber, root: Option, ) -> Result, Error> { - let block: Option = self + let ptr_for_log = ptr.clone(); + let cached = self .chain_store .cheap_clone() - .ancestor_block(ptr, offset, root) - .await? - .map(|x| x.0) - .map(json::from_value) - .transpose()?; - Ok(block.map(|block| { - BlockFinality::NonFinal(EthereumBlockWithCalls { - ethereum_block: block, - calls: None, - }) - })) + .ancestor_block(ptr.clone(), offset, root.clone()) + .await?; + + // First check if we have the ancestor in cache and can deserialize it + let block_ptr = match cached { + Some((json, ptr)) => { + // Try to deserialize the cached block + match json::from_value::(json.clone()) { + Ok(block) => { + // Successfully cached and deserialized + return Ok(Some(BlockFinality::NonFinal(EthereumBlockWithCalls { + ethereum_block: block, + calls: None, + }))); + } + Err(e) => { + // Cache hit but deserialization failed + warn!( + self.logger, + "Failed to deserialize cached ancestor block #{} {} (offset {} from #{}): {}. \ + This may indicate stale cache data from a previous version. \ + Falling back to Firehose/RPC.", + ptr.number, + ptr.hash_hex(), + offset, + ptr_for_log.number, + e + ); + ptr + } + } + } + None => { + // Cache miss - fall back to walking the chain via parent_ptr() calls. + // This provides resilience when the block cache is empty (e.g., after truncation). + debug!( + self.logger, + "ancestor_block cache miss for {} at offset {}, walking back via parent_ptr", + ptr_for_log.hash_hex(), + offset + ); + + match walk_back_ancestor( + ptr.clone(), + offset, + root.clone(), + |block_ptr| async move { self.parent_ptr(&block_ptr).await }, + ) + .await? + { + Some(ptr) => ptr, + None => return Ok(None), + } + } + }; + + // Fetch the actual block data for the identified block pointer. + // This path is taken for both cache misses and deserialization failures. + match self.chain_client.as_ref() { + ChainClient::Firehose(endpoints) => { + let block = self + .fetch_block_with_firehose(endpoints, &block_ptr) + .await?; + let ethereum_block: EthereumBlockWithCalls = (&block).try_into()?; + Ok(Some(BlockFinality::NonFinal(ethereum_block))) + } + ChainClient::Rpc(adapters) => { + match self.fetch_full_block_with_rpc(adapters, &block_ptr).await? 
{ + Some(ethereum_block) => { + Ok(Some(BlockFinality::NonFinal(EthereumBlockWithCalls { + ethereum_block, + calls: None, + }))) + } + None => Ok(None), + } + } + } } async fn parent_ptr(&self, block: &BlockPtr) -> Result, Error> { @@ -1056,60 +1160,114 @@ impl TriggersAdapterTrait for TriggersAdapter { let chain_store = self.chain_store.cheap_clone(); // First try to get the block from the store if let Ok(blocks) = chain_store.blocks(vec![block.hash.clone()]).await { - if let Some(block) = blocks.first() { - if let Ok(block) = json::from_value::(block.clone()) { - return Ok(block.parent_ptr()); + if let Some(cached_json) = blocks.first() { + match json::from_value::(cached_json.clone()) { + Ok(block) => { + return Ok(block.parent_ptr()); + } + Err(e) => { + warn!( + self.logger, + "Failed to deserialize cached block #{} {}: {}. \ + This may indicate stale cache data from a previous version. \ + Falling back to Firehose.", + block.number, + block.hash_hex(), + e + ); + } } } } // If not in store, fetch from Firehose - let endpoint = endpoints.endpoint().await?; - let logger = self.logger.clone(); - let retry_log_message = - format!("get_block_by_ptr for block {} with firehose", block); - let block = block.clone(); - - retry(retry_log_message, &logger) - .limit(ENV_VARS.request_retries) - .timeout_secs(ENV_VARS.json_rpc_timeout.as_secs()) - .run(move || { - let endpoint = endpoint.cheap_clone(); - let logger = logger.cheap_clone(); - let block = block.clone(); - async move { - endpoint - .get_block_by_ptr::(&block, &logger) - .await - .context(format!( - "Failed to fetch block by ptr {} from firehose", - block - )) - } - }) + self.fetch_block_with_firehose(endpoints, block) .await? .parent_ptr() } - ChainClient::Rpc(adapters) => { - let blocks = adapters - .cheapest_with(&self.capabilities) - .await? - .load_blocks( - self.logger.cheap_clone(), - self.chain_store.cheap_clone(), - HashSet::from_iter(Some(block.hash_as_h256())), - ) - .await?; - assert_eq!(blocks.len(), 1); - - blocks[0].parent_ptr() - } + ChainClient::Rpc(adapters) => self + .fetch_light_block_with_rpc(adapters, block) + .await? + .expect("block must exist for parent_ptr") + .parent_ptr(), }; Ok(block) } } +impl TriggersAdapter { + async fn fetch_block_with_firehose( + &self, + endpoints: &FirehoseEndpoints, + block_ptr: &BlockPtr, + ) -> Result { + let endpoint = endpoints.endpoint().await?; + let logger = self.logger.clone(); + let retry_log_message = format!("fetch_block_with_firehose {}", block_ptr); + let block_ptr = block_ptr.clone(); + + let block = retry(retry_log_message, &logger) + .limit(ENV_VARS.request_retries) + .timeout_secs(ENV_VARS.json_rpc_timeout.as_secs()) + .run(move || { + let endpoint = endpoint.cheap_clone(); + let logger = logger.cheap_clone(); + let block_ptr = block_ptr.clone(); + async move { + endpoint + .get_block_by_ptr::(&block_ptr, &logger) + .await + .context(format!("Failed to fetch block {} from firehose", block_ptr)) + } + }) + .await?; + + Ok(block) + } + + async fn fetch_light_block_with_rpc( + &self, + adapters: &EthereumNetworkAdapters, + block_ptr: &BlockPtr, + ) -> Result>, Error> { + let blocks = adapters + .cheapest_with(&self.capabilities) + .await? 
+ .load_blocks( + self.logger.cheap_clone(), + self.chain_store.cheap_clone(), + HashSet::from_iter(Some(block_ptr.hash.as_b256())), + ) + .await?; + + Ok(blocks.into_iter().next()) + } + + async fn fetch_full_block_with_rpc( + &self, + adapters: &EthereumNetworkAdapters, + block_ptr: &BlockPtr, + ) -> Result, Error> { + let adapter = adapters.cheapest_with(&self.capabilities).await?; + + let block = adapter + .block_by_hash(&self.logger, block_ptr.hash.as_b256()) + .await?; + + match block { + Some(block) => { + let ethereum_block = adapter + .load_full_block(&self.logger, block) + .await + .map_err(|e| anyhow!("Failed to load full block: {}", e))?; + Ok(Some(ethereum_block)) + } + None => Ok(None), + } + } +} + pub struct FirehoseMapper { adapter: Arc>, filter: Arc, @@ -1374,4 +1532,118 @@ mod tests { assert!(missing.contains(&2)); assert!(missing.contains(&3)); } + + #[tokio::test] + async fn test_walk_back_ancestor() { + use std::collections::HashMap; + + let block_100_hash = BlockHash("block100".as_bytes().to_vec().into_boxed_slice()); + let block_101_hash = BlockHash("block101".as_bytes().to_vec().into_boxed_slice()); + let block_102_hash = BlockHash("block102".as_bytes().to_vec().into_boxed_slice()); + let block_103_hash = BlockHash("block103".as_bytes().to_vec().into_boxed_slice()); + let block_104_hash = BlockHash("block104".as_bytes().to_vec().into_boxed_slice()); + let block_105_hash = BlockHash("block105".as_bytes().to_vec().into_boxed_slice()); + + let block_105 = BlockPtr::new(block_105_hash.clone(), 105); + let block_104 = BlockPtr::new(block_104_hash.clone(), 104); + let block_103 = BlockPtr::new(block_103_hash.clone(), 103); + let block_102 = BlockPtr::new(block_102_hash.clone(), 102); + let block_101 = BlockPtr::new(block_101_hash.clone(), 101); + let block_100 = BlockPtr::new(block_100_hash.clone(), 100); + + let mut parent_map = HashMap::new(); + parent_map.insert(block_105_hash.clone(), block_104.clone()); + parent_map.insert(block_104_hash.clone(), block_103.clone()); + parent_map.insert(block_103_hash.clone(), block_102.clone()); + parent_map.insert(block_102_hash.clone(), block_101.clone()); + parent_map.insert(block_101_hash.clone(), block_100.clone()); + + let result = super::walk_back_ancestor(block_105.clone(), 2, None, |block_ptr| { + let parent = parent_map.get(&block_ptr.hash).cloned(); + async move { Ok::<_, std::convert::Infallible>(parent) } + }) + .await + .unwrap(); + assert_eq!(result, Some(block_103.clone())); + + let result = super::walk_back_ancestor( + block_105.clone(), + 10, + Some(block_102_hash.clone()), + |block_ptr| { + let parent = parent_map.get(&block_ptr.hash).cloned(); + async move { Ok::<_, std::convert::Infallible>(parent) } + }, + ) + .await + .unwrap(); + assert_eq!( + result, + Some(block_103.clone()), + "Should stop at child of root" + ); + } + + #[tokio::test] + async fn test_walk_back_ancestor_skipped_blocks_with_root() { + use std::collections::HashMap; + + let block_100_hash = BlockHash("block100".as_bytes().to_vec().into_boxed_slice()); + let block_101_hash = BlockHash("block101".as_bytes().to_vec().into_boxed_slice()); + let block_102_hash = BlockHash("block102".as_bytes().to_vec().into_boxed_slice()); + let block_110_hash = BlockHash("block110".as_bytes().to_vec().into_boxed_slice()); + let block_111_hash = BlockHash("block111".as_bytes().to_vec().into_boxed_slice()); + let block_112_hash = BlockHash("block112".as_bytes().to_vec().into_boxed_slice()); + let block_120_hash = 
BlockHash("block120".as_bytes().to_vec().into_boxed_slice()); + + let block_120 = BlockPtr::new(block_120_hash.clone(), 120); + let block_112 = BlockPtr::new(block_112_hash.clone(), 112); + let block_111 = BlockPtr::new(block_111_hash.clone(), 111); + let block_110 = BlockPtr::new(block_110_hash.clone(), 110); + let block_102 = BlockPtr::new(block_102_hash.clone(), 102); + let block_101 = BlockPtr::new(block_101_hash.clone(), 101); + let block_100 = BlockPtr::new(block_100_hash.clone(), 100); + + let mut parent_map = HashMap::new(); + parent_map.insert(block_120_hash.clone(), block_112.clone()); + parent_map.insert(block_112_hash.clone(), block_111.clone()); + parent_map.insert(block_111_hash.clone(), block_110.clone()); + parent_map.insert(block_110_hash.clone(), block_102.clone()); + parent_map.insert(block_102_hash.clone(), block_101.clone()); + parent_map.insert(block_101_hash.clone(), block_100.clone()); + + let result = super::walk_back_ancestor( + block_120.clone(), + 10, + Some(block_110_hash.clone()), + |block_ptr| { + let parent = parent_map.get(&block_ptr.hash).cloned(); + async move { Ok::<_, std::convert::Infallible>(parent) } + }, + ) + .await + .unwrap(); + assert_eq!( + result, + Some(block_111.clone()), + "root=110: should stop at 111 (child of root)" + ); + + let result = super::walk_back_ancestor( + block_120.clone(), + 10, + Some(block_101_hash.clone()), + |block_ptr| { + let parent = parent_map.get(&block_ptr.hash).cloned(); + async move { Ok::<_, std::convert::Infallible>(parent) } + }, + ) + .await + .unwrap(); + assert_eq!( + result, + Some(block_102.clone()), + "root=101: should stop at 102 (child of root, across skip)" + ); + } } diff --git a/chain/ethereum/src/codec.rs b/chain/ethereum/src/codec.rs index 935b294599b..0041335f659 100644 --- a/chain/ethereum/src/codec.rs +++ b/chain/ethereum/src/codec.rs @@ -7,9 +7,16 @@ use graph::{ blockchain::{ self, Block as BlockchainBlock, BlockPtr, BlockTime, ChainStoreBlock, ChainStoreData, }, + components::ethereum::{AnyBlock, AnyHeader, AnyRpcHeader, AnyRpcTransaction, AnyTxEnvelope}, prelude::{ - web3, - web3::types::{Bytes, H160, H2048, H256, H64, U256, U64}, + alloy::{ + self, + consensus::{ReceiptWithBloom, TxEnvelope, TxType}, + network::AnyReceiptEnvelope, + primitives::{aliases::B2048, Address, Bloom, Bytes, LogData, B256, U256}, + rpc::types::{self as alloy_rpc_types, AccessList, AccessListItem, Transaction}, + serde::WithOtherFields, + }, BlockNumber, Error, EthereumBlock, EthereumBlockWithCalls, EthereumCall, LightEthereumBlock, }, @@ -34,13 +41,13 @@ where } } -impl TryDecodeProto<[u8; 256], H2048> for &[u8] {} -impl TryDecodeProto<[u8; 32], H256> for &[u8] {} -impl TryDecodeProto<[u8; 20], H160> for &[u8] {} +impl TryDecodeProto<[u8; 32], B256> for &[u8] {} +impl TryDecodeProto<[u8; 256], B2048> for &[u8] {} +impl TryDecodeProto<[u8; 20], Address> for &[u8] {} -impl From<&BigInt> for web3::types::U256 { +impl From<&BigInt> for U256 { fn from(val: &BigInt) -> Self { - web3::types::U256::from_big_endian(&val.bytes) + U256::from_be_slice(&val.bytes) } } @@ -68,9 +75,9 @@ impl<'a> TryInto for CallAt<'a> { .value .as_ref() .map_or_else(|| U256::from(0), |v| v.into()), - gas_used: U256::from(self.call.gas_consumed), - input: Bytes(self.call.input.clone()), - output: Bytes(self.call.return_data.clone()), + gas_used: self.call.gas_consumed, + input: Bytes::from(self.call.input.clone()), + output: Bytes::from(self.call.return_data.clone()), block_hash: self.block.hash.try_decode_proto("call block hash")?, block_number: 
self.block.number as i32, transaction_hash: Some(self.trace.hash.try_decode_proto("call transaction hash")?), @@ -79,41 +86,6 @@ impl<'a> TryInto for CallAt<'a> { } } -impl TryInto for Call { - type Error = Error; - - fn try_into(self) -> Result { - Ok(web3::types::Call { - from: self.caller.try_decode_proto("call from address")?, - to: self.address.try_decode_proto("call to address")?, - value: self - .value - .as_ref() - .map_or_else(|| U256::from(0), |v| v.into()), - gas: U256::from(self.gas_limit), - input: Bytes::from(self.input.clone()), - call_type: CallType::try_from(self.call_type) - .map_err(|_| graph::anyhow::anyhow!("invalid call type: {}", self.call_type))? - .into(), - }) - } -} - -impl From for web3::types::CallType { - fn from(val: CallType) -> Self { - match val { - CallType::Unspecified => web3::types::CallType::None, - CallType::Call => web3::types::CallType::Call, - CallType::Callcode => web3::types::CallType::CallCode, - CallType::Delegate => web3::types::CallType::DelegateCall, - CallType::Static => web3::types::CallType::StaticCall, - - // FIXME (SF): Really not sure what this should map to, we are using None for now, need to revisit - CallType::Create => web3::types::CallType::None, - } - } -} - pub struct LogAt<'a> { log: &'a Log, block: &'a Block, @@ -126,46 +98,38 @@ impl<'a> LogAt<'a> { } } -impl<'a> TryInto for LogAt<'a> { +impl<'a> TryInto for LogAt<'a> { type Error = Error; - fn try_into(self) -> Result { - Ok(web3::types::Log { - address: self.log.address.try_decode_proto("log address")?, - topics: self - .log - .topics - .iter() - .map(|t| t.try_decode_proto("topic")) - .collect::, Error>>()?, - data: Bytes::from(self.log.data.clone()), + fn try_into(self) -> Result { + let topics = self + .log + .topics + .iter() + .map(|t| t.try_decode_proto("topic")) + .collect::, Error>>()?; + + Ok(alloy::rpc::types::Log { + inner: alloy::primitives::Log { + address: self.log.address.try_decode_proto("log address")?, + data: LogData::new(topics, self.log.data.clone().into()) + .ok_or_else(|| format_err!("invalid log data"))?, + }, block_hash: Some(self.block.hash.try_decode_proto("log block hash")?), - block_number: Some(U64::from(self.block.number)), + block_number: Some(self.block.number), transaction_hash: Some(self.trace.hash.try_decode_proto("log transaction hash")?), - transaction_index: Some(U64::from(self.trace.index as u64)), - log_index: Some(U256::from(self.log.block_index)), - transaction_log_index: Some(U256::from(self.log.index)), - log_type: None, - removed: None, + transaction_index: Some(self.trace.index as u64), + log_index: Some(self.log.block_index as u64), + removed: false, + block_timestamp: self + .block + .header + .as_ref() + .and_then(|h| h.timestamp.as_ref().map(|t| t.seconds as u64)), }) } } -impl TryFrom for Option { - type Error = Error; - - fn try_from(val: TransactionTraceStatus) -> Result { - match val { - TransactionTraceStatus::Unknown => Err(format_err!( - "Got a transaction trace with status UNKNOWN, datasource is broken" - )), - TransactionTraceStatus::Succeeded => Ok(Some(web3::types::U64::from(1))), - TransactionTraceStatus::Failed => Ok(Some(web3::types::U64::from(0))), - TransactionTraceStatus::Reverted => Ok(Some(web3::types::U64::from(0))), - } - } -} - pub struct TransactionTraceAt<'a> { trace: &'a TransactionTrace, block: &'a Block, @@ -177,34 +141,316 @@ impl<'a> TransactionTraceAt<'a> { } } -impl<'a> TryInto for TransactionTraceAt<'a> { +impl<'a> TryInto> for TransactionTraceAt<'a> { type Error = Error; - fn try_into(self) 
-> Result { - Ok(web3::types::Transaction { - hash: self.trace.hash.try_decode_proto("transaction hash")?, - nonce: U256::from(self.trace.nonce), - block_hash: Some(self.block.hash.try_decode_proto("transaction block hash")?), - block_number: Some(U64::from(self.block.number)), - transaction_index: Some(U64::from(self.trace.index as u64)), - from: Some( - self.trace - .from - .try_decode_proto("transaction from address")?, - ), - to: get_to_address(self.trace)?, - value: self.trace.value.as_ref().map_or(U256::zero(), |x| x.into()), - gas_price: self.trace.gas_price.as_ref().map(|x| x.into()), - gas: U256::from(self.trace.gas_limit), - input: Bytes::from(self.trace.input.clone()), - v: None, - r: None, - s: None, - raw: None, - access_list: None, - max_fee_per_gas: None, - max_priority_fee_per_gas: None, - transaction_type: None, + fn try_into(self) -> Result, Self::Error> { + use alloy::{ + consensus::transaction::Recovered, + consensus::{ + Signed, TxEip1559, TxEip2930, TxEip4844, TxEip4844Variant, TxEip7702, TxLegacy, + }, + network::{AnyTxEnvelope, AnyTxType, UnknownTxEnvelope, UnknownTypedTransaction}, + primitives::{Bytes, TxKind, U256}, + rpc::types::Transaction as AlloyTransaction, + serde::OtherFields, + }; + use std::collections::BTreeMap; + + // Extract data from trace and block + let block_hash = self.block.hash.try_decode_proto("transaction block hash")?; + let block_number = self.block.number; + let transaction_index = Some(self.trace.index as u64); + let from_address = self + .trace + .from + .try_decode_proto("transaction from address")?; + let to = get_to_address(self.trace)?; + let value = self.trace.value.as_ref().map_or(U256::ZERO, |x| x.into()); + let gas_price = self.trace.gas_price.as_ref().map_or(0u128, |x| { + let val: U256 = x.into(); + val.to::() + }); + let gas_limit = self.trace.gas_limit; + let input = Bytes::from(self.trace.input.clone()); + + let tx_type_u64 = u64::try_from(self.trace.r#type).map_err(|_| { + format_err!( + "Invalid transaction type value {} in transaction trace. 
Transaction type must be a valid u64.", + self.trace.r#type + ) + })?; + + // Try to convert to known Ethereum transaction type + let tx_type_result = TxType::try_from(tx_type_u64); + + // If this is an unknown transaction type, create an UnknownTxEnvelope + if tx_type_result.is_err() { + let mut fields_map = BTreeMap::new(); + + fields_map.insert( + "nonce".to_string(), + jsonrpc_core::serde_json::json!(format!("0x{:x}", self.trace.nonce)), + ); + fields_map.insert( + "from".to_string(), + jsonrpc_core::serde_json::json!(format!("{:?}", from_address)), + ); + if let Some(to_addr) = to { + fields_map.insert( + "to".to_string(), + jsonrpc_core::serde_json::json!(format!("{:?}", to_addr)), + ); + } + fields_map.insert( + "value".to_string(), + jsonrpc_core::serde_json::json!(format!("0x{:x}", value)), + ); + fields_map.insert( + "gas".to_string(), + jsonrpc_core::serde_json::json!(format!("0x{:x}", gas_limit)), + ); + fields_map.insert( + "gasPrice".to_string(), + jsonrpc_core::serde_json::json!(format!("0x{:x}", gas_price)), + ); + fields_map.insert( + "input".to_string(), + jsonrpc_core::serde_json::json!(format!("0x{}", hex::encode(&input))), + ); + + let fields = OtherFields::new(fields_map); + let unknown_tx = UnknownTypedTransaction { + ty: AnyTxType(tx_type_u64 as u8), + fields, + memo: Default::default(), + }; + + let tx_hash = self.trace.hash.try_decode_proto("transaction hash")?; + let unknown_envelope = UnknownTxEnvelope { + hash: tx_hash, + inner: unknown_tx, + }; + + let any_envelope = AnyTxEnvelope::Unknown(unknown_envelope); + let recovered = Recovered::new_unchecked(any_envelope, from_address); + + return Ok(AlloyTransaction { + inner: recovered, + block_hash: Some(block_hash), + block_number: Some(block_number), + transaction_index, + effective_gas_price: if gas_price > 0 { Some(gas_price) } else { None }, + }); + } + + let tx_type = tx_type_result.unwrap(); + let nonce = self.trace.nonce; + + // Extract EIP-1559 fee fields from trace + let max_fee_per_gas_u128 = self.trace.max_fee_per_gas.as_ref().map_or(gas_price, |x| { + let val: U256 = x.into(); + val.to::() + }); + + let max_priority_fee_per_gas_u128 = + self.trace + .max_priority_fee_per_gas + .as_ref() + .map_or(0u128, |x| { + let val: U256 = x.into(); + val.to::() + }); + + // Extract access list from trace + let access_list: AccessList = self + .trace + .access_list + .iter() + .map(|access_tuple| -> Result<_, Error> { + let address = access_tuple + .address + .try_decode_proto("access tuple address")?; + let storage_keys = access_tuple + .storage_keys + .iter() + .map(|key| key.try_decode_proto("storage key")) + .collect::, _>>()?; + Ok(AccessListItem { + address, + storage_keys, + }) + }) + .collect::, Error>>()? + .into(); + + // Extract actual signature components from trace + let signature = extract_signature_from_trace(self.trace, tx_type)?; + + let to_kind = match to { + Some(addr) => TxKind::Call(addr), + None => TxKind::Create, + }; + + let envelope = match tx_type { + TxType::Legacy => { + let tx = TxLegacy { + chain_id: None, + nonce, + gas_price, + gas_limit, + to: to_kind, + value, + input: input.clone(), + }; + let signed_tx = Signed::new_unchecked( + tx, + signature, + self.trace.hash.try_decode_proto("transaction hash")?, + ); + TxEnvelope::Legacy(signed_tx) + } + TxType::Eip2930 => { + let tx = TxEip2930 { + // Firehose protobuf doesn't provide chain_id for transactions. + // Using 0 as placeholder since the transaction has already been validated on-chain. 
+ chain_id: 0, + nonce, + gas_price, + gas_limit, + to: to_kind, + value, + access_list: access_list.clone(), // Use actual access list from trace + input: input.clone(), + }; + let signed_tx = Signed::new_unchecked( + tx, + signature, + self.trace.hash.try_decode_proto("transaction hash")?, + ); + TxEnvelope::Eip2930(signed_tx) + } + TxType::Eip1559 => { + let tx = TxEip1559 { + // Firehose protobuf doesn't provide chain_id for transactions. + // Using 0 as placeholder since the transaction has already been validated on-chain. + chain_id: 0, + nonce, + gas_limit, + max_fee_per_gas: max_fee_per_gas_u128, + max_priority_fee_per_gas: max_priority_fee_per_gas_u128, + to: to_kind, + value, + access_list: access_list.clone(), // Use actual access list from trace + input: input.clone(), + }; + let signed_tx = Signed::new_unchecked( + tx, + signature, + self.trace.hash.try_decode_proto("transaction hash")?, + ); + TxEnvelope::Eip1559(signed_tx) + } + TxType::Eip4844 => { + let to_address = to.ok_or_else(|| { + format_err!("EIP-4844 transactions cannot be contract creation transactions. The 'to' field must contain a valid address.") + })?; + + let blob_versioned_hashes: Vec = self + .trace + .blob_hashes + .iter() + .map(|hash| hash.try_decode_proto("blob hash")) + .collect::, _>>()?; + + let max_fee_per_blob_gas_u128 = + self.trace.blob_gas_fee_cap.as_ref().map_or(0u128, |x| { + let val: U256 = x.into(); + val.to::() + }); + + let tx_eip4844 = TxEip4844 { + // Firehose protobuf doesn't provide chain_id for transactions. + // Using 0 as placeholder since the transaction has already been validated on-chain. + chain_id: 0, + nonce, + gas_limit, + max_fee_per_gas: max_fee_per_gas_u128, + max_priority_fee_per_gas: max_priority_fee_per_gas_u128, + to: to_address, + value, + access_list: access_list.clone(), // Use actual access list from trace + blob_versioned_hashes, + max_fee_per_blob_gas: max_fee_per_blob_gas_u128, + input: input.clone(), + }; + let tx = TxEip4844Variant::TxEip4844(tx_eip4844); + let signed_tx = Signed::new_unchecked( + tx, + signature, + self.trace.hash.try_decode_proto("transaction hash")?, + ); + TxEnvelope::Eip4844(signed_tx) + } + TxType::Eip7702 => { + let to_address = to.ok_or_else(|| { + format_err!("EIP-7702 transactions cannot be contract creation transactions. The 'to' field must contain a valid address.") + })?; + + // Convert set_code_authorizations to alloy authorization list + let authorization_list: Vec = self + .trace + .set_code_authorizations + .iter() + .map(|auth| -> Result<_, Error> { + let inner = alloy::eips::eip7702::Authorization { + chain_id: U256::from_be_slice(&auth.chain_id), + address: auth.address.try_decode_proto("authorization address")?, + nonce: auth.nonce, + }; + + let r = U256::from_be_slice(&auth.r); + let s = U256::from_be_slice(&auth.s); + let y_parity = auth.v as u8; + + Ok(alloy::eips::eip7702::SignedAuthorization::new_unchecked( + inner, y_parity, r, s, + )) + }) + .collect::, Error>>()?; + + let tx = TxEip7702 { + // Firehose protobuf doesn't provide chain_id for transactions. + // Using 0 as placeholder since the transaction has already been validated on-chain. 
+ chain_id: 0, + nonce, + gas_limit, + max_fee_per_gas: max_fee_per_gas_u128, + max_priority_fee_per_gas: max_priority_fee_per_gas_u128, + to: to_address, + value, + access_list: access_list.clone(), // Use actual access list from trace + authorization_list, + input: input.clone(), + }; + let signed_tx = Signed::new_unchecked( + tx, + signature, + self.trace.hash.try_decode_proto("transaction hash")?, + ); + TxEnvelope::Eip7702(signed_tx) + } + }; + + let any_envelope = AnyTxEnvelope::Ethereum(envelope); + let recovered = Recovered::new_unchecked(any_envelope, from_address); + + Ok(AlloyTransaction { + inner: recovered, + block_hash: Some(block_hash), + block_number: Some(block_number), + transaction_index, + effective_gas_price: if gas_price > 0 { Some(gas_price) } else { None }, // gas_price already contains effective gas price per protobuf spec }) } } @@ -217,143 +463,136 @@ impl TryInto for &Block { } } +impl TryInto for &Block { + type Error = Error; + + fn try_into(self) -> Result { + let header = self.header(); + + let block_hash = self.hash.try_decode_proto("block hash")?; + let consensus_header = alloy::consensus::Header { + number: header.number, + beneficiary: header.coinbase.try_decode_proto("author / coinbase")?, + parent_hash: header.parent_hash.try_decode_proto("parent hash")?, + ommers_hash: header.uncle_hash.try_decode_proto("uncle hash")?, + state_root: header.state_root.try_decode_proto("state root")?, + transactions_root: header + .transactions_root + .try_decode_proto("transactions root")?, + receipts_root: header.receipt_root.try_decode_proto("receipt root")?, + gas_used: header.gas_used, + gas_limit: header.gas_limit, + base_fee_per_gas: header.base_fee_per_gas.as_ref().map(|v| { + let val: U256 = v.into(); + val.to::() + }), + extra_data: Bytes::from(header.extra_data.clone()), + logs_bloom: if header.logs_bloom.is_empty() { + Bloom::ZERO + } else { + Bloom::try_from(header.logs_bloom.as_slice())? + }, + timestamp: header.timestamp.as_ref().map_or(0, |v| v.seconds as u64), + difficulty: header + .difficulty + .as_ref() + .map_or_else(|| U256::ZERO, |v| v.into()), + + mix_hash: header.mix_hash.try_decode_proto("mix hash")?, + nonce: header.nonce.into(), + + withdrawals_root: if header.withdrawals_root.is_empty() { + None + } else { + Some( + header + .withdrawals_root + .try_decode_proto("withdrawals root")?, + ) + }, + blob_gas_used: header.blob_gas_used, + excess_blob_gas: header.excess_blob_gas, + parent_beacon_block_root: if header.parent_beacon_root.is_empty() { + None + } else { + Some( + header + .parent_beacon_root + .try_decode_proto("parent beacon root")?, + ) + }, + requests_hash: if header.requests_hash.is_empty() { + None + } else { + Some(header.requests_hash.try_decode_proto("requests hash")?) 
+ }, + }; + + let rpc_header = alloy::rpc::types::Header { + hash: block_hash, + inner: consensus_header, + total_difficulty: { + #[allow(deprecated)] + let total_difficulty = &header.total_difficulty; + total_difficulty.as_ref().map(|v| v.into()) + }, + size: Some(U256::from(self.size)), + }; + + let transactions = self + .transaction_traces + .iter() + .map(|t| TransactionTraceAt::new(t, self).try_into()) + .collect::>, Error>>()?; + + let uncles = self + .uncles + .iter() + .map(|u| u.hash.try_decode_proto("uncle hash")) + .collect::, _>>()?; + + use alloy::rpc::types::Block; + + let any_header: AnyRpcHeader = rpc_header.map(AnyHeader::from); + + let any_transactions: Vec = transactions + .into_iter() + .map(|tx| AnyRpcTransaction::new(WithOtherFields::new(tx))) + .collect(); + + let any_block = Block { + header: any_header, + transactions: alloy::rpc::types::BlockTransactions::Full(any_transactions), + uncles, + withdrawals: None, + }; + + Ok(AnyBlock::new(WithOtherFields::new(any_block))) + } +} + impl TryInto for &Block { type Error = Error; fn try_into(self) -> Result { - let header = self.header.as_ref().ok_or_else(|| { - format_err!("block header should always be present from gRPC Firehose") - })?; - + let alloy_block: AnyBlock = self.try_into()?; + + let transaction_receipts = self + .transaction_traces + .iter() + .filter_map(|t| transaction_trace_to_alloy_txn_reciept(t, self).transpose()) + .collect::, Error>>()? + .into_iter() + // Transaction receipts will be shared along the code, so we put them into an + // Arc here to avoid excessive cloning. + .map(Arc::new) + .collect(); + + #[allow(unreachable_code)] let block = EthereumBlockWithCalls { ethereum_block: EthereumBlock { - block: Arc::new(LightEthereumBlock { - hash: Some(self.hash.try_decode_proto("block hash")?), - number: Some(U64::from(self.number)), - author: header.coinbase.try_decode_proto("author / coinbase")?, - parent_hash: header.parent_hash.try_decode_proto("parent hash")?, - uncles_hash: header.uncle_hash.try_decode_proto("uncle hash")?, - state_root: header.state_root.try_decode_proto("state root")?, - transactions_root: header - .transactions_root - .try_decode_proto("transactions root")?, - receipts_root: header.receipt_root.try_decode_proto("receipt root")?, - gas_used: U256::from(header.gas_used), - gas_limit: U256::from(header.gas_limit), - base_fee_per_gas: Some( - header - .base_fee_per_gas - .as_ref() - .map_or_else(U256::default, |v| v.into()), - ), - extra_data: Bytes::from(header.extra_data.clone()), - logs_bloom: match &header.logs_bloom.len() { - 0 => None, - _ => Some(header.logs_bloom.try_decode_proto("logs bloom")?), - }, - timestamp: header - .timestamp - .as_ref() - .map_or_else(U256::default, |v| U256::from(v.seconds)), - difficulty: header - .difficulty - .as_ref() - .map_or_else(U256::default, |v| v.into()), - total_difficulty: Some( - header - .total_difficulty - .as_ref() - .map_or_else(U256::default, |v| v.into()), - ), - // FIXME (SF): Firehose does not have seal fields, are they really used? Might be required for POA chains only also, I've seen that stuff on xDai (is this important?) 
- seal_fields: vec![], - uncles: self - .uncles - .iter() - .map(|u| u.hash.try_decode_proto("uncle hash")) - .collect::, _>>()?, - transactions: self - .transaction_traces - .iter() - .map(|t| TransactionTraceAt::new(t, self).try_into()) - .collect::, Error>>()?, - size: Some(U256::from(self.size)), - mix_hash: Some(header.mix_hash.try_decode_proto("mix hash")?), - nonce: Some(H64::from_low_u64_be(header.nonce)), - }), - transaction_receipts: self - .transaction_traces - .iter() - .filter_map(|t| { - t.receipt.as_ref().map(|r| { - Ok(web3::types::TransactionReceipt { - transaction_hash: t.hash.try_decode_proto("transaction hash")?, - transaction_index: U64::from(t.index), - block_hash: Some( - self.hash.try_decode_proto("transaction block hash")?, - ), - block_number: Some(U64::from(self.number)), - cumulative_gas_used: U256::from(r.cumulative_gas_used), - // FIXME (SF): What is the rule here about gas_used being None, when it's 0? - gas_used: Some(U256::from(t.gas_used)), - contract_address: { - match t.calls.len() { - 0 => None, - _ => { - match CallType::try_from(t.calls[0].call_type).map_err( - |_| { - graph::anyhow::anyhow!( - "invalid call type: {}", - t.calls[0].call_type, - ) - }, - )? { - CallType::Create => { - Some(t.calls[0].address.try_decode_proto( - "transaction contract address", - )?) - } - _ => None, - } - } - } - }, - logs: r - .logs - .iter() - .map(|l| LogAt::new(l, self, t).try_into()) - .collect::, Error>>()?, - status: TransactionTraceStatus::try_from(t.status) - .map_err(|_| { - graph::anyhow::anyhow!( - "invalid transaction trace status: {}", - t.status - ) - })? - .try_into()?, - root: match r.state_root.len() { - 0 => None, // FIXME (SF): should this instead map to [0;32]? - // FIXME (SF): if len < 32, what do we do? - _ => Some( - r.state_root.try_decode_proto("transaction state root")?, - ), - }, - logs_bloom: r - .logs_bloom - .try_decode_proto("transaction logs bloom")?, - from: t.from.try_decode_proto("transaction from")?, - to: get_to_address(t)?, - transaction_type: None, - effective_gas_price: None, - }) - }) - }) - .collect::, Error>>()? - .into_iter() - // Transaction receipts will be shared along the code, so we put them into an - // Arc here to avoid excessive cloning. - .map(Arc::new) - .collect(), + block: Arc::new(LightEthereumBlock::new(alloy_block)), + transaction_receipts, }, // Comment (437a9f17-67cc-478f-80a3-804fe554b227): This Some() will avoid calls in the triggers_in_block // TODO: Refactor in a way that this is no longer needed. @@ -376,12 +615,118 @@ impl TryInto for &Block { } } +fn transaction_trace_to_alloy_txn_reciept( + t: &TransactionTrace, + block: &Block, +) -> Result, Error> { + use alloy::consensus::{Eip658Value, Receipt}; + let r = t.receipt.as_ref(); + + if r.is_none() { + return Ok(None); + } + + let r = r.unwrap(); + + let contract_address = match t.calls.len() { + 0 => None, + _ => { + match CallType::try_from(t.calls[0].call_type).map_err(|_| { + graph::anyhow::anyhow!("invalid call type: {}", t.calls[0].call_type) + })? { + CallType::Create => Some( + t.calls[0] + .address + .try_decode_proto("transaction contract address")?, + ), + _ => None, + } + } + }; + + let state_root = match &r.state_root { + b if b.is_empty() => None, + _ => Some(r.state_root.try_decode_proto("transaction state root")?), + }; + + let status = match TransactionTraceStatus::try_from(t.status) + .map_err(|_| format_err!("invalid transaction trace status: {}", t.status))? 
+ { + TransactionTraceStatus::Unknown => { + return Err(format_err!( + "Transaction trace has UNKNOWN status; datasource is broken" + )) + } + TransactionTraceStatus::Succeeded => true, + TransactionTraceStatus::Failed | TransactionTraceStatus::Reverted => false, + }; + + // [EIP-658]: https://eips.ethereum.org/EIPS/eip-658 + // Before EIP-658, the state root field was used to indicate the status of the transaction. + // After EIP-658, the status field is used to indicate the status of the transaction. + let status = match state_root { + Some(root) => Eip658Value::PostState(root), + None => Eip658Value::Eip658(status), + }; + + let logs: Vec = r + .logs + .iter() + .map(|l| LogAt::new(l, block, t).try_into()) + .collect::, Error>>()?; + + let core_receipt = Receipt { + status, + cumulative_gas_used: r.cumulative_gas_used, + logs, + }; + + let logs_bloom = Bloom::try_from(r.logs_bloom.as_slice())?; + + let receipt_with_bloom = ReceiptWithBloom::new(core_receipt, logs_bloom); + + let tx_type_u64 = u64::try_from(t.r#type).map_err(|_| { + format_err!( + "Invalid transaction type value {} in transaction receipt. Transaction type must be a valid u64.", + t.r#type + ) + })?; + + let any_envelope = AnyReceiptEnvelope { + inner: receipt_with_bloom, + r#type: tx_type_u64 as u8, + }; + + let receipt = alloy_rpc_types::TransactionReceipt { + transaction_hash: t.hash.try_decode_proto("transaction hash")?, + transaction_index: Some(t.index as u64), + block_hash: Some(block.hash.try_decode_proto("transaction block hash")?), + block_number: Some(block.number), + gas_used: t.gas_used, + contract_address, + from: t.from.try_decode_proto("transaction from")?, + to: get_to_address(t)?, + effective_gas_price: t.gas_price.as_ref().map_or(0u128, |x| { + let val: U256 = x.into(); + val.to::() + }), // gas_price already contains effective gas price per protobuf spec + blob_gas_used: r.blob_gas_used, + blob_gas_price: r.blob_gas_price.as_ref().map(|x| { + let val: U256 = x.into(); + val.to::() + }), + inner: any_envelope, + }; + + Ok(Some(WithOtherFields::new(receipt))) +} + impl BlockHeader { pub fn parent_ptr(&self) -> Option { match self.parent_hash.len() { 0 => None, _ => Some(BlockPtr::from(( - H256::from_slice(self.parent_hash.as_ref()), + B256::from_slice(self.parent_hash.as_ref()), self.number - 1, ))), } @@ -390,13 +735,13 @@ impl BlockHeader { impl<'a> From<&'a BlockHeader> for BlockPtr { fn from(b: &'a BlockHeader) -> BlockPtr { - BlockPtr::from((H256::from_slice(b.hash.as_ref()), b.number)) + BlockPtr::from((B256::from_slice(b.hash.as_ref()), b.number)) } } impl<'a> From<&'a Block> for BlockPtr { fn from(b: &'a Block) -> BlockPtr { - BlockPtr::from((H256::from_slice(b.hash.as_ref()), b.number)) + BlockPtr::from((B256::from_slice(b.hash.as_ref()), b.number)) } } @@ -499,21 +844,6 @@ impl BlockchainBlock for HeaderOnlyBlock { } } -fn get_to_address(trace: &TransactionTrace) -> Result, Error> { - // Try to detect contract creation transactions, which have no 'to' address - let is_contract_creation = trace.to.is_empty() - || trace - .calls - .first() - .is_some_and(|call| CallType::try_from(call.call_type) == Ok(CallType::Create)); - - if is_contract_creation { - Ok(None) - } else { - Ok(Some(trace.to.try_decode_proto("transaction to address")?)) - } -} - #[cfg(test)] mod test { use graph::{blockchain::Block as _, prelude::chrono::Utc}; @@ -548,4 +878,130 @@ mod test { format!(r#"{{"block":{{"data":null,"timestamp":"{}"}}}}"#, now) ); } + + #[test] + fn test_unknown_transaction_type_conversion() { + use 
super::TransactionTraceAt; + use crate::codec::TransactionTrace; + use graph::prelude::alloy::network::AnyTxEnvelope; + use graph::prelude::alloy::primitives::B256; + + let mut block = Block::default(); + let mut header = BlockHeader::default(); + header.number = 123456; + header.timestamp = Some(Timestamp { + seconds: 1234567890, + nanos: 0, + }); + block.header = Some(header); + block.number = 123456; + block.hash = vec![0u8; 32]; + + let mut trace = TransactionTrace::default(); + trace.r#type = 126; // 0x7e Optimism deposit transaction + trace.hash = vec![1u8; 32]; + trace.from = vec![2u8; 20]; + trace.to = vec![3u8; 20]; + trace.nonce = 42; + trace.gas_limit = 21000; + trace.index = 0; + + let trace_at = TransactionTraceAt::new(&trace, &block); + let result: Result< + graph::prelude::alloy::rpc::types::Transaction, + graph::prelude::Error, + > = trace_at.try_into(); + + assert!( + result.is_ok(), + "Should successfully convert unknown transaction type" + ); + + let tx = result.unwrap(); + + match tx.inner.inner() { + AnyTxEnvelope::Unknown(unknown_envelope) => { + assert_eq!(unknown_envelope.inner.ty.0, 126); + assert_eq!(unknown_envelope.hash, B256::from_slice(&trace.hash)); + assert!( + !unknown_envelope.inner.fields.is_empty(), + "OtherFields should contain transaction data" + ); + } + _ => panic!("Expected AnyTxEnvelope::Unknown, got Ethereum variant"), + } + + assert_eq!(tx.block_number, Some(123456)); + assert_eq!(tx.transaction_index, Some(0)); + assert_eq!(tx.block_hash, Some(B256::from_slice(&block.hash))); + } + + #[test] + fn test_unknown_receipt_type_conversion() { + use super::transaction_trace_to_alloy_txn_reciept; + use crate::codec::TransactionTrace; + + let mut block = Block::default(); + let mut header = BlockHeader::default(); + header.number = 123456; + block.header = Some(header); + block.hash = vec![0u8; 32]; + + let mut trace = TransactionTrace::default(); + trace.r#type = 126; // 0x7e Optimism deposit transaction + trace.hash = vec![1u8; 32]; + trace.from = vec![2u8; 20]; + trace.to = vec![3u8; 20]; + trace.index = 0; + trace.gas_used = 21000; + trace.status = 1; + + let mut receipt = super::TransactionReceipt::default(); + receipt.cumulative_gas_used = 21000; + receipt.logs_bloom = vec![0u8; 256]; + trace.receipt = Some(receipt); + + let result = transaction_trace_to_alloy_txn_reciept(&trace, &block); + + assert!( + result.is_ok(), + "Should successfully convert receipt with unknown transaction type" + ); + + let receipt_opt = result.unwrap(); + assert!(receipt_opt.is_some(), "Receipt should be present"); + + let receipt = receipt_opt.unwrap(); + + assert_eq!(receipt.inner.inner.r#type, 126); + assert_eq!(receipt.gas_used, 21000); + assert_eq!(receipt.transaction_index, Some(0)); + } +} + +fn extract_signature_from_trace( + _trace: &TransactionTrace, + _tx_type: TxType, +) -> Result { + use alloy::primitives::{Signature as PrimitiveSignature, U256}; + + // Create a dummy signature with r = 0, s = 0 and even y-parity (false) + let dummy = PrimitiveSignature::new(U256::ZERO, U256::ZERO, false); + + Ok(dummy) +} + +fn get_to_address(trace: &TransactionTrace) -> Result, Error> { + // Try to detect contract creation transactions, which have no 'to' address + let is_contract_creation = trace.to.is_empty() + || trace + .calls + .first() + .is_some_and(|call| CallType::try_from(call.call_type) == Ok(CallType::Create)); + + if is_contract_creation { + Ok(None) + } else { + Ok(Some(trace.to.try_decode_proto("transaction to address")?)) + } } diff --git 
a/chain/ethereum/src/data_source.rs b/chain/ethereum/src/data_source.rs index 5be627baf25..cd945d00c62 100644 --- a/chain/ethereum/src/data_source.rs +++ b/chain/ethereum/src/data_source.rs @@ -1,7 +1,11 @@ use anyhow::{anyhow, Error}; use anyhow::{ensure, Context}; use async_trait::async_trait; +use graph::abi; +use graph::abi::EventExt; +use graph::abi::FunctionExt; use graph::blockchain::{BlockPtr, TriggerWithHandler}; +use graph::components::ethereum::AnyTransaction; use graph::components::link_resolver::LinkResolverContext; use graph::components::metrics::subgraph::SubgraphInstanceMetrics; use graph::components::store::{EthereumCallCache, StoredDynamicDataSource}; @@ -17,9 +21,13 @@ use graph::env::ENV_VARS; use graph::futures03::future::try_join; use graph::futures03::stream::FuturesOrdered; use graph::futures03::TryStreamExt; -use graph::prelude::ethabi::ethereum_types::H160; -use graph::prelude::ethabi::StateMutability; -use graph::prelude::{Link, SubgraphManifestValidationError}; +use graph::prelude::alloy::{ + consensus::{TxEnvelope, TxLegacy}, + network::TransactionResponse, + primitives::{Address, B256, U256}, + rpc::types::Log, +}; +use graph::prelude::{alloy, Link, SubgraphManifestValidationError}; use graph::slog::{debug, error, o, trace}; use itertools::Itertools; use serde::de::Error as ErrorD; @@ -34,11 +42,8 @@ use tiny_keccak::{keccak256, Keccak}; use graph::{ blockchain::{self, Blockchain}, prelude::{ - ethabi::{Address, Event, Function, LogParam, ParamType, RawLog}, - serde_json, warn, - web3::types::{Log, Transaction, H256}, - BlockNumber, CheapClone, EthereumCall, LightEthereumBlock, LightEthereumBlockExt, - LinkResolver, Logger, + serde_json, warn, BlockNumber, CheapClone, EthereumCall, LightEthereumBlock, + LightEthereumBlockExt, LinkResolver, Logger, }, }; @@ -134,7 +139,7 @@ impl blockchain::DataSource for DataSource { } fn address(&self) -> Option<&[u8]> { - self.address.as_ref().map(|x| x.as_bytes()) + self.address.as_ref().map(|x| x.as_slice()) } fn has_declared_calls(&self) -> bool { @@ -238,7 +243,7 @@ impl blockchain::DataSource for DataSource { } fn as_stored_dynamic_data_source(&self) -> StoredDynamicDataSource { - let param = self.address.map(|addr| addr.0.into()); + let param = self.address.map(|addr| addr.as_slice().into()); StoredDynamicDataSource { manifest_idx: self.manifest_idx, param, @@ -277,7 +282,7 @@ impl blockchain::DataSource for DataSource { let contract_abi = template.mapping.find_abi(&template.source.abi)?; - let address = param.map(|x| H160::from_slice(&x)); + let address = param.map(|x| Address::from_slice(&x)); Ok(DataSource { kind: template.kind.to_string(), network: template.network.as_ref().map(|s| s.to_string()), @@ -432,6 +437,45 @@ impl blockchain::DataSource for DataSource { } } +/// Generic function that creates a mock legacy Transaction from ANY log +fn create_dummy_transaction( + block_number: u64, + block_hash: B256, + transaction_index: Option, + transaction_hash: Option, +) -> Result { + use alloy::serde::WithOtherFields; + use graph::components::ethereum::AnyTxEnvelope; + use graph::prelude::alloy::{ + consensus::transaction::Recovered, consensus::Signed, primitives::Signature, + rpc::types::Transaction, + }; + + let tx = TxLegacy::default(); + + // Create a dummy signature + let signature = Signature::new(U256::ZERO, U256::ZERO, false); + + let tx_hash = transaction_hash.ok_or(anyhow!("Log has no transaction hash"))?; + let signed_tx = Signed::new_unchecked(tx, signature, tx_hash); + let eth_envelope = 
TxEnvelope::Legacy(signed_tx); + + // Wrap in AnyTxEnvelope + let any_envelope = AnyTxEnvelope::Ethereum(eth_envelope); + + let recovered = Recovered::new_unchecked(any_envelope, Address::ZERO); + + let inner_tx = Transaction { + inner: recovered, + block_hash: Some(block_hash), + block_number: Some(block_number), + transaction_index, + effective_gas_price: None, + }; + + Ok(AnyTransaction::new(WithOtherFields::new(inner_tx))) +} + impl DataSource { fn from_manifest( kind: String, @@ -463,7 +507,7 @@ impl DataSource { }) } - fn handlers_for_log(&self, log: &Log) -> Vec { + fn handlers_for_log(&self, log: &alloy::rpc::types::Log) -> Vec { self.mapping .event_handlers .iter() @@ -527,28 +571,28 @@ impl DataSource { } } - /// Returns the contract event with the given signature, if it exists. A an event from the ABI + /// Returns the contract event with the given signature, if it exists. An event from the ABI /// will be matched if: /// 1. An event signature is equal to `signature`. /// 2. There are no equal matches, but there is exactly one event that equals `signature` if all /// `indexed` modifiers are removed from the parameters. - fn contract_event_with_signature(&self, signature: &str) -> Option<&Event> { + fn contract_event_with_signature(&self, signature: &str) -> Option<&abi::Event> { // Returns an `Event(uint256,address)` signature for an event, without `indexed` hints. - fn ambiguous_event_signature(event: &Event) -> String { + fn ambiguous_event_signature(event: &abi::Event) -> String { format!( "{}({})", event.name, event .inputs .iter() - .map(|input| event_param_type_signature(&input.kind)) + .map(|input| input.selector_type().into_owned()) .collect::>() .join(",") ) } // Returns an `Event(indexed uint256,address)` type signature for an event. - fn event_signature(event: &Event) -> String { + fn event_signature(event: &abi::Event) -> String { format!( "{}({})", event.name, @@ -558,40 +602,13 @@ impl DataSource { .map(|input| format!( "{}{}", if input.indexed { "indexed " } else { "" }, - event_param_type_signature(&input.kind) + input.selector_type() )) .collect::>() .join(",") ) } - // Returns the signature of an event parameter type (e.g. `uint256`). 
- fn event_param_type_signature(kind: &ParamType) -> String { - use ParamType::*; - - match kind { - Address => "address".into(), - Bytes => "bytes".into(), - Int(size) => format!("int{}", size), - Uint(size) => format!("uint{}", size), - Bool => "bool".into(), - String => "string".into(), - Array(inner) => format!("{}[]", event_param_type_signature(inner)), - FixedBytes(size) => format!("bytes{}", size), - FixedArray(inner, size) => { - format!("{}[{}]", event_param_type_signature(inner), size) - } - Tuple(components) => format!( - "({})", - components - .iter() - .map(event_param_type_signature) - .collect::>() - .join(",") - ), - } - } - self.contract_abi .contract .events() @@ -630,7 +647,9 @@ impl DataSource { }) } - fn contract_function_with_signature(&self, target_signature: &str) -> Option<&Function> { + fn contract_function_with_signature(&self, target_signature: &str) -> Option<&abi::Function> { + use abi::StateMutability; + self.contract_abi .contract .functions() @@ -644,7 +663,7 @@ impl DataSource { let mut arguments = function .inputs .iter() - .map(|input| format!("{}", input.kind)) + .map(|input| input.selector_type().into_owned()) .collect::>() .join(","); // `address,uint256,bool) @@ -734,11 +753,7 @@ impl DataSource { .into_iter() .filter_map(|(event_handler, event_abi)| { event_abi - .parse_log(RawLog { - topics: log.topics.clone(), - data: log.data.clone().0, - }) - .map(|log| log.params) + .decode_log(&log) .map_err(|e| { trace!( logger, @@ -777,17 +792,15 @@ impl DataSource { // See also ca0edc58-0ec5-4c89-a7dd-2241797f5e50. // There is another special case in zkSync-era, where the transaction hash in this case would be zero // See https://docs.zksync.io/zk-stack/concepts/blocks.html#fictive-l2-block-finalizing-the-batch - let transaction = if log.transaction_hash == block.hash - || log.transaction_hash == Some(H256::zero()) + let transaction = if log.transaction_hash == Some(block.hash()) + || log.transaction_hash == Some(B256::ZERO) { - Transaction { - hash: log.transaction_hash.unwrap(), - block_hash: block.hash, - block_number: block.number, - transaction_index: log.transaction_index, - from: Some(H160::zero()), - ..Transaction::default() - } + create_dummy_transaction( + block.number_u64(), + block.hash(), + log.transaction_index, + log.transaction_hash, + )? } else { // This is the general case where the log's transaction hash does not match the block's hash // and is not a special zero hash, implying a real transaction associated with this log. @@ -798,8 +811,8 @@ impl DataSource { let logging_extras = Arc::new(o! { "signature" => event_handler.event.to_string(), - "address" => format!("{}", &log.address), - "transaction" => format!("{}", &transaction.hash), + "address" => format!("{}", &log.address()), + "transaction" => format!("{}", &transaction.inner.tx_hash()), }); let handler = event_handler.handler.clone(); let calls = DeclaredCall::from_log_trigger_with_event( @@ -843,20 +856,15 @@ impl DataSource { ) })?; - // Parse the inputs - // - // Take the input for the call, chop off the first 4 bytes, then call - // `function.decode_input` to get a vector of `Token`s. Match the `Token`s - // with the `Param`s in `function.inputs` to create a `Vec`. 
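The hand-rolled `event_param_type_signature` helper goes away because the alloy JSON-ABI param types already expose `selector_type()`, which yields the canonical parameter spelling (including nested tuples and arrays). A minimal sketch of what the new signature-building code computes, assuming `graph::abi::Event` re-exports `alloy::json_abi::Event` and that `Event::parse` is available in the pinned alloy version:

```rust
use alloy::json_abi::Event;
use alloy::primitives::keccak256;

fn main() {
    let event = Event::parse("Transfer(address indexed from, address indexed to, uint256 value)")
        .expect("valid human-readable event signature");

    // Canonical form with no `indexed` hints: "Transfer(address,address,uint256)".
    let canonical = format!(
        "{}({})",
        event.name,
        event
            .inputs
            .iter()
            .map(|input| input.selector_type().into_owned())
            .collect::<Vec<_>>()
            .join(",")
    );
    assert_eq!(canonical, "Transfer(address,address,uint256)");

    // topic0 is the keccak256 of that canonical signature.
    assert_eq!(event.selector(), keccak256(canonical.as_bytes()));
}
```

The `indexed`-annotated variant used for the ambiguous-match fallback is built the same way, with the `indexed ` prefix re-added per input.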
- let tokens = match function_abi.decode_input(&call.input.0[4..]).with_context( - || { + let values = match function_abi + .abi_decode_input(&call.input.0[4..]) + .with_context(|| { format!( "Generating function inputs for the call {:?} failed, raw input: {}", &function_abi, hex::encode(&call.input.0) ) - }, - ) { + }) { Ok(val) => val, // See also 280b0108-a96e-4738-bb37-60ce11eeb5bf Err(err) => { @@ -866,27 +874,22 @@ impl DataSource { }; ensure!( - tokens.len() == function_abi.inputs.len(), + values.len() == function_abi.inputs.len(), "Number of arguments in call does not match \ number of inputs in function signature." ); - let inputs = tokens + let inputs = values .into_iter() .enumerate() - .map(|(i, token)| LogParam { + .map(|(i, value)| abi::DynSolParam { name: function_abi.inputs[i].name.clone(), - value: token, + value, }) .collect::>(); - // Parse the outputs - // - // Take the output for the call, then call `function.decode_output` to - // get a vector of `Token`s. Match the `Token`s with the `Param`s in - // `function.outputs` to create a `Vec`. - let tokens = function_abi - .decode_output(&call.output.0) + let values = function_abi + .abi_decode_output(&call.output.0) .with_context(|| { format!( "Decoding function outputs for the call {:?} failed, raw output: {}", @@ -896,17 +899,17 @@ impl DataSource { })?; ensure!( - tokens.len() == function_abi.outputs.len(), + values.len() == function_abi.outputs.len(), "Number of parameters in the call output does not match \ number of outputs in the function signature." ); - let outputs = tokens + let outputs = values .into_iter() .enumerate() - .map(|(i, token)| LogParam { + .map(|(i, value)| abi::DynSolParam { name: function_abi.outputs[i].name.clone(), - value: token, + value, }) .collect::>(); @@ -918,7 +921,7 @@ impl DataSource { let logging_extras = Arc::new(o! 
{ "function" => handler.function.to_string(), "to" => format!("{}", &call.to), - "transaction" => format!("{}", &transaction.hash), + "transaction" => format!("{}", &transaction.inner.tx_hash()), }); Ok(Some(TriggerWithHandler::::new_with_logging_extras( MappingTrigger::Call { @@ -1478,13 +1481,13 @@ pub struct MappingCallHandler { #[derive(Clone, Debug, Eq, PartialEq, Deserialize)] pub struct UnresolvedMappingEventHandler { pub event: String, - pub topic0: Option, - #[serde(deserialize_with = "deserialize_h256_vec", default)] - pub topic1: Option>, - #[serde(deserialize_with = "deserialize_h256_vec", default)] - pub topic2: Option>, - #[serde(deserialize_with = "deserialize_h256_vec", default)] - pub topic3: Option>, + pub topic0: Option, + #[serde(deserialize_with = "deserialize_b256_vec", default)] + pub topic1: Option>, + #[serde(deserialize_with = "deserialize_b256_vec", default)] + pub topic2: Option>, + #[serde(deserialize_with = "deserialize_b256_vec", default)] + pub topic3: Option>, pub handler: String, #[serde(default)] pub receipt: bool, @@ -1518,17 +1521,17 @@ impl UnresolvedMappingEventHandler { #[derive(Clone, Debug, Hash, Eq, PartialEq)] pub struct MappingEventHandler { pub event: String, - pub topic0: Option, - pub topic1: Option>, - pub topic2: Option>, - pub topic3: Option>, + pub topic0: Option, + pub topic1: Option>, + pub topic2: Option>, + pub topic3: Option>, pub handler: String, pub receipt: bool, pub calls: CallDecls, } -// Custom deserializer for H256 fields that removes the '0x' prefix before parsing -fn deserialize_h256_vec<'de, D>(deserializer: D) -> Result>, D::Error> +// Custom deserializer for B256 fields that removes the '0x' prefix before parsing +fn deserialize_b256_vec<'de, D>(deserializer: D) -> Result>, D::Error> where D: Deserializer<'de>, { @@ -1536,40 +1539,40 @@ where match s { Some(vec) => { - let mut h256_vec = Vec::new(); + let mut b256_vec = Vec::new(); for hex_str in vec { // Remove '0x' prefix if present let clean_hex_str = hex_str.trim_start_matches("0x"); // Ensure the hex string is 64 characters long, after removing '0x' let padded_hex_str = format!("{:0>64}", clean_hex_str); // Parse the padded string into H256, handling potential errors - h256_vec.push( - H256::from_str(&padded_hex_str) - .map_err(|e| D::Error::custom(format!("Failed to parse H256: {}", e)))?, + b256_vec.push( + B256::from_str(&padded_hex_str) + .map_err(|e| D::Error::custom(format!("Failed to parse B256: {}", e)))?, ); } - Ok(Some(h256_vec)) + Ok(Some(b256_vec)) } None => Ok(None), } } impl MappingEventHandler { - pub fn topic0(&self) -> H256 { + pub fn topic0(&self) -> B256 { self.topic0 - .unwrap_or_else(|| string_to_h256(&self.event.replace("indexed ", ""))) + .unwrap_or_else(|| string_to_b256(&self.event.replace("indexed ", ""))) } pub fn matches(&self, log: &Log) -> bool { - let matches_topic = |index: usize, topic_opt: &Option>| -> bool { + let matches_topic = |index: usize, topic_opt: &Option>| -> bool { topic_opt.as_ref().is_none_or(|topic_vec| { - log.topics + log.topics() .get(index) .is_some_and(|log_topic| topic_vec.contains(log_topic)) }) }; - if let Some(topic0) = log.topics.first() { + if let Some(topic0) = log.topics().first() { return self.topic0() == *topic0 && matches_topic(1, &self.topic1) && matches_topic(2, &self.topic2) @@ -1587,18 +1590,15 @@ impl MappingEventHandler { } } -/// Hashes a string to a H256 hash. -fn string_to_h256(s: &str) -> H256 { +/// Hashes a string to a B256 hash. 
+fn string_to_b256(s: &str) -> B256 { let mut result = [0u8; 32]; let data = s.replace(' ', "").into_bytes(); let mut sponge = Keccak::new_keccak256(); sponge.update(&data); sponge.finalize(&mut result); - // This was deprecated but the replacement seems to not be available in the - // version web3 uses. - #[allow(deprecated)] - H256::from_slice(&result) + B256::from_slice(&result) } #[derive(Clone, Debug, Default, Hash, Eq, PartialEq, Deserialize)] diff --git a/chain/ethereum/src/ethereum_adapter.rs b/chain/ethereum/src/ethereum_adapter.rs index b1ee277c0d3..49d5fc0d01e 100644 --- a/chain/ethereum/src/ethereum_adapter.rs +++ b/chain/ethereum/src/ethereum_adapter.rs @@ -1,13 +1,13 @@ use async_trait::async_trait; use futures03::{future::BoxFuture, stream::FuturesUnordered}; -use tokio::sync::RwLock; -use tokio::time::timeout; - +use graph::abi; +use graph::abi::DynSolValueExt; +use graph::abi::FunctionExt; use graph::blockchain::client::ChainClient; use graph::blockchain::BlockHash; use graph::blockchain::ChainIdentifier; use graph::blockchain::ExtendedBlockPtr; - +use graph::components::ethereum::*; use graph::components::transaction_receipt::LightTransactionReceipt; use graph::data::store::ethereum::call; use graph::data::store::scalar; @@ -21,33 +21,35 @@ use graph::futures03::future::try_join_all; use graph::futures03::{ self, compat::Future01CompatExt, FutureExt, StreamExt, TryFutureExt, TryStreamExt, }; -use graph::prelude::ethabi::ParamType; -use graph::prelude::ethabi::Token; -use graph::prelude::tokio::try_join; -use graph::prelude::web3::types::U256; +use graph::prelude::{ + alloy::{ + self, + network::{AnyNetwork, TransactionResponse}, + primitives::{Address, B256}, + providers::{ + ext::TraceApi, + fillers::{ + BlobGasFiller, ChainIdFiller, FillProvider, GasFiller, JoinFill, NonceFiller, + }, + Identity, Provider, RootProvider, + }, + rpc::types::{ + trace::{filter::TraceFilter as AlloyTraceFilter, parity::LocalizedTransactionTrace}, + TransactionInput, TransactionRequest, + }, + transports::{RpcError, TransportErrorKind}, + }, + tokio::try_join, +}; use graph::slog::o; use graph::{ blockchain::{block_stream::BlockWithTriggers, BlockPtr, IngestorError}, prelude::{ anyhow::{self, anyhow, bail, ensure, Context}, - debug, error, ethabi, hex, info, retry, serde_json as json, tiny_keccak, trace, warn, - web3::{ - self, - types::{ - Address, BlockId, BlockNumber as Web3BlockNumber, Bytes, CallRequest, Filter, - FilterBuilder, Log, Transaction, TransactionReceipt, H256, - }, - }, - BlockNumber, ChainStore, CheapClone, DynTryFuture, Error, EthereumCallCache, Logger, - TimeoutError, + debug, error, hex, info, retry, serde_json as json, trace, warn, BlockNumber, ChainStore, + CheapClone, DynTryFuture, Error, EthereumCallCache, Logger, TimeoutError, }, }; -use graph::{ - components::ethereum::*, - prelude::web3::api::Web3, - prelude::web3::transports::Batch, - prelude::web3::types::{Trace, TraceFilter, TraceFilterBuilder, H160}, -}; use itertools::Itertools; use std::collections::{BTreeMap, BTreeSet, HashMap, HashSet}; use std::convert::TryFrom; @@ -55,9 +57,13 @@ use std::iter::FromIterator; use std::pin::Pin; use std::sync::Arc; use std::time::Instant; +use tokio::sync::RwLock; +use tokio::time::timeout; +use crate::adapter::EthGetLogsFilter; use crate::adapter::EthereumRpcError; use crate::adapter::ProviderStatus; +use crate::call_helper::interpret_eth_call_error; use crate::chain::BlockFinality; use crate::trigger::{LogPosition, LogRef}; use crate::Chain; @@ -65,32 +71,54 @@ use 
crate::NodeCapabilities; use crate::TriggerFilter; use crate::{ adapter::{ - ContractCallError, EthGetLogsFilter, EthereumAdapter as EthereumAdapterTrait, - EthereumBlockFilter, EthereumCallFilter, EthereumLogFilter, ProviderEthRpcMetrics, - SubgraphEthRpcMetrics, + ContractCallError, EthereumAdapter as EthereumAdapterTrait, EthereumBlockFilter, + EthereumCallFilter, EthereumLogFilter, ProviderEthRpcMetrics, SubgraphEthRpcMetrics, }, transport::Transport, trigger::{EthereumBlockTriggerType, EthereumTrigger}, ENV_VARS, }; -#[derive(Debug, Clone)] +type AlloyProvider = FillProvider< + JoinFill< + Identity, + JoinFill>>, + >, + RootProvider, + AnyNetwork, +>; + +#[derive(Clone)] pub struct EthereumAdapter { logger: Logger, provider: String, - web3: Arc>, + alloy: Arc, metrics: Arc, supports_eip_1898: bool, call_only: bool, supports_block_receipts: Arc>>, } +impl std::fmt::Debug for EthereumAdapter { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("EthereumAdapter") + .field("logger", &self.logger) + .field("provider", &self.provider) + .field("alloy", &"") + .field("metrics", &self.metrics) + .field("supports_eip_1898", &self.supports_eip_1898) + .field("call_only", &self.call_only) + .field("supports_block_receipts", &self.supports_block_receipts) + .finish() + } +} + impl CheapClone for EthereumAdapter { fn cheap_clone(&self) -> Self { Self { logger: self.logger.clone(), provider: self.provider.clone(), - web3: self.web3.cheap_clone(), + alloy: self.alloy.clone(), metrics: self.metrics.cheap_clone(), supports_eip_1898: self.supports_eip_1898, call_only: self.call_only, @@ -112,12 +140,35 @@ impl EthereumAdapter { supports_eip_1898: bool, call_only: bool, ) -> Self { - let web3 = Arc::new(Web3::new(transport)); + let alloy = match &transport { + Transport::RPC(client) => Arc::new( + alloy::providers::ProviderBuilder::<_, _, AnyNetwork>::default() + .network::() + .with_recommended_fillers() + .connect_client(client.clone()), + ), + Transport::IPC(ipc_connect) => Arc::new( + alloy::providers::ProviderBuilder::<_, _, AnyNetwork>::default() + .network::() + .with_recommended_fillers() + .connect_ipc(ipc_connect.clone()) + .await + .expect("Failed to connect to Ethereum IPC"), + ), + Transport::WS(ws_connect) => Arc::new( + alloy::providers::ProviderBuilder::<_, _, AnyNetwork>::default() + .network::() + .with_recommended_fillers() + .connect_ws(ws_connect.clone()) + .await + .expect("Failed to connect to Ethereum WS"), + ), + }; EthereumAdapter { logger, provider, - web3, + alloy, metrics: provider_metrics, supports_eip_1898, call_only, @@ -131,78 +182,26 @@ impl EthereumAdapter { subgraph_metrics: Arc, from: BlockNumber, to: BlockNumber, - addresses: Vec, - ) -> Result, Error> { + addresses: Vec
, + ) -> Result, Error> { assert!(!self.call_only); - let eth = self.clone(); let retry_log_message = format!("trace_filter RPC call for block range: [{}..{}]", from, to); + let eth = self.clone(); + retry(retry_log_message, &logger) .redact_log_urls(true) .limit(ENV_VARS.request_retries) .timeout_secs(ENV_VARS.json_rpc_timeout.as_secs()) .run(move || { - let trace_filter: TraceFilter = match addresses.len() { - 0 => TraceFilterBuilder::default() - .from_block(from.into()) - .to_block(to.into()) - .build(), - _ => TraceFilterBuilder::default() - .from_block(from.into()) - .to_block(to.into()) - .to_address(addresses.clone()) - .build(), - }; - - let eth = eth.cheap_clone(); - let logger_for_triggers = logger.clone(); - let logger_for_error = logger.clone(); - let start = Instant::now(); + let eth = eth.clone(); + let logger = logger.clone(); let subgraph_metrics = subgraph_metrics.clone(); - let provider_metrics = eth.metrics.clone(); - let provider = self.provider.clone(); - + let addresses = addresses.clone(); async move { - let result = eth - .web3 - .trace() - .filter(trace_filter) + eth.execute_trace_filter_request(logger, subgraph_metrics, from, to, addresses) .await - .inspect(|traces| { - if !traces.is_empty() { - if to == from { - debug!( - logger_for_triggers, - "Received {} traces for block {}", - traces.len(), - to - ); - } else { - debug!( - logger_for_triggers, - "Received {} traces for blocks [{}, {}]", - traces.len(), - from, - to - ); - } - } - }) - .map_err(Error::from); - - let elapsed = start.elapsed().as_secs_f64(); - provider_metrics.observe_request(elapsed, "trace_filter", &provider); - subgraph_metrics.observe_request(elapsed, "trace_filter", &provider); - if let Err(e) = &result { - provider_metrics.add_error("trace_filter", &provider); - subgraph_metrics.add_error("trace_filter", &provider); - debug!( - logger_for_error, - "Error querying traces error = {:#} from = {} to = {}", e, from, to - ); - } - result } }) .map_err(move |e| { @@ -218,12 +217,99 @@ impl EthereumAdapter { .await } + async fn execute_trace_filter_request( + &self, + logger: Logger, + subgraph_metrics: Arc, + from: BlockNumber, + to: BlockNumber, + addresses: Vec
, + ) -> Result, Error> { + let alloy_trace_filter = Self::build_trace_filter(from, to, &addresses); + let start = Instant::now(); + + let result = self.alloy.trace_filter(&alloy_trace_filter).await; + + if let Ok(traces) = &result { + self.log_trace_results(&logger, from, to, traces.len()); + } + + self.record_trace_metrics( + &subgraph_metrics, + start.elapsed().as_secs_f64(), + &result, + from, + to, + &logger, + ); + + result.map_err(Error::from) + } + + fn build_trace_filter( + from: BlockNumber, + to: BlockNumber, + addresses: &[Address], + ) -> AlloyTraceFilter { + let filter = AlloyTraceFilter::default() + .from_block(from as u64) + .to_block(to as u64); + + if !addresses.is_empty() { + filter.to_address(addresses.to_vec()) + } else { + filter + } + } + + fn log_trace_results( + &self, + logger: &Logger, + from: BlockNumber, + to: BlockNumber, + trace_len: usize, + ) { + if trace_len > 0 { + if to == from { + debug!(logger, "Received {} traces for block {}", trace_len, to); + } else { + debug!( + logger, + "Received {} traces for blocks [{}, {}]", trace_len, from, to + ); + } + } + } + + fn record_trace_metrics( + &self, + subgraph_metrics: &Arc, + elapsed: f64, + result: &Result, RpcError>, + from: BlockNumber, + to: BlockNumber, + logger: &Logger, + ) { + self.metrics + .observe_request(elapsed, "trace_filter", &self.provider); + subgraph_metrics.observe_request(elapsed, "trace_filter", &self.provider); + + if let Err(e) = result { + self.metrics.add_error("trace_filter", &self.provider); + subgraph_metrics.add_error("trace_filter", &self.provider); + debug!( + logger, + "Error querying traces error = {:#} from = {} to = {}", e, from, to + ); + } + } + // This is a lazy check for block receipt support. It is only called once and then the result is // cached. The result is not used for anything critical, so it is fine to be lazy. 
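The retry body now just calls `execute_trace_filter_request`, a thin wrapper around alloy's `trace_filter`. A sketch of the same request outside the adapter, assuming any provider with the `TraceApi` extension trait in scope (its blanket impl covers all providers); the function name is illustrative:

```rust
use alloy::primitives::Address;
use alloy::providers::{ext::TraceApi, Provider};
use alloy::rpc::types::trace::filter::TraceFilter;

async fn traces_for_range(
    provider: &impl Provider,
    from: u64,
    to: u64,
    addresses: Vec<Address>,
) -> anyhow::Result<usize> {
    // Same shape as `build_trace_filter`: a block range plus an optional callee list.
    let mut filter = TraceFilter::default().from_block(from).to_block(to);
    if !addresses.is_empty() {
        // `trace_filter` can only narrow by `to` address and block range; matching
        // specific function signatures still happens client-side on the result.
        filter = filter.to_address(addresses);
    }

    let traces = provider.trace_filter(&filter).await?;
    Ok(traces.len())
}
```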
async fn check_block_receipt_support_and_update_cache( &self, - web3: Arc>, - block_hash: H256, + alloy: Arc, + block_hash: B256, supports_eip_1898: bool, call_only: bool, logger: Logger, @@ -240,7 +326,7 @@ impl EthereumAdapter { info!(logger, "Checking eth_getBlockReceipts support"); let result = timeout( ENV_VARS.block_receipts_check_timeout, - check_block_receipt_support(web3, block_hash, supports_eip_1898, call_only), + check_block_receipt_support(alloy, block_hash, supports_eip_1898, call_only), ) .await; @@ -273,6 +359,7 @@ impl EthereumAdapter { result } + /// Alloy-exclusive version of logs_with_sigs using alloy types and methods async fn logs_with_sigs( &self, logger: Logger, @@ -281,19 +368,24 @@ impl EthereumAdapter { to: BlockNumber, filter: Arc, too_many_logs_fingerprints: &'static [&'static str], - ) -> Result, TimeoutError> { + ) -> Result< + Vec, + TimeoutError>, + > { assert!(!self.call_only); let eth_adapter = self.clone(); let retry_log_message = format!("eth_getLogs RPC call for block range: [{}..{}]", from, to); retry(retry_log_message, &logger) .redact_log_urls(true) - .when(move |res: &Result<_, web3::error::Error>| match res { - Ok(_) => false, - Err(e) => !too_many_logs_fingerprints - .iter() - .any(|f| e.to_string().contains(f)), - }) + .when( + move |res: &Result<_, RpcError>| match res { + Ok(_) => false, + Err(e) => !too_many_logs_fingerprints + .iter() + .any(|f| e.to_string().contains(f)), + }, + ) .limit(ENV_VARS.request_retries) .timeout_secs(ENV_VARS.json_rpc_timeout.as_secs()) .run(move || { @@ -305,21 +397,10 @@ impl EthereumAdapter { async move { let start = Instant::now(); - // Create a log filter - let log_filter: Filter = FilterBuilder::default() - .from_block(from.into()) - .to_block(to.into()) - .address(filter.contracts.clone()) - .topics( - Some(filter.event_signatures.clone()), - filter.topic1.clone(), - filter.topic2.clone(), - filter.topic3.clone(), - ) - .build(); - // Request logs from client - let result = eth_adapter.web3.eth().logs(log_filter).boxed().await; + let alloy_filter = filter.to_alloy_filter(from, to); + + let result = eth_adapter.alloy.get_logs(&alloy_filter).await; let elapsed = start.elapsed().as_secs_f64(); provider_metrics.observe_request(elapsed, "eth_getLogs", &provider); subgraph_metrics.observe_request(elapsed, "eth_getLogs", &provider); @@ -339,8 +420,8 @@ impl EthereumAdapter { subgraph_metrics: Arc, from: BlockNumber, to: BlockNumber, - addresses: Vec, - ) -> impl Stream + Send { + addresses: Vec
, + ) -> impl futures03::Stream> + Send { if from > to { panic!( "Can not produce a call stream on a backwards block range: from = {}, to = {}", @@ -354,36 +435,40 @@ impl EthereumAdapter { true => 1, }; + let ranges: Vec<(BlockNumber, BlockNumber)> = { + let mut ranges = Vec::new(); + let mut start = from; + while start <= to { + let end = (start + step_size - 1).min(to); + ranges.push((start, end)); + start = end + 1; + } + ranges + }; + let eth = self; let logger = logger.clone(); - stream::unfold(from, move |start| { - if start > to { - return None; - } - let end = (start + step_size - 1).min(to); - let new_start = end + 1; - if start == end { - debug!(logger, "Requesting traces for block {}", start); - } else { - debug!(logger, "Requesting traces for blocks [{}, {}]", start, end); + + futures03::stream::iter(ranges.into_iter().map(move |(start, end)| { + let eth = eth.clone(); + let logger = logger.clone(); + let subgraph_metrics = subgraph_metrics.clone(); + let addresses = addresses.clone(); + + async move { + if start == end { + debug!(logger, "Requesting traces for block {}", start); + } else { + debug!(logger, "Requesting traces for blocks [{}, {}]", start, end); + } + + eth.traces(logger, subgraph_metrics, start, end, addresses) + .await } - Some(graph::futures01::future::ok(( - eth.clone() - .traces( - logger.cheap_clone(), - subgraph_metrics.clone(), - start, - end, - addresses.clone(), - ) - .boxed() - .compat(), - new_start, - ))) - }) + })) .buffered(ENV_VARS.block_batch_size) - .map(stream::iter_ok) - .flatten() + .map_ok(|traces| futures03::stream::iter(traces.into_iter().map(Ok))) + .try_flatten() } fn log_stream( @@ -393,7 +478,7 @@ impl EthereumAdapter { from: BlockNumber, to: BlockNumber, filter: EthGetLogsFilter, - ) -> DynTryFuture<'static, Vec, Error> { + ) -> DynTryFuture<'static, Vec, Error> { // Codes returned by Ethereum node providers if an eth_getLogs request is too heavy. const TOO_MANY_LOGS_FINGERPRINTS: &[&str] = &[ "ServerError(-32005)", // Infura @@ -480,14 +565,11 @@ impl EthereumAdapter { .boxed() } - // Method to determine block_id based on support for EIP-1898 - fn block_ptr_to_id(&self, block_ptr: &BlockPtr) -> BlockId { - // Ganache does not support calls by block hash. 
- // See https://github.com/trufflesuite/ganache-cli/issues/973 + fn block_ptr_to_id(&self, block_ptr: &BlockPtr) -> alloy::rpc::types::BlockId { if !self.supports_eip_1898 { - BlockId::Number(block_ptr.number.into()) + alloy::rpc::types::BlockId::number(block_ptr.number as u64) } else { - BlockId::Hash(block_ptr.hash_as_h256()) + alloy::rpc::types::BlockId::hash(block_ptr.hash.as_b256()) } } @@ -496,8 +578,8 @@ impl EthereumAdapter { logger: &Logger, address: Address, block_ptr: BlockPtr, - ) -> Result { - let web3 = self.web3.clone(); + ) -> Result { + let alloy = self.alloy.clone(); let logger = Logger::new(logger, o!("provider" => self.provider.clone())); let block_id = self.block_ptr_to_id(&block_ptr); @@ -509,13 +591,12 @@ impl EthereumAdapter { .limit(ENV_VARS.request_retries) .timeout_secs(ENV_VARS.json_rpc_timeout.as_secs()) .run(move || { - let web3 = web3.cheap_clone(); + let alloy = alloy.cheap_clone(); async move { - let result: Result = - web3.eth().code(address, Some(block_id)).boxed().await; + let result = alloy.get_code_at(address).block_id(block_id).await; match result { Ok(code) => Ok(code), - Err(err) => Err(EthereumRpcError::Web3Error(err)), + Err(err) => Err(EthereumRpcError::AlloyError(err)), } } }) @@ -528,8 +609,8 @@ impl EthereumAdapter { logger: &Logger, address: Address, block_ptr: BlockPtr, - ) -> Result { - let web3 = self.web3.clone(); + ) -> Result { + let alloy = self.alloy.clone(); let logger = Logger::new(logger, o!("provider" => self.provider.clone())); let block_id = self.block_ptr_to_id(&block_ptr); @@ -541,13 +622,12 @@ impl EthereumAdapter { .limit(ENV_VARS.request_retries) .timeout_secs(ENV_VARS.json_rpc_timeout.as_secs()) .run(move || { - let web3 = web3.cheap_clone(); + let alloy = alloy.cheap_clone(); async move { - let result: Result = - web3.eth().balance(address, Some(block_id)).boxed().await; + let result = alloy.get_balance(address).block_id(block_id).await; match result { Ok(balance) => Ok(balance), - Err(err) => Err(EthereumRpcError::Web3Error(err)), + Err(err) => Err(EthereumRpcError::AlloyError(err)), } } }) @@ -562,15 +642,10 @@ impl EthereumAdapter { block_ptr: BlockPtr, gas: Option, ) -> Result { - fn reverted(logger: &Logger, reason: &str) -> Result { - info!(logger, "Contract call reverted"; "reason" => reason); - Ok(call::Retval::Null) - } - - let web3 = self.web3.clone(); + let alloy = self.alloy.clone(); let logger = Logger::new(&logger, o!("provider" => self.provider.clone())); - let block_id = self.block_ptr_to_id(&block_ptr); + let alloy_block_id = self.block_ptr_to_id(&block_ptr); let retry_log_message = format!("eth_call RPC call for block {}", block_ptr); retry(retry_log_message, &logger) .redact_log_urls(true) @@ -578,138 +653,29 @@ impl EthereumAdapter { .timeout_secs(ENV_VARS.json_rpc_timeout.as_secs()) .run(move || { let call_data = call_data.clone(); - let web3 = web3.cheap_clone(); + let alloy = alloy.cheap_clone(); let logger = logger.cheap_clone(); async move { - let req = CallRequest { - to: Some(call_data.address), - gas: gas.map(web3::types::U256::from), - data: Some(Bytes::from(call_data.encoded_call.to_vec())), - from: None, - gas_price: None, - value: None, - access_list: None, - max_fee_per_gas: None, - max_priority_fee_per_gas: None, - transaction_type: None, - }; - let result = web3.eth().call(req, Some(block_id)).boxed().await; - - // Try to check if the call was reverted. The JSON-RPC response for reverts is - // not standardized, so we have ad-hoc checks for each Ethereum client. 
- - // 0xfe is the "designated bad instruction" of the EVM, and Solidity uses it for - // asserts. - const PARITY_BAD_INSTRUCTION_FE: &str = "Bad instruction fe"; - - // 0xfd is REVERT, but on some contracts, and only on older blocks, - // this happens. Makes sense to consider it a revert as well. - const PARITY_BAD_INSTRUCTION_FD: &str = "Bad instruction fd"; - - const PARITY_BAD_JUMP_PREFIX: &str = "Bad jump"; - const PARITY_STACK_LIMIT_PREFIX: &str = "Out of stack"; - - // See f0af4ab0-6b7c-4b68-9141-5b79346a5f61. - const PARITY_OUT_OF_GAS: &str = "Out of gas"; - - // Also covers Nethermind reverts - const PARITY_VM_EXECUTION_ERROR: i64 = -32015; - const PARITY_REVERT_PREFIX: &str = "revert"; - - const XDAI_REVERT: &str = "revert"; - - // Deterministic Geth execution errors. We might need to expand this as - // subgraphs come across other errors. See - // https://github.com/ethereum/go-ethereum/blob/cd57d5cd38ef692de8fbedaa56598b4e9fbfbabc/core/vm/errors.go - const GETH_EXECUTION_ERRORS: &[&str] = &[ - // The "revert" substring covers a few known error messages, including: - // Hardhat: "error: transaction reverted", - // Ganache and Moonbeam: "vm exception while processing transaction: revert", - // Geth: "execution reverted" - // And others. - "revert", - "invalid jump destination", - "invalid opcode", - // Ethereum says 1024 is the stack sizes limit, so this is deterministic. - "stack limit reached 1024", - // See f0af4ab0-6b7c-4b68-9141-5b79346a5f61 for why the gas limit is considered deterministic. - "out of gas", - "stack underflow", - ]; - - let env_geth_call_errors = ENV_VARS.geth_eth_call_errors.iter(); - let mut geth_execution_errors = GETH_EXECUTION_ERRORS - .iter() - .copied() - .chain(env_geth_call_errors.map(|s| s.as_str())); - - let as_solidity_revert_with_reason = |bytes: &[u8]| { - let solidity_revert_function_selector = - &tiny_keccak::keccak256(b"Error(string)")[..4]; - - match bytes.len() >= 4 && &bytes[..4] == solidity_revert_function_selector { - false => None, - true => ethabi::decode(&[ParamType::String], &bytes[4..]) - .ok() - .and_then(|tokens| tokens[0].clone().into_string()), - } - }; - - match result { - // A successful response. - Ok(bytes) => Ok(call::Retval::Value(scalar::Bytes::from(bytes))), + let mut req = TransactionRequest::default() + .input(TransactionInput::both(alloy::primitives::Bytes::from( + call_data.encoded_call.to_vec(), + ))) + .to(call_data.address); - // Check for Geth revert. - Err(web3::Error::Rpc(rpc_error)) - if geth_execution_errors - .any(|e| rpc_error.message.to_lowercase().contains(e)) => - { - reverted(&logger, &rpc_error.message) - } + if let Some(gas) = gas { + req = req.gas_limit(gas as u64); + } - // Check for Parity revert. 
- Err(web3::Error::Rpc(ref rpc_error)) - if rpc_error.code.code() == PARITY_VM_EXECUTION_ERROR => - { - match rpc_error.data.as_ref().and_then(|d| d.as_str()) { - Some(data) - if data.to_lowercase().starts_with(PARITY_REVERT_PREFIX) - || data.starts_with(PARITY_BAD_JUMP_PREFIX) - || data.starts_with(PARITY_STACK_LIMIT_PREFIX) - || data == PARITY_BAD_INSTRUCTION_FE - || data == PARITY_BAD_INSTRUCTION_FD - || data == PARITY_OUT_OF_GAS - || data == XDAI_REVERT => - { - let reason = if data == PARITY_BAD_INSTRUCTION_FE { - PARITY_BAD_INSTRUCTION_FE.to_owned() - } else { - let payload = data.trim_start_matches(PARITY_REVERT_PREFIX); - hex::decode(payload) - .ok() - .and_then(|payload| { - as_solidity_revert_with_reason(&payload) - }) - .unwrap_or("no reason".to_owned()) - }; - reverted(&logger, &reason) - } + let result = alloy.call(req.into()).block(alloy_block_id).await; - // The VM execution error was not identified as a revert. - _ => Err(ContractCallError::Web3Error(web3::Error::Rpc( - rpc_error.clone(), - ))), - } - } - - // The error was not identified as a revert. - Err(err) => Err(ContractCallError::Web3Error(err)), + match result { + Ok(bytes) => Ok(call::Retval::Value(scalar::Bytes::from(bytes))), + Err(err) => interpret_eth_call_error(&logger, err), } } }) - .map_err(|e| e.into_inner().unwrap_or(ContractCallError::Timeout)) - .boxed() .await + .map_err(|e| e.into_inner().unwrap_or(ContractCallError::Timeout)) } async fn call_and_cache( @@ -747,30 +713,46 @@ impl EthereumAdapter { fn load_blocks_rpc( &self, logger: Logger, - ids: Vec, - ) -> impl Stream, Error = Error> + Send { - let web3 = self.web3.clone(); + ids: Vec, + ) -> impl futures03::Stream, Error>> + Send { + let alloy = self.alloy.clone(); - stream::iter_ok::<_, Error>(ids.into_iter().map(move |hash| { - let web3 = web3.clone(); - retry(format!("load block {}", hash), &logger) - .redact_log_urls(true) - .limit(ENV_VARS.request_retries) - .timeout_secs(ENV_VARS.json_rpc_timeout.as_secs()) - .run(move || { - Box::pin(web3.eth().block_with_txs(BlockId::Hash(hash))) - .compat() - .from_err::() - .and_then(move |block| { - block.map(Arc::new).ok_or_else(|| { - anyhow::anyhow!("Ethereum node did not find block {:?}", hash) - }) + futures03::stream::iter(ids.into_iter().map(move |hash| { + let alloy = alloy.clone(); + let logger = logger.clone(); + + async move { + retry(format!("load block {}", hash), &logger) + .redact_log_urls(true) + .limit(ENV_VARS.request_retries) + .timeout_secs(ENV_VARS.json_rpc_timeout.as_secs()) + .run(move || { + let alloy = alloy.cheap_clone(); + async move { + alloy + .get_block_by_hash(hash) + .full() + .await + .map_err(Error::from) + .and_then(|block| { + block + .map(|b| Arc::new(LightEthereumBlock::new(b))) + .ok_or_else(|| { + anyhow::anyhow!( + "Ethereum node did not find block {:?}", + hash + ) + }) + }) + } + }) + .await + .map_err(|e| { + e.into_inner().unwrap_or_else(|| { + anyhow::anyhow!("Ethereum node took too long to return block {}", hash) }) - .compat() - }) - .boxed() - .compat() - .from_err() + }) + } })) .buffered(ENV_VARS.block_batch_size) } @@ -781,10 +763,10 @@ impl EthereumAdapter { logger: Logger, numbers: Vec, ) -> impl futures03::Stream, Error>> + Send { - let web3 = self.web3.clone(); + let alloy = self.alloy.clone(); futures03::stream::iter(numbers.into_iter().map(move |number| { - let web3 = web3.clone(); + let alloy = alloy.clone(); let logger = logger.clone(); async move { @@ -793,21 +775,22 @@ impl EthereumAdapter { .limit(ENV_VARS.request_retries) 
.timeout_secs(ENV_VARS.json_rpc_timeout.as_secs()) .run(move || { - let web3 = web3.clone(); + let alloy = alloy.cheap_clone(); async move { - let block_result = web3 - .eth() - .block(BlockId::Number(Web3BlockNumber::Number(number.into()))) + let block_result = alloy + .get_block_by_number(alloy::rpc::types::BlockNumberOrTag::Number( + number as u64, + )) .await; match block_result { Ok(Some(block)) => { let ptr = ExtendedBlockPtr::try_from(( - block.hash, - block.number, - block.parent_hash, - block.timestamp, + block.header.hash, + i32::try_from(block.header.number).unwrap(), + block.header.parent_hash, + block.header.timestamp, )) .map_err(|e| { anyhow::anyhow!("Failed to convert block: {}", e) @@ -842,22 +825,22 @@ impl EthereumAdapter { logger: Logger, block_nums: Vec, ) -> impl Stream + Send { - let web3 = self.web3.clone(); + let alloy = self.alloy.clone(); stream::iter_ok::<_, Error>(block_nums.into_iter().map(move |block_num| { - let web3 = web3.clone(); + let alloy = alloy.clone(); retry(format!("load block ptr {}", block_num), &logger) .redact_log_urls(true) .when(|res| !res.is_ok() && !detect_null_block(res)) .no_limit() .timeout_secs(ENV_VARS.json_rpc_timeout.as_secs()) .run(move || { - let web3 = web3.clone(); + let alloy = alloy.cheap_clone(); async move { - let block = web3 - .eth() - .block(BlockId::Number(Web3BlockNumber::Number(block_num.into()))) - .boxed() + let block = alloy + .get_block_by_number(alloy::rpc::types::BlockNumberOrTag::Number( + block_num as u64, + )) .await?; block.ok_or_else(|| { @@ -878,7 +861,7 @@ impl EthereumAdapter { })) .buffered(ENV_VARS.block_batch_size) .filter_map(|b| b) - .map(|b| b.into()) + .map(|b| BlockPtr::from((b.header.hash, b.header.number))) } /// Check if `block_ptr` refers to a block that is on the main chain, according to the Ethereum @@ -911,7 +894,7 @@ impl EthereumAdapter { from: BlockNumber, to: BlockNumber, log_filter: EthereumLogFilter, - ) -> DynTryFuture<'static, Vec, Error> { + ) -> DynTryFuture<'static, Vec, Error> { let eth: Self = self.cheap_clone(); let logger = logger.clone(); @@ -945,13 +928,13 @@ impl EthereumAdapter { wildcard_signatures, } = call_filter; - let mut addresses: Vec = contract_addresses_function_signatures + let mut addresses: Vec
= contract_addresses_function_signatures .iter() .filter(|(_addr, (start_block, _fsigs))| start_block <= &to) .map(|(addr, (_start_block, _fsigs))| *addr) - .collect::>() + .collect::>() .into_iter() - .collect::>(); + .collect::>(); if addresses.is_empty() && wildcard_signatures.is_empty() { // The filter has no started data sources in the requested range, nothing to do. @@ -968,14 +951,13 @@ impl EthereumAdapter { Box::new( eth.trace_stream(logger, subgraph_metrics, from, to, addresses) - .filter_map(|trace| EthereumCall::try_from_trace(&trace)) - .filter(move |call| { - // `trace_filter` can only filter by calls `to` an address and - // a block range. Since subgraphs are subscribing to calls - // for a specific contract function an additional filter needs - // to be applied - call_filter.matches(call) - }), + .try_filter_map(move |trace| { + let maybe_call = EthereumCall::try_from_trace(&trace) + .filter(|call| call_filter.matches(call)); + futures03::future::ready(Ok(maybe_call)) + }) + .boxed() + .compat(), ) } @@ -1054,11 +1036,11 @@ impl EthereumAdapter { logger: &Logger, subgraph_metrics: Arc, block_number: BlockNumber, - block_hash: H256, + block_hash: alloy::primitives::B256, ) -> Result, Error> { let eth = self.clone(); let addresses = Vec::new(); - let traces = eth + let traces: Vec = eth .trace_stream( logger, subgraph_metrics.clone(), @@ -1066,8 +1048,7 @@ impl EthereumAdapter { block_number, addresses, ) - .collect() - .compat() + .try_collect() .await?; // `trace_stream` returns all of the traces for the block, and this @@ -1085,7 +1066,7 @@ impl EthereumAdapter { // all the traces for the block, we need to ensure that the // block hash for the traces is equal to the desired block hash. // Assume all traces are for the same block. - if traces.first().unwrap().block_hash != block_hash { + if traces.first().unwrap().block_hash != Some(block_hash) { return Err(anyhow!( "Trace stream returned traces for an unexpected block: \ number = `{}`, hash = `{}`", @@ -1129,19 +1110,17 @@ impl EthereumAdapter { pub async fn chain_id(&self) -> Result { let logger = self.logger.clone(); - let web3 = self.web3.clone(); - u64::try_from( - retry("chain_id RPC call", &logger) - .redact_log_urls(true) - .no_limit() - .timeout_secs(ENV_VARS.json_rpc_timeout.as_secs()) - .run(move || { - let web3 = web3.cheap_clone(); - async move { web3.eth().chain_id().await } - }) - .await?, - ) - .map_err(Error::msg) + let alloy = self.alloy.clone(); + retry("chain_id RPC call", &logger) + .redact_log_urls(true) + .no_limit() + .timeout_secs(ENV_VARS.json_rpc_timeout.as_secs()) + .run(move || { + let alloy = alloy.cheap_clone(); + async move { alloy.get_chain_id().await.map_err(Error::from) } + }) + .await + .map_err(|e| e.into_inner().unwrap_or(EthereumRpcError::Timeout.into())) } } @@ -1164,7 +1143,7 @@ impl EthereumAdapterTrait for EthereumAdapter { async fn net_identifiers(&self) -> Result { let logger = self.logger.clone(); - let web3 = self.web3.clone(); + let alloy = self.alloy.clone(); let metrics = self.metrics.clone(); let provider = self.provider().to_string(); let net_version_future = retry("net_version RPC call", &logger) @@ -1172,11 +1151,11 @@ impl EthereumAdapterTrait for EthereumAdapter { .no_limit() .timeout_secs(20) .run(move || { - let web3 = web3.cheap_clone(); + let alloy = alloy.cheap_clone(); let metrics = metrics.cheap_clone(); let provider = provider.clone(); async move { - web3.net().version().await.map_err(|e| { + alloy.get_net_version().await.map_err(|e| { 
metrics.set_status(ProviderStatus::VersionFail, &provider); e.into() }) @@ -1189,7 +1168,7 @@ impl EthereumAdapterTrait for EthereumAdapter { }) .boxed(); - let web3 = self.web3.clone(); + let alloy_provider = self.alloy.clone(); let metrics = self.metrics.clone(); let provider = self.provider().to_string(); let retry_log_message = format!( @@ -1201,19 +1180,19 @@ impl EthereumAdapterTrait for EthereumAdapter { .no_limit() .timeout_secs(30) .run(move || { - let web3 = web3.cheap_clone(); + let alloy_genesis = alloy_provider.cheap_clone(); let metrics = metrics.cheap_clone(); let provider = provider.clone(); async move { - web3.eth() - .block(BlockId::Number(Web3BlockNumber::Number( - ENV_VARS.genesis_block_number.into(), - ))) + alloy_genesis + .get_block_by_number(alloy::rpc::types::BlockNumberOrTag::Number( + ENV_VARS.genesis_block_number, + )) .await .inspect_err(|_| { metrics.set_status(ProviderStatus::GenesisFail, &provider); })? - .and_then(|gen_block| gen_block.hash.map(BlockHash::from)) + .map(|gen_block| BlockHash::from(gen_block.header.hash)) .ok_or_else(|| anyhow!("Ethereum node could not find genesis block")) } }) @@ -1232,7 +1211,7 @@ impl EthereumAdapterTrait for EthereumAdapter { })?; let ident = ChainIdentifier { - net_version, + net_version: net_version.to_string(), genesis_block_hash, }; @@ -1241,52 +1220,24 @@ impl EthereumAdapterTrait for EthereumAdapter { Ok(ident) } - async fn latest_block_header( - &self, - logger: &Logger, - ) -> Result, IngestorError> { - let web3 = self.web3.clone(); + async fn latest_block_ptr(&self, logger: &Logger) -> Result { + let alloy = self.alloy.clone(); retry("eth_getBlockByNumber(latest) no txs RPC call", logger) .redact_log_urls(true) .no_limit() .timeout_secs(ENV_VARS.json_rpc_timeout.as_secs()) .run(move || { - let web3 = web3.cheap_clone(); + let alloy = alloy.cheap_clone(); async move { - let block_opt = web3 - .eth() - .block(Web3BlockNumber::Latest.into()) + let block_opt = alloy + .get_block_by_number(alloy::rpc::types::BlockNumberOrTag::Latest) .await .map_err(|e| anyhow!("could not get latest block from Ethereum: {}", e))?; - block_opt - .ok_or_else(|| anyhow!("no latest block returned from Ethereum").into()) - } - }) - .map_err(move |e| { - e.into_inner().unwrap_or_else(move || { - anyhow!("Ethereum node took too long to return latest block").into() - }) - }) - .await - } + let block = block_opt + .ok_or_else(|| anyhow!("no latest block returned from Ethereum"))?; - async fn latest_block(&self, logger: &Logger) -> Result { - let web3 = self.web3.clone(); - retry("eth_getBlockByNumber(latest) with txs RPC call", logger) - .redact_log_urls(true) - .no_limit() - .timeout_secs(ENV_VARS.json_rpc_timeout.as_secs()) - .run(move || { - let web3 = web3.cheap_clone(); - async move { - let block_opt = web3 - .eth() - .block_with_txs(Web3BlockNumber::Latest.into()) - .await - .map_err(|e| anyhow!("could not get latest block from Ethereum: {}", e))?; - block_opt - .ok_or_else(|| anyhow!("no latest block returned from Ethereum").into()) + Ok(BlockPtr::from((block.header.hash, block.header.number))) } }) .map_err(move |e| { @@ -1297,27 +1248,12 @@ impl EthereumAdapterTrait for EthereumAdapter { .await } - async fn load_block( - &self, - logger: &Logger, - block_hash: H256, - ) -> Result { - self.block_by_hash(logger, block_hash) - .await? 
- .ok_or_else(move || { - anyhow!( - "Ethereum node could not find block with hash {}", - block_hash - ) - }) - } - async fn block_by_hash( &self, logger: &Logger, - block_hash: H256, - ) -> Result, Error> { - let web3 = self.web3.clone(); + block_hash: B256, + ) -> Result, Error> { + let alloy = self.alloy.clone(); let logger = logger.clone(); let retry_log_message = format!( "eth_getBlockByHash RPC call for block hash {:?}", @@ -1329,10 +1265,11 @@ impl EthereumAdapterTrait for EthereumAdapter { .limit(ENV_VARS.request_retries) .timeout_secs(ENV_VARS.json_rpc_timeout.as_secs()) .run(move || { - let web3 = web3.cheap_clone(); + let alloy = alloy.cheap_clone(); async move { - web3.eth() - .block_with_txs(BlockId::Hash(block_hash)) + alloy + .get_block_by_hash(block_hash) + .full() .await .map_err(Error::from) } @@ -1349,8 +1286,8 @@ impl EthereumAdapterTrait for EthereumAdapter { &self, logger: &Logger, block_number: BlockNumber, - ) -> Result, Error> { - let web3 = self.web3.clone(); + ) -> Result, Error> { + let alloy = self.alloy.clone(); let logger = logger.clone(); let retry_log_message = format!( "eth_getBlockByNumber RPC call for block number {}", @@ -1361,10 +1298,13 @@ impl EthereumAdapterTrait for EthereumAdapter { .no_limit() .timeout_secs(ENV_VARS.json_rpc_timeout.as_secs()) .run(move || { - let web3 = web3.cheap_clone(); + let alloy = alloy.clone(); async move { - web3.eth() - .block_with_txs(BlockId::Number(block_number.into())) + alloy + .get_block_by_number(alloy::rpc::types::BlockNumberOrTag::Number( + block_number as u64, + )) + .full() .await .map_err(Error::from) } @@ -1383,26 +1323,26 @@ impl EthereumAdapterTrait for EthereumAdapter { async fn load_full_block( &self, logger: &Logger, - block: LightEthereumBlock, + block: AnyBlock, ) -> Result { - let web3 = Arc::clone(&self.web3); + let alloy = self.alloy.clone(); let logger = logger.clone(); - let block_hash = block.hash.expect("block is missing block hash"); + let block_hash = block.header.hash; // The early return is necessary for correctness, otherwise we'll // request an empty batch which is not valid in JSON-RPC. 
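`block_by_hash` and `block_by_number` now go through alloy's block request builder, where `.full()` hydrates transaction bodies instead of returning only hashes; the adapter wraps results into `LightEthereumBlock` where needed. A sketch against a plain Ethereum-network provider (the patch itself uses `AnyNetwork` for non-mainnet chains):

```rust
use alloy::primitives::B256;
use alloy::providers::Provider;

async fn block_with_txs(provider: &impl Provider, hash: B256) -> anyhow::Result<()> {
    let block = provider
        .get_block_by_hash(hash)
        .full() // hydrate full transaction objects, not just hashes
        .await?
        .ok_or_else(|| anyhow::anyhow!("Ethereum node did not find block {hash}"))?;

    println!(
        "block #{} carries {} transactions",
        block.header.number,
        block.transactions.len()
    );
    Ok(())
}
```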
if block.transactions.is_empty() { trace!(logger, "Block {} contains no transactions", block_hash); return Ok(EthereumBlock { - block: Arc::new(block), + block: Arc::new(LightEthereumBlock::new(block)), transaction_receipts: Vec::new(), }); } - let hashes: Vec<_> = block.transactions.iter().map(|txn| txn.hash).collect(); + let hashes: Vec<_> = block.transactions.hashes().collect(); let supports_block_receipts = self .check_block_receipt_support_and_update_cache( - web3.clone(), + alloy.clone(), block_hash, self.supports_eip_1898, self.call_only, @@ -1410,55 +1350,20 @@ impl EthereumAdapterTrait for EthereumAdapter { ) .await; - fetch_receipts_with_retry(web3, hashes, block_hash, logger, supports_block_receipts) + fetch_receipts_with_retry(alloy, hashes, block_hash, logger, supports_block_receipts) .await .map(|transaction_receipts| EthereumBlock { - block: Arc::new(block), + block: Arc::new(LightEthereumBlock::new(block)), transaction_receipts, }) } - async fn block_hash_by_block_number( - &self, - logger: &Logger, - block_number: BlockNumber, - ) -> Result, Error> { - let web3 = self.web3.clone(); - let retry_log_message = format!( - "eth_getBlockByNumber RPC call for block number {}", - block_number - ); - retry(retry_log_message, logger) - .redact_log_urls(true) - .no_limit() - .timeout_secs(ENV_VARS.json_rpc_timeout.as_secs()) - .run(move || { - let web3 = web3.cheap_clone(); - async move { - web3.eth() - .block(BlockId::Number(block_number.into())) - .await - .map(|block_opt| block_opt.and_then(|block| block.hash)) - .map_err(Error::from) - } - }) - .await - .map_err(move |e| { - e.into_inner().unwrap_or_else(move || { - anyhow!( - "Ethereum node took too long to return data for block #{}", - block_number - ) - }) - }) - } - async fn get_balance( &self, logger: &Logger, - address: H160, + address: Address, block_ptr: BlockPtr, - ) -> Result { + ) -> Result { debug!( logger, "eth_getBalance"; "address" => format!("{}", address), @@ -1470,9 +1375,9 @@ impl EthereumAdapterTrait for EthereumAdapter { async fn get_code( &self, logger: &Logger, - address: H160, + address: Address, block_ptr: BlockPtr, - ) -> Result { + ) -> Result { debug!( logger, "eth_getCode"; "address" => format!("{}", address), @@ -1492,7 +1397,7 @@ impl EthereumAdapterTrait for EthereumAdapter { "eth_getBlockByNumber RPC call for block number {}", next_number ); - let web3 = self.web3.clone(); + let alloy = self.alloy.clone(); let logger = logger.clone(); let res = retry(retry_log_message, &logger) .redact_log_urls(true) @@ -1500,12 +1405,16 @@ impl EthereumAdapterTrait for EthereumAdapter { .no_limit() .timeout_secs(ENV_VARS.json_rpc_timeout.as_secs()) .run(move || { - let web3 = web3.cheap_clone(); + let alloy = alloy.cheap_clone(); async move { - web3.eth() - .block(BlockId::Number(next_number.into())) + alloy + .get_block_by_number(alloy::rpc::types::BlockNumberOrTag::Number( + next_number as u64, + )) .await - .map(|block_opt| block_opt.and_then(|block| block.hash)) + .map(|block_opt| { + block_opt.map(|block| BlockHash::from(block.header.hash.0.to_vec())) + }) .map_err(Error::from) } }) @@ -1523,7 +1432,7 @@ impl EthereumAdapterTrait for EthereumAdapter { continue; } return match res { - Ok(Some(hash)) => Ok(BlockPtr::new(hash.into(), next_number)), + Ok(Some(hash)) => Ok(BlockPtr::new(hash, next_number)), Ok(None) => Err(anyhow!("Block {} does not contain hash", next_number)), Err(e) => Err(e), }; @@ -1535,7 +1444,7 @@ impl EthereumAdapterTrait for EthereumAdapter { logger: &Logger, inp_call: &ContractCall, 
cache: Arc, - ) -> Result<(Option>, call::Source), ContractCallError> { + ) -> Result<(Option>, call::Source), ContractCallError> { let mut result = self.contract_calls(logger, &[inp_call], cache).await?; // unwrap: self.contract_calls returns as many results as there were calls Ok(result.pop().unwrap()) @@ -1546,20 +1455,26 @@ impl EthereumAdapterTrait for EthereumAdapter { logger: &Logger, calls: &[&ContractCall], cache: Arc, - ) -> Result>, call::Source)>, ContractCallError> { + ) -> Result>, call::Source)>, ContractCallError> { fn as_req( logger: &Logger, call: &ContractCall, index: u32, ) -> Result { // Emit custom error for type mismatches. - for (token, kind) in call + for (val, kind) in call .args .iter() - .zip(call.function.inputs.iter().map(|p| &p.kind)) + .zip(call.function.inputs.iter().map(|p| p.selector_type())) { - if !token.type_check(kind) { - return Err(ContractCallError::TypeError(token.clone(), kind.clone())); + let kind: abi::DynSolType = kind.parse().map_err(|err| { + ContractCallError::ABIError(anyhow!( + "failed to parse function input type '{kind}': {err}" + )) + })?; + + if !val.type_check(&kind) { + return Err(ContractCallError::TypeError(val.clone(), kind.clone())); } } @@ -1567,8 +1482,8 @@ impl EthereumAdapterTrait for EthereumAdapter { let req = { let encoded_call = call .function - .encode_input(&call.args) - .map_err(ContractCallError::EncodingError)?; + .abi_encode_input(&call.args) + .map_err(|err| ContractCallError::EncodingError(err))?; call::Request::new(call.address, encoded_call, index) }; @@ -1586,15 +1501,14 @@ impl EthereumAdapterTrait for EthereumAdapter { logger: &Logger, resp: call::Response, call: &ContractCall, - ) -> (Option>, call::Source) { + ) -> (Option>, call::Source) { let call::Response { retval, source, req: _, } = resp; - use call::Retval::*; match retval { - Value(output) => match call.function.decode_output(&output) { + call::Retval::Value(output) => match call.function.abi_decode_output(&output) { Ok(tokens) => (Some(tokens), source), Err(e) => { // Decode failures are reverts. The reasoning is that if Solidity fails to @@ -1604,7 +1518,7 @@ impl EthereumAdapterTrait for EthereumAdapter { (None, call::Source::Rpc) } }, - Null => { + call::Retval::Null => { // We got a `0x` response. For old Geth, this can mean a revert. It can also be // that the contract actually returned an empty response. A view call is meant // to return something, so we treat empty responses the same as reverts. @@ -1616,7 +1530,7 @@ impl EthereumAdapterTrait for EthereumAdapter { fn log_call_error(logger: &Logger, e: &ContractCallError, call: &ContractCall) { match e { - ContractCallError::Web3Error(e) => error!(logger, + ContractCallError::AlloyError(e) => error!(logger, "Ethereum node returned an error when calling function \"{}\" of contract \"{}\": {}", call.function.name, call.contract_name, e), ContractCallError::Timeout => error!(logger, @@ -1688,33 +1602,48 @@ impl EthereumAdapterTrait for EthereumAdapter { &self, logger: Logger, chain_store: Arc, - block_hashes: HashSet, + block_hashes: HashSet, ) -> Result>, Error> { let block_hashes: Vec<_> = block_hashes.iter().cloned().collect(); // Search for the block in the store first then use json-rpc as a backup. 
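Call arguments and return values are now `DynSolValue`s checked against `DynSolType`s parsed from the ABI's selector types, replacing ethabi's `Token`/`ParamType` pair. A sketch of that pairing, assuming the alloy facade exposes `dyn_abi` and that `DynSolType::matches` performs the same check as the adapter's `type_check` extension:

```rust
use alloy::dyn_abi::{DynSolType, DynSolValue};
use alloy::primitives::{Address, U256};

fn main() {
    // Selector types straight from the ABI, e.g. a function's input list.
    let uint_ty: DynSolType = "uint256".parse().expect("valid selector type");
    let addr_ty: DynSolType = "address".parse().expect("valid selector type");

    let amount = DynSolValue::Uint(U256::from(42u64), 256);
    let owner = DynSolValue::Address(Address::ZERO);

    assert!(uint_ty.matches(&amount));
    assert!(addr_ty.matches(&owner));
    // A mismatch like this is what becomes a `ContractCallError::TypeError`.
    assert!(!uint_ty.matches(&owner));
}
```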
- let mut blocks: Vec> = chain_store + let mut blocks: Vec<_> = chain_store .cheap_clone() .blocks(block_hashes.iter().map(|&b| b.into()).collect::>()) .await .map_err(|e| error!(&logger, "Error accessing block cache {}", e)) .unwrap_or_default() .into_iter() - .filter_map(|value| json::from_value(value).ok()) - .map(Arc::new) + .filter_map(|value| { + json::from_value(value.clone()) + .map_err(|e| { + let block_num = value.get("number").and_then(|n| n.as_u64()); + let block_hash = value.get("hash").and_then(|h| h.as_str()); + warn!( + &logger, + "Failed to deserialize cached block #{:?} {:?}: {}. \ + This may indicate stale cache data from a previous version. \ + Block will be re-fetched from RPC.", + block_num, + block_hash, + e + ); + }) + .ok() + }) + .map(|b| Arc::new(LightEthereumBlock::new(b))) .collect(); let missing_blocks = Vec::from_iter( block_hashes .into_iter() - .filter(|hash| !blocks.iter().any(|b| b.hash == Some(*hash))), + .filter(|hash| !blocks.iter().any(|b| b.hash() == *hash)), ); // Return a stream that lazily loads batches of blocks. debug!(logger, "Requesting {} block(s)", missing_blocks.len()); - let new_blocks = self + let new_blocks: Vec<_> = self .load_blocks_rpc(logger.clone(), missing_blocks) - .collect() - .compat() + .try_collect() .await?; let upsert_blocks: Vec<_> = new_blocks .iter() @@ -1728,7 +1657,7 @@ impl EthereumAdapterTrait for EthereumAdapter { error!(logger, "Error writing to block cache {}", e); } blocks.extend(new_blocks); - blocks.sort_by_key(|block| block.number); + blocks.sort_by_key(|block| block.number()); Ok(blocks) } } @@ -1771,7 +1700,7 @@ pub(crate) async fn blocks_with_triggers( debug!(logger, "Finding nearest valid `to` block to {}", to); let to_ptr = eth.next_existing_ptr_to_number(&logger, to).await?; - let to_hash = to_ptr.hash_as_h256(); + let to_hash = to_ptr.hash.as_b256(); let to = to_ptr.block_number(); // This is for `start` triggers which can be initialization handlers which needs to be run @@ -1848,8 +1777,10 @@ pub(crate) async fn blocks_with_triggers( .await .with_context(|| format!("Failed to obtain triggers for block {}", to))?; - let mut block_hashes: HashSet = - triggers.iter().map(EthereumTrigger::block_hash).collect(); + let mut block_hashes: HashSet = triggers + .iter() + .map(|trigger| trigger.block_hash()) + .collect(); let mut triggers_by_block: HashMap> = triggers.into_iter().fold(HashMap::new(), |mut map, t| { map.entry(t.block_number()).or_default().push(t); @@ -1869,7 +1800,7 @@ pub(crate) async fn blocks_with_triggers( .await? .into_iter() .map( - move |block| match triggers_by_block.remove(&(block.number() as BlockNumber)) { + move |block| match triggers_by_block.remove(&(block.number())) { Some(triggers) => Ok(BlockWithTriggers::new( BlockFinality::Final(block), triggers, @@ -1944,9 +1875,8 @@ pub(crate) async fn get_calls( .calls_in_block( &logger, subgraph_metrics.clone(), - BlockNumber::try_from(ethereum_block.block.number.unwrap().as_u64()) - .unwrap(), - ethereum_block.block.hash.unwrap(), + ethereum_block.block.number(), + ethereum_block.block.hash(), ) .await? 
}; @@ -1973,15 +1903,15 @@ pub(crate) fn parse_log_triggers( .transaction_receipts .iter() .flat_map(move |receipt| { - receipt.logs.iter().enumerate().map(move |(index, log)| { + receipt.logs().iter().enumerate().map(move |(index, log)| { let requires_transaction_receipt = log - .topics + .topics() .first() .map(|signature| { log_filter.requires_transaction_receipt( signature, - Some(&log.address), - &log.topics, + Some(&log.address()), + log.topics(), ) }) .unwrap_or(false); @@ -2032,7 +1962,7 @@ pub(crate) fn parse_block_triggers( return vec![]; } - let block_ptr = BlockPtr::from(&block.ethereum_block); + let block_ptr = block.ethereum_block.block.block_ptr(); let trigger_every_block = block_filter.trigger_every_block; let call_filter = EthereumCallFilter::from(block_filter); let block_ptr2 = block_ptr.cheap_clone(); @@ -2102,9 +2032,9 @@ pub(crate) fn parse_block_triggers( async fn fetch_receipt_from_ethereum_client( eth: &EthereumAdapter, - transaction_hash: &H256, -) -> anyhow::Result { - match eth.web3.eth().transaction_receipt(*transaction_hash).await { + transaction_hash: B256, +) -> anyhow::Result { + match eth.alloy.get_transaction_receipt(transaction_hash).await { Ok(Some(receipt)) => Ok(receipt), Ok(None) => bail!("Could not find transaction receipt"), Err(error) => bail!("Failed to fetch transaction receipt: {}", error), @@ -2125,14 +2055,14 @@ async fn filter_call_triggers_from_unsuccessful_transactions( let initial_number_of_triggers = block.trigger_data.len(); // Get the transaction hash from each call trigger - let transaction_hashes: BTreeSet = block + let transaction_hashes: BTreeSet = block .trigger_data .iter() .filter_map(|trigger| match trigger.as_chain() { Some(EthereumTrigger::Call(call_trigger)) => Some(call_trigger.transaction_hash), _ => None, }) - .collect::>>() + .collect::>>() .ok_or(anyhow!( "failed to obtain transaction hash from call triggers" ))?; @@ -2143,12 +2073,13 @@ async fn filter_call_triggers_from_unsuccessful_transactions( } // And obtain all Transaction values for the calls in this block. - let transactions: Vec<&Transaction> = { + let transactions: Vec<&AnyTransaction> = { match &block.block { BlockFinality::Final(ref block) => block - .transactions + .transactions() + .ok_or_else(|| anyhow!("Block transactions not available"))? .iter() - .filter(|transaction| transaction_hashes.contains(&transaction.hash)) + .filter(|transaction| transaction_hashes.contains(&transaction.tx_hash())) .collect(), BlockFinality::NonFinal(_block_with_calls) => { unreachable!( @@ -2170,21 +2101,21 @@ async fn filter_call_triggers_from_unsuccessful_transactions( // We'll also need the receipts for those transactions. In this step we collect all receipts // we have in store for the current block. - let mut receipts = chain_store - .transaction_receipts_in_block(&block.ptr().hash_as_h256()) + let mut receipts: BTreeMap = chain_store + .transaction_receipts_in_block(&block.ptr().hash.as_b256()) .await? .into_iter() .map(|receipt| (receipt.transaction_hash, receipt)) - .collect::>(); + .collect::>(); // Do we have a receipt for each transaction under analysis? 
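When a receipt is missing from the store, `fetch_receipt_from_ethereum_client` asks the provider directly, and the success flag now comes straight off the receipt. A sketch combining both steps, assuming the standard alloy receipt type whose `status()` reports execution success; the helper name is illustrative:

```rust
use alloy::primitives::B256;
use alloy::providers::Provider;

async fn tx_succeeded(provider: &impl Provider, hash: B256) -> anyhow::Result<bool> {
    let receipt = provider
        .get_transaction_receipt(hash)
        .await?
        .ok_or_else(|| anyhow::anyhow!("could not find transaction receipt for {hash}"))?;

    // `false` means the transaction reverted, so its call triggers get dropped.
    Ok(receipt.status())
}
```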
- let mut receipts_and_transactions: Vec<(&Transaction, LightTransactionReceipt)> = Vec::new(); - let mut transactions_without_receipt: Vec<&Transaction> = Vec::new(); + let mut receipts_and_transactions: Vec<(&AnyTransaction, LightTransactionReceipt)> = Vec::new(); + let mut transactions_without_receipt: Vec<&AnyTransaction> = Vec::new(); for transaction in transactions.iter() { - if let Some(receipt) = receipts.remove(&transaction.hash) { - receipts_and_transactions.push((transaction, receipt)); + if let Some(receipt) = receipts.remove(&transaction.tx_hash()) { + receipts_and_transactions.push((*transaction, receipt)); } else { - transactions_without_receipt.push(transaction); + transactions_without_receipt.push(*transaction); } } @@ -2192,7 +2123,7 @@ async fn filter_call_triggers_from_unsuccessful_transactions( let futures = transactions_without_receipt .iter() .map(|transaction| async move { - fetch_receipt_from_ethereum_client(eth, &transaction.hash) + fetch_receipt_from_ethereum_client(eth, transaction.tx_hash()) .await .map(|receipt| (transaction, receipt)) }); @@ -2207,12 +2138,9 @@ async fn filter_call_triggers_from_unsuccessful_transactions( // additional Ethereum API calls for future scans on this block. // With all transactions and receipts in hand, we can evaluate the success of each transaction - let mut transaction_success: BTreeMap<&H256, bool> = BTreeMap::new(); + let mut transaction_success: BTreeMap = BTreeMap::new(); for (transaction, receipt) in receipts_and_transactions.into_iter() { - transaction_success.insert( - &transaction.hash, - evaluate_transaction_status(receipt.status), - ); + transaction_success.insert(transaction.tx_hash(), receipt.status); } // Confidence check: Did we inspect the status of all transactions? @@ -2254,11 +2182,11 @@ async fn filter_call_triggers_from_unsuccessful_transactions( /// Deprecated. Wraps the [`fetch_transaction_receipts_in_batch`] in a retry loop. async fn fetch_transaction_receipts_in_batch_with_retry( - web3: Arc>, - hashes: Vec, - block_hash: H256, + alloy: Arc, + hashes: Vec, + block_hash: B256, logger: Logger, -) -> Result>, IngestorError> { +) -> Result>, IngestorError> { let retry_log_message = format!( "batch eth_getTransactionReceipt RPC call for block {:?}", block_hash @@ -2269,51 +2197,84 @@ async fn fetch_transaction_receipts_in_batch_with_retry( .no_logging() .timeout_secs(ENV_VARS.json_rpc_timeout.as_secs()) .run(move || { - let web3 = web3.cheap_clone(); + let alloy = alloy.cheap_clone(); let hashes = hashes.clone(); let logger = logger.cheap_clone(); - fetch_transaction_receipts_in_batch(web3, hashes, block_hash, logger).boxed() + fetch_transaction_receipts_in_batch(alloy, hashes, block_hash, logger).boxed() }) .await .map_err(|_timeout| anyhow!(block_hash).into()) } -/// Deprecated. Attempts to fetch multiple transaction receipts in a batching contex. +/// Deprecated. Attempts to fetch multiple transaction receipts in a batching context. 
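/// With alloy this now issues a single JSON-RPC batch through `batch_get_transaction_receipts`
/// (defined below) rather than the old web3 `Batch` transport.
///
/// A minimal usage sketch, mirroring the call made by the retry wrapper above (the `alloy`,
/// `hashes`, `block_hash` and `logger` bindings are assumed to be in scope):
///
/// ```ignore
/// let receipts = fetch_transaction_receipts_in_batch(alloy, hashes, block_hash, logger).await?;
/// ```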
async fn fetch_transaction_receipts_in_batch( - web3: Arc>, - hashes: Vec, - block_hash: H256, + alloy: Arc, + hashes: Vec, + block_hash: B256, logger: Logger, -) -> Result>, IngestorError> { - let batching_web3 = Web3::new(Batch::new(web3.transport().clone())); - let eth = batching_web3.eth(); - let receipt_futures = hashes - .into_iter() - .map(move |hash| { - let logger = logger.cheap_clone(); - eth.transaction_receipt(hash) - .map_err(IngestorError::from) - .and_then(move |some_receipt| async move { - resolve_transaction_receipt(some_receipt, hash, block_hash, logger) - }) - }) - .collect::>(); +) -> Result>, IngestorError> { + // Use the batch method to get all receipts at once + let receipts = batch_get_transaction_receipts(alloy, hashes.clone()) + .await + .map_err(|e| { + IngestorError::Unknown(anyhow::anyhow!("Batch receipt fetch failed: {}", e)) + })?; + + let mut result = Vec::new(); + for (receipt, hash) in receipts.into_iter().zip(hashes.iter()) { + if let Some(receipt) = receipt { + let validated_receipt = resolve_transaction_receipt( + Some(receipt), + *hash, + block_hash, + logger.cheap_clone(), + )?; + result.push(Arc::new(validated_receipt)); + } else { + return Err(IngestorError::ReceiptUnavailable(block_hash, *hash)); + } + } - batching_web3.transport().submit_batch().await?; + Ok(result) +} - let mut collected = vec![]; - for receipt in receipt_futures.into_iter() { - collected.push(Arc::new(receipt.await?)) +async fn batch_get_transaction_receipts( + provider: Arc, + tx_hashes: Vec, +) -> Result>, Box> { + let mut batch = alloy::rpc::client::BatchRequest::new(provider.client()); + let mut receipt_futures = Vec::new(); + + // Add all receipt requests to batch + for tx_hash in &tx_hashes { + let receipt_future = batch + .add_call::<(B256,), Option>( + "eth_getTransactionReceipt", + &(*tx_hash,), + )?; + receipt_futures.push(receipt_future); } - Ok(collected) + + // Execute batch + batch.send().await?; + + // Collect results in order + let mut results = Vec::new(); + for receipt_future in receipt_futures { + let receipt = receipt_future.await?; + results.push(receipt); + } + + Ok(results) } pub(crate) async fn check_block_receipt_support( - web3: Arc>, - block_hash: H256, + alloy: Arc, + block_hash: B256, supports_eip_1898: bool, call_only: bool, ) -> Result<(), Error> { + use alloy::rpc::types::BlockId; if call_only { return Err(anyhow!("Provider is call-only")); } @@ -2323,7 +2284,7 @@ pub(crate) async fn check_block_receipt_support( } // Fetch block receipts from the provider for the latest block. - let block_receipts_result = web3.eth().block_receipts(BlockId::Hash(block_hash)).await; + let block_receipts_result = alloy.get_block_receipts(BlockId::from(block_hash)).await; // Determine if the provider supports block receipts based on the fetched result. match block_receipts_result { @@ -2337,27 +2298,27 @@ pub(crate) async fn check_block_receipt_support( // based on whether block receipts are supported or individual transaction receipts // need to be fetched. 
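// When `supports_block_receipts` is true, a single `eth_getBlockReceipts` request is made via
// `fetch_block_receipts_with_retry`; otherwise receipts are fetched per transaction, either in a
// JSON-RPC batch or with bounded concurrency, depending on `ENV_VARS.fetch_receipts_in_batches`.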
async fn fetch_receipts_with_retry( - web3: Arc>, - hashes: Vec, - block_hash: H256, + alloy: Arc, + hashes: Vec, + block_hash: B256, logger: Logger, supports_block_receipts: bool, -) -> Result>, IngestorError> { +) -> Result>, IngestorError> { if supports_block_receipts { - return fetch_block_receipts_with_retry(web3, hashes, block_hash, logger).await; + return fetch_block_receipts_with_retry(alloy, hashes, block_hash, logger).await; } - fetch_individual_receipts_with_retry(web3, hashes, block_hash, logger).await + fetch_individual_receipts_with_retry(alloy, hashes, block_hash, logger).await } // Fetches receipts for each transaction in the block individually. async fn fetch_individual_receipts_with_retry( - web3: Arc>, - hashes: Vec, - block_hash: H256, + alloy: Arc, + hashes: Vec, + block_hash: B256, logger: Logger, -) -> Result>, IngestorError> { +) -> Result>, IngestorError> { if ENV_VARS.fetch_receipts_in_batches { - return fetch_transaction_receipts_in_batch_with_retry(web3, hashes, block_hash, logger) + return fetch_transaction_receipts_in_batch_with_retry(alloy, hashes, block_hash, logger) .await; } @@ -2366,7 +2327,7 @@ async fn fetch_individual_receipts_with_retry( let receipt_stream = hash_stream .map(move |tx_hash| { fetch_transaction_receipt_with_retry( - web3.cheap_clone(), + alloy.cheap_clone(), tx_hash, block_hash, logger.cheap_clone(), @@ -2374,19 +2335,20 @@ async fn fetch_individual_receipts_with_retry( }) .buffered(ENV_VARS.block_ingestor_max_concurrent_json_rpc_calls); - tokio_stream::StreamExt::collect::>, IngestorError>>( - receipt_stream, - ) + tokio_stream::StreamExt::collect::< + Result>, IngestorError>, + >(receipt_stream) .await } /// Fetches transaction receipts of all transactions in a block with `eth_getBlockReceipts` call. async fn fetch_block_receipts_with_retry( - web3: Arc>, - hashes: Vec, - block_hash: H256, + alloy: Arc, + hashes: Vec, + block_hash: B256, logger: Logger, -) -> Result>, IngestorError> { +) -> Result>, IngestorError> { + use graph::prelude::alloy::rpc::types::BlockId; let logger = logger.cheap_clone(); let retry_log_message = format!("eth_getBlockReceipts RPC call for block {:?}", block_hash); @@ -2395,7 +2357,7 @@ async fn fetch_block_receipts_with_retry( .redact_log_urls(true) .limit(ENV_VARS.request_retries) .timeout_secs(ENV_VARS.json_rpc_timeout.as_secs()) - .run(move || web3.eth().block_receipts(BlockId::Hash(block_hash)).boxed()) + .run(move || alloy.get_block_receipts(BlockId::from(block_hash)).boxed()) .await .map_err(|_timeout| -> IngestorError { anyhow!(block_hash).into() })?; @@ -2424,23 +2386,27 @@ async fn fetch_block_receipts_with_retry( } } -/// Retries fetching a single transaction receipt. +/// Retries fetching a single transaction receipt using alloy, then converts to web3 format. 
async fn fetch_transaction_receipt_with_retry( - web3: Arc>, - transaction_hash: H256, - block_hash: H256, + alloy: Arc, + transaction_hash: B256, + block_hash: B256, logger: Logger, -) -> Result, IngestorError> { +) -> Result, IngestorError> { let logger = logger.cheap_clone(); let retry_log_message = format!( "eth_getTransactionReceipt RPC call for transaction {:?}", transaction_hash ); + retry(retry_log_message, &logger) .redact_log_urls(true) .limit(ENV_VARS.request_retries) .timeout_secs(ENV_VARS.json_rpc_timeout.as_secs()) - .run(move || web3.eth().transaction_receipt(transaction_hash).boxed()) + .run(move || { + let alloy_clone = alloy.clone(); + async move { alloy_clone.get_transaction_receipt(transaction_hash).await }.boxed() + }) .await .map_err(|_timeout| anyhow!(block_hash).into()) .and_then(move |some_receipt| { @@ -2450,11 +2416,11 @@ async fn fetch_transaction_receipt_with_retry( } fn resolve_transaction_receipt( - transaction_receipt: Option, - transaction_hash: H256, - block_hash: H256, + transaction_receipt: Option, + transaction_hash: B256, + block_hash: B256, logger: Logger, -) -> Result { +) -> Result { match transaction_receipt { // A receipt might be missing because the block was uncled, and the transaction never // made it back into the main chain. @@ -2527,12 +2493,16 @@ async fn get_logs_and_transactions( // Not all logs have associated transaction hashes, nor do all triggers require them. // We also restrict receipts retrieval for some api versions. - let transaction_hashes_by_block: HashMap> = logs + let transaction_hashes_by_block: HashMap> = logs .iter() .filter(|_| unified_api_version.equal_or_greater_than(&API_VERSION_0_0_7)) .filter(|log| { - if let Some(signature) = log.topics.first() { - log_filter.requires_transaction_receipt(signature, Some(&log.address), &log.topics) + if let Some(signature) = log.topics().first() { + log_filter.requires_transaction_receipt( + signature, + Some(&log.address()), + log.topics(), + ) } else { false } @@ -2547,7 +2517,7 @@ async fn get_logs_and_transactions( } }) .fold( - HashMap::>::new(), + HashMap::>::new(), |mut acc, (block_hash, txn_hash)| { acc.entry(block_hash).or_default().insert(txn_hash); acc @@ -2569,6 +2539,7 @@ async fn get_logs_and_transactions( let optional_receipt = log .transaction_hash .and_then(|txn| transaction_receipts_by_hash.get(&txn).cloned()); + let value = EthereumTrigger::Log(LogRef::FullLog(Arc::new(log), optional_receipt)); log_triggers.push(value); } @@ -2579,13 +2550,14 @@ async fn get_logs_and_transactions( /// Tries to retrive all transaction receipts for a set of transaction hashes. async fn get_transaction_receipts_for_transaction_hashes( adapter: &EthereumAdapter, - transaction_hashes_by_block: &HashMap>, + transaction_hashes_by_block: &HashMap>, subgraph_metrics: Arc, logger: Logger, -) -> Result>, anyhow::Error> { +) -> Result>, anyhow::Error> { use std::collections::hash_map::Entry::Vacant; - let mut receipts_by_hash: HashMap> = HashMap::new(); + let mut receipts_by_hash: HashMap> = + HashMap::new(); // Return early if input set is empty if transaction_hashes_by_block.is_empty() { @@ -2594,17 +2566,17 @@ async fn get_transaction_receipts_for_transaction_hashes( // Keep a record of all unique transaction hashes for which we'll request receipts. We will // later use this to check if we have collected the receipts from all required transactions. 
- let mut unique_transaction_hashes: HashSet<&H256> = HashSet::new(); + let mut unique_transaction_hashes: HashSet<&B256> = HashSet::new(); // Request transaction receipts concurrently let receipt_futures = FuturesUnordered::new(); - let web3 = Arc::clone(&adapter.web3); + let alloy = Arc::clone(&adapter.alloy); for (block_hash, transaction_hashes) in transaction_hashes_by_block { for transaction_hash in transaction_hashes { unique_transaction_hashes.insert(transaction_hash); let receipt_future = fetch_transaction_receipt_with_retry( - web3.cheap_clone(), + alloy.cheap_clone(), *transaction_hash, *block_hash, logger.cheap_clone(), @@ -2666,11 +2638,12 @@ mod tests { EthereumBlockWithCalls, }; use graph::blockchain::BlockPtr; - use graph::prelude::ethabi::ethereum_types::U64; - use graph::prelude::web3::transports::test::TestTransport; - use graph::prelude::web3::types::{Address, Block, Bytes, H256}; - use graph::prelude::web3::Web3; - use graph::prelude::EthereumCall; + use graph::components::ethereum::AnyBlock; + use graph::prelude::alloy::network::AnyNetwork; + use graph::prelude::alloy::primitives::{Address, Bytes, B256}; + use graph::prelude::alloy::providers::mock::Asserter; + use graph::prelude::alloy::providers::ProviderBuilder; + use graph::prelude::{create_minimal_block_for_test, EthereumCall, LightEthereumBlock}; use jsonrpc_core::serde_json::{self, Value}; use std::collections::HashSet; use std::iter::FromIterator; @@ -2678,13 +2651,11 @@ mod tests { #[test] fn parse_block_triggers_every_block() { + let block = create_minimal_block_for_test(2, hash(2)); + let block = EthereumBlockWithCalls { ethereum_block: EthereumBlock { - block: Arc::new(Block { - hash: Some(hash(2)), - number: Some(U64::from(2)), - ..Default::default() - }), + block: Arc::new(LightEthereumBlock::new(AnyBlock::from(block))), ..Default::default() }, calls: Some(vec![EthereumCall { @@ -2716,8 +2687,6 @@ mod tests { #[graph::test] async fn test_check_block_receipts_support() { - let mut transport = TestTransport::default(); - let json_receipts = r#"[{ "blockHash": "0x23f785604642e91613881fc3c9d16740ee416e340fd36f3fa2239f203d68fd33", "blockNumber": "0x12f7f81", @@ -2739,23 +2708,24 @@ mod tests { // Helper function to run a single test case async fn run_test_case( - transport: &mut TestTransport, json_response: &str, expected_err: Option<&str>, supports_eip_1898: bool, call_only: bool, ) -> Result<(), anyhow::Error> { let json_value: Value = serde_json::from_str(json_response).unwrap(); - // let block_json: Value = serde_json::from_str(block).unwrap(); - transport.set_response(json_value); - // transport.set_response(block_json); - // transport.add_response(json_value); - #[allow(clippy::arc_with_non_send_sync)] - let web3 = Arc::new(Web3::new(transport.clone())); + let asserter = Asserter::new(); + let provider = ProviderBuilder::<_, _, AnyNetwork>::default() + .network::() + .with_recommended_fillers() + .connect_mocked_client(asserter.clone()); + + asserter.push_success(&json_value); + let result = check_block_receipt_support( - web3.clone(), - H256::zero(), + Arc::new(provider), + B256::ZERO, supports_eip_1898, call_only, ) @@ -2780,38 +2750,25 @@ mod tests { } // Test case 1: Valid block receipts - run_test_case(&mut transport, json_receipts, None, true, false) + run_test_case(json_receipts, None, true, false) .await .unwrap(); // Test case 2: Empty block receipts - run_test_case( - &mut transport, - json_empty, - Some("Block receipts are empty"), - true, - false, - ) - .await - .unwrap(); + 
run_test_case(json_empty, Some("Block receipts are empty"), true, false) + .await + .unwrap(); // Test case 3: Null response - run_test_case( - &mut transport, - "null", - Some("Block receipts are empty"), - true, - false, - ) - .await - .unwrap(); + run_test_case("null", Some("Block receipts are empty"), true, false) + .await + .unwrap(); // Test case 3: Simulating an RPC error // Note: In the context of this test, we cannot directly simulate an RPC error. // Instead, we simulate a response that would cause a decoding error, such as an unexpected key("error"). // The function should handle this as an error case. run_test_case( - &mut transport, r#"{"error":"RPC Error"}"#, Some("Error fetching block receipts:"), true, @@ -2822,7 +2779,6 @@ mod tests { // Test case 5: Does not support EIP-1898 run_test_case( - &mut transport, json_receipts, Some("Provider does not support EIP 1898"), false, @@ -2832,26 +2788,19 @@ mod tests { .unwrap(); // Test case 5: Does not support Call only adapters - run_test_case( - &mut transport, - json_receipts, - Some("Provider is call-only"), - true, - true, - ) - .await - .unwrap(); + run_test_case(json_receipts, Some("Provider is call-only"), true, true) + .await + .unwrap(); } #[test] fn parse_block_triggers_specific_call_not_found() { + let block = create_minimal_block_for_test(2, hash(2)); + + #[allow(unreachable_code)] let block = EthereumBlockWithCalls { ethereum_block: EthereumBlock { - block: Arc::new(Block { - hash: Some(hash(2)), - number: Some(U64::from(2)), - ..Default::default() - }), + block: Arc::new(LightEthereumBlock::new(AnyBlock::from(block))), ..Default::default() }, calls: Some(vec![EthereumCall { @@ -2877,13 +2826,12 @@ mod tests { #[test] fn parse_block_triggers_specific_call_found() { + let block = create_minimal_block_for_test(2, hash(2)); + + #[allow(unreachable_code)] let block = EthereumBlockWithCalls { ethereum_block: EthereumBlock { - block: Arc::new(Block { - hash: Some(hash(2)), - number: Some(U64::from(2)), - ..Default::default() - }), + block: Arc::new(LightEthereumBlock::new(AnyBlock::from(block))), ..Default::default() }, calls: Some(vec![EthereumCall { @@ -2911,11 +2859,11 @@ mod tests { } fn address(id: u64) -> Address { - Address::from_low_u64_be(id) + Address::left_padding_from(&id.to_be_bytes()) } - fn hash(id: u8) -> H256 { - H256::from([id; 32]) + fn hash(id: u8) -> B256 { + B256::from_slice(&[id; 32]) } fn bytes(value: Vec) -> Bytes { diff --git a/chain/ethereum/src/ingestor.rs b/chain/ethereum/src/ingestor.rs index e7821248d90..9e7a8c01725 100644 --- a/chain/ethereum/src/ingestor.rs +++ b/chain/ethereum/src/ingestor.rs @@ -4,14 +4,14 @@ use async_trait::async_trait; use graph::blockchain::client::ChainClient; use graph::blockchain::BlockchainKind; use graph::components::network_provider::ChainName; +use graph::prelude::alloy::primitives::B256; use graph::slog::o; use graph::util::backoff::ExponentialBackoff; use graph::{ blockchain::{BlockHash, BlockIngestor, BlockPtr, IngestorError}, cheap_clone::CheapClone, prelude::{ - error, ethabi::ethereum_types::H256, info, tokio, trace, warn, ChainStore, Error, - EthereumBlockWithCalls, LogCode, Logger, + error, info, tokio, trace, warn, ChainStore, Error, EthereumBlockWithCalls, LogCode, Logger, }, }; use std::{sync::Arc, time::Duration}; @@ -173,8 +173,7 @@ impl PollingBlockIngestor { eth_adapter: &Arc, block_hash: &BlockHash, ) -> Result, IngestorError> { - // TODO: H256::from_slice can panic - let block_hash = H256::from_slice(block_hash.as_slice()); + let block_hash = 
B256::from_slice(block_hash.as_slice()); // Get the fully populated block let block = eth_adapter @@ -211,10 +210,7 @@ impl PollingBlockIngestor { logger: &Logger, eth_adapter: &Arc, ) -> Result { - eth_adapter - .latest_block_header(logger) - .await - .map(|block| block.into()) + eth_adapter.latest_block_ptr(logger).await } async fn eth_adapter(&self) -> anyhow::Result> { diff --git a/chain/ethereum/src/lib.rs b/chain/ethereum/src/lib.rs index fa76f70d799..8850764d63b 100644 --- a/chain/ethereum/src/lib.rs +++ b/chain/ethereum/src/lib.rs @@ -1,5 +1,6 @@ mod adapter; mod buffered_call_cache; +mod call_helper; mod capabilities; pub mod codec; mod data_source; diff --git a/chain/ethereum/src/protobuf/sf.ethereum.r#type.v2.rs b/chain/ethereum/src/protobuf/sf.ethereum.r#type.v2.rs index 4ab8d0a1324..bcb068083df 100644 --- a/chain/ethereum/src/protobuf/sf.ethereum.r#type.v2.rs +++ b/chain/ethereum/src/protobuf/sf.ethereum.r#type.v2.rs @@ -1,14 +1,60 @@ // This file is @generated by prost-build. +/// Block is the representation of the tracing of a block in the Ethereum +/// blockchain. A block is a collection of \[TransactionTrace\] that are grouped +/// together and processed as an atomic unit. Each \[TransactionTrace\] is composed +/// of a series of \[Call\] (a.k.a internal transactions) and there is also at +/// least one call per transaction a.k.a the root call which essentially has the +/// same parameters as the transaction itself (e.g. `from`, `to`, `gas`, `value`, +/// etc.). +/// +/// The exact tracing method used to build the block must be checked against +/// \[DetailLevel\] field. There is two levels of details available, `BASE` and +/// `EXTENDED`. The `BASE` level has been extracted using archive node RPC calls +/// and will contain only the block header, transaction receipts and event logs. +/// Refers to the Firehose service provider to know which blocks are offered on +/// each network. +/// +/// The `EXTENDED` level has been extracted using the Firehose tracer and all +/// fields are available in this Protobuf. +/// +/// The Ethereum block model is used across many chains which means that it +/// happen that certain fields are not available in one chain but are available +/// in another. Each field should be documented when necesssary if it's available +/// on a subset of chains. +/// +/// One major concept to get about the Block is the concept of 'ordinal'. The +/// ordinal is a number that is used to globally order every element of execution +/// that happened throughout the processing of the block like +/// \[TransactionTracer\], \[Call\], \[Log\], \[BalanceChange\], \[StateChange\], etc. +/// Element that have a start and end interval, \[Transaction\] and \[Call\], will +/// have two ordinals: `begin_ordinal` and `end_ordinal`. Element that are +/// executed as "point in time" \[Log\], \[BalanceChange\], \[StateChange\], etc. will +/// have only one ordinal named `ordinal`. If you take all of the message in the +/// Block that have an 'ordinal' field in an array and you sort each element +/// against the `ordinal` field, you will get the exact order of execution of +/// each element in the block. +/// +/// All the 'ordinal' fields in a block are globally unique for the given block, +/// it is **not** a chain-wide global ordering. Furthermore, caution must be take +/// with reverted elements due to execution failure. 
For anything attached to a +/// \[Call\] that has a `state_reverted` field set to `true`, the `ordinal` field +/// is not reliable and should not be used to order the element against other +/// elements in the block as those element might have 0 as the ordinal. Only +/// successful calls have a reliable `ordinal` field. #[derive(Clone, PartialEq, ::prost::Message)] pub struct Block { - #[prost(int32, tag = "1")] - pub ver: i32, + /// Hash is the block's hash. #[prost(bytes = "vec", tag = "2")] pub hash: ::prost::alloc::vec::Vec, + /// Number is the block's height at which this block was mined. #[prost(uint64, tag = "3")] pub number: u64, + /// Size is the size in bytes of the RLP encoding of the block according to Ethereum + /// rules. #[prost(uint64, tag = "4")] pub size: u64, + /// Header contain's the block's header information like its parent hash, the merkel root hash + /// and all other information the form a block. #[prost(message, optional, tag = "5")] pub header: ::core::option::Option, /// Uncles represents block produced with a valid solution but were not actually chosen @@ -18,54 +64,110 @@ pub struct Block { /// field will actually be always empty. #[prost(message, repeated, tag = "6")] pub uncles: ::prost::alloc::vec::Vec, + /// TransactionTraces hold the execute trace of all the transactions that were executed + /// in this block. In in there that you will find most of the Ethereum data model. + /// + /// They are ordered by the order of execution of the transaction in the block. #[prost(message, repeated, tag = "10")] pub transaction_traces: ::prost::alloc::vec::Vec, + /// BalanceChanges here is the array of ETH transfer that happened at the block level + /// outside of the normal transaction flow of a block. The best example of this is mining + /// reward for the block mined, the transfer of ETH to the miner happens outside the normal + /// transaction flow of the chain and is recorded as a `BalanceChange` here since we cannot + /// attached it to any transaction. + /// + /// Only available in DetailLevel: EXTENDED #[prost(message, repeated, tag = "11")] pub balance_changes: ::prost::alloc::vec::Vec, + /// DetailLevel affects the data available in this block. + /// + /// ## DetailLevel_EXTENDED + /// + /// Describes the most complete block, with traces, balance changes, storage + /// changes. It is extracted during the execution of the block. + /// + /// ## DetailLevel_BASE + /// + /// Describes a block that contains only the block header, transaction receipts + /// and event logs: everything that can be extracted using the base JSON-RPC + /// interface + /// () + /// Furthermore, the eth_getTransactionReceipt call has been avoided because it + /// brings only minimal improvements at the cost of requiring an archive node + /// or a full node with complete transaction index. + #[prost(enumeration = "block::DetailLevel", tag = "12")] + pub detail_level: i32, + /// CodeChanges here is the array of smart code change that happened that happened at the block level + /// outside of the normal transaction flow of a block. Some Ethereum's fork like BSC and Polygon + /// has some capabilities to upgrade internal smart contracts used usually to track the validator + /// list. + /// + /// On hard fork, some procedure runs to upgrade the smart contract code to a new version. In those + /// network, a `CodeChange` for each modified smart contract on upgrade would be present here. Note + /// that this happen rarely, so the vast majority of block will have an empty list here. 
+ /// + /// Only available in DetailLevel: EXTENDED #[prost(message, repeated, tag = "20")] pub code_changes: ::prost::alloc::vec::Vec, + /// System calls are introduced in Cancun, along with blobs. They are executed outside of transactions but affect the state. + /// + /// Only available in DetailLevel: EXTENDED + #[prost(message, repeated, tag = "21")] + pub system_calls: ::prost::alloc::vec::Vec, + /// Withdrawals represents the list of validator balance withdrawals processed in this block. + /// Introduced in the Shanghai hard fork (EIP-4895). + /// + /// This field has been added because Geth blocks include withdrawals after Shanghai fork, + /// but our previous Firehose model didn't capture this data. Currently experimental - + /// NOT ready for production use yet as we validate the tracing implementation. + /// + /// Only available when Shanghai fork is active on the chain. + #[prost(message, repeated, tag = "22")] + pub withdrawals: ::prost::alloc::vec::Vec, + /// Ver represents that data model version of the block, it is used internally by Firehose on Ethereum + /// as a validation that we are reading the correct version. + #[prost(int32, tag = "1")] + pub ver: i32, } -/// HeaderOnlyBlock is used to optimally unpack the \[Block\] structure (note the -/// corresponding message number for the `header` field) while consuming less -/// memory, when only the `header` is desired. -/// -/// WARN: this is a client-side optimization pattern and should be moved in the -/// consuming code. -#[derive(Clone, PartialEq, ::prost::Message)] -pub struct HeaderOnlyBlock { - #[prost(message, optional, tag = "5")] - pub header: ::core::option::Option, -} -/// BlockWithRefs is a lightweight block, with traces and transactions -/// purged from the `block` within, and only. It is used in transports -/// to pass block data around. -#[derive(Clone, PartialEq, ::prost::Message)] -pub struct BlockWithRefs { - #[prost(string, tag = "1")] - pub id: ::prost::alloc::string::String, - #[prost(message, optional, tag = "2")] - pub block: ::core::option::Option, - #[prost(message, optional, tag = "3")] - pub transaction_trace_refs: ::core::option::Option, - #[prost(bool, tag = "4")] - pub irreversible: bool, -} -#[derive(Clone, PartialEq, ::prost::Message)] -pub struct TransactionRefs { - #[prost(bytes = "vec", repeated, tag = "1")] - pub hashes: ::prost::alloc::vec::Vec<::prost::alloc::vec::Vec>, -} -#[derive(Clone, PartialEq, ::prost::Message)] -pub struct UnclesHeaders { - #[prost(message, repeated, tag = "1")] - pub uncles: ::prost::alloc::vec::Vec, -} -#[derive(Clone, PartialEq, ::prost::Message)] -pub struct BlockRef { - #[prost(bytes = "vec", tag = "1")] - pub hash: ::prost::alloc::vec::Vec, - #[prost(uint64, tag = "2")] - pub number: u64, +/// Nested message and enum types in `Block`. +pub mod block { + #[derive( + Clone, + Copy, + Debug, + PartialEq, + Eq, + Hash, + PartialOrd, + Ord, + ::prost::Enumeration + )] + #[repr(i32)] + pub enum DetailLevel { + DetaillevelExtended = 0, + /// DETAILLEVEL_TRACE = 1; // TBD + DetaillevelBase = 2, + } + impl DetailLevel { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. 
+ pub fn as_str_name(&self) -> &'static str { + match self { + Self::DetaillevelExtended => "DETAILLEVEL_EXTENDED", + Self::DetaillevelBase => "DETAILLEVEL_BASE", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. + pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "DETAILLEVEL_EXTENDED" => Some(Self::DetaillevelExtended), + "DETAILLEVEL_BASE" => Some(Self::DetaillevelBase), + _ => None, + } + } + } } #[derive(Clone, PartialEq, ::prost::Message)] pub struct BlockHeader { @@ -94,12 +196,10 @@ pub struct BlockHeader { /// consensus algorithm, this field will actually be constant and set to `0x00`. #[prost(message, optional, tag = "8")] pub difficulty: ::core::option::Option, - /// TotalDifficulty is the sum of all previous blocks difficulty including this block difficulty. + /// TotalDifficulty used to be the sum of all previous blocks difficulty including this block difficulty. /// - /// If the Block containing this `BlockHeader` has been produced using the Proof of Stake - /// consensus algorithm, this field will actually be constant and set to the terminal total difficulty - /// that was required to transition to Proof of Stake algorithm, which varies per network. It is set to - /// 58 750 000 000 000 000 000 000 on Ethereum Mainnet and to 10 790 000 on Ethereum Testnet Goerli. + /// It has been deprecated in geth v1.15.0 but was already removed from the JSON-RPC interface for a while + #[deprecated] #[prost(message, optional, tag = "17")] pub total_difficulty: ::core::option::Option, #[prost(uint64, tag = "9")] @@ -147,7 +247,12 @@ pub struct BlockHeader { /// extra_data, /// mix_hash, /// nonce, - /// base_fee_per_gas + /// base_fee_per_gas (to be included only if London fork is active) + /// withdrawals_root (to be included only if Shangai fork is active) + /// blob_gas_used (to be included only if Cancun fork is active) + /// excess_blob_gas (to be included only if Cancun fork is active) + /// parent_beacon_root (to be included only if Cancun fork is active) + /// requests_hash (to be included only if Prague fork is active) /// ])) /// #[prost(bytes = "vec", tag = "16")] @@ -155,12 +260,74 @@ pub struct BlockHeader { /// Base fee per gas according to EIP-1559 (e.g. London Fork) rules, only set if London is present/active on the chain. #[prost(message, optional, tag = "18")] pub base_fee_per_gas: ::core::option::Option, + /// Withdrawals root hash according to EIP-4895 (e.g. Shangai Fork) rules, only set if Shangai is present/active on the chain. + /// + /// Only available in DetailLevel: EXTENDED + #[prost(bytes = "vec", tag = "19")] + pub withdrawals_root: ::prost::alloc::vec::Vec, + /// TxDependency is list of transaction indexes that are dependent on each other in the block + /// header. This is metadata only that was used by the internal Polygon parallel execution engine. + /// + /// This field was available in a few versions on Polygon Mainnet and Polygon Mumbai chains. It was actually + /// removed and is not populated anymore. It's now embedded in the `extraData` field, refer to Polygon source + /// code to determine how to extract it if you need it. + /// + /// Only available in DetailLevel: EXTENDED + #[prost(message, optional, tag = "20")] + pub tx_dependency: ::core::option::Option, + /// BlobGasUsed was added by EIP-4844 and is ignored in legacy headers. + #[prost(uint64, optional, tag = "22")] + pub blob_gas_used: ::core::option::Option, + /// ExcessBlobGas was added by EIP-4844 and is ignored in legacy headers. 
+ #[prost(uint64, optional, tag = "23")] + pub excess_blob_gas: ::core::option::Option, + /// ParentBeaconRoot was added by EIP-4788 and is ignored in legacy headers. + #[prost(bytes = "vec", tag = "24")] + pub parent_beacon_root: ::prost::alloc::vec::Vec, + /// RequestsHash was added by EIP-7685 and is ignored in legacy headers. + #[prost(bytes = "vec", tag = "25")] + pub requests_hash: ::prost::alloc::vec::Vec, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct Uint64NestedArray { + #[prost(message, repeated, tag = "1")] + pub val: ::prost::alloc::vec::Vec, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct Uint64Array { + #[prost(uint64, repeated, tag = "1")] + pub val: ::prost::alloc::vec::Vec, } #[derive(Clone, PartialEq, ::prost::Message)] pub struct BigInt { #[prost(bytes = "vec", tag = "1")] pub bytes: ::prost::alloc::vec::Vec, } +/// TransactionTrace is full trace of execution of the transaction when the +/// it actually executed on chain. +/// +/// It contains all the transaction details like `from`, `to`, `gas`, etc. +/// as well as all the internal calls that were made during the transaction. +/// +/// The `calls` vector contains Call objects which have balance changes, events +/// storage changes, etc. +/// +/// If ordering is important between elements, almost each message like `Log`, +/// `Call`, `StorageChange`, etc. have an ordinal field that is represents "execution" +/// order of the said element against all other elements in this block. +/// +/// Due to how the call tree works doing "naively", looping through all calls then +/// through a Call's element like `logs` while not yielding the elements in the order +/// they were executed on chain. A log in call could have been done before or after +/// another in another call depending on the actual call tree. +/// +/// The `calls` are ordered by creation order and the call tree can be re-computing +/// using fields found in `Call` object (parent/child relationship). +/// +/// Another important thing to note is that even if a transaction succeed, some calls +/// within it could have been reverted internally, if this is important to you, you must +/// check the field `state_reverted` on the `Call` to determine if it was fully committed +/// to the chain or not. #[derive(Clone, PartialEq, ::prost::Message)] pub struct TransactionTrace { /// consensus @@ -205,7 +372,7 @@ pub struct TransactionTrace { /// The value is always set even for transaction before Berlin fork because those before the fork are still legacy transactions. #[prost(enumeration = "transaction_trace::Type", tag = "12")] pub r#type: i32, - /// AcccessList represents the storage access this transaction has agreed to do in which case those storage + /// AccessList represents the storage access this transaction has agreed to do in which case those storage /// access cost less gas unit per access. /// /// This will is populated only if `TransactionTrace.Type == TRX_TYPE_ACCESS_LIST || TRX_TYPE_DYNAMIC_FEE` which @@ -216,6 +383,8 @@ pub struct TransactionTrace { /// /// This will is populated only if `TransactionTrace.Type == TRX_TYPE_DYNAMIC_FEE` which is possible only /// if London fork is active on the chain. 
+ /// + /// Only available in DetailLevel: EXTENDED #[prost(message, optional, tag = "11")] pub max_fee_per_gas: ::core::option::Option, /// MaxPriorityFeePerGas is priority fee per gas the user to pay in extra to the miner on top of the block's @@ -223,6 +392,8 @@ pub struct TransactionTrace { /// /// This will is populated only if `TransactionTrace.Type == TRX_TYPE_DYNAMIC_FEE` which is possible only /// if London fork is active on the chain. + /// + /// Only available in DetailLevel: EXTENDED #[prost(message, optional, tag = "13")] pub max_priority_fee_per_gas: ::core::option::Option, /// meta @@ -232,20 +403,97 @@ pub struct TransactionTrace { pub hash: ::prost::alloc::vec::Vec, #[prost(bytes = "vec", tag = "22")] pub from: ::prost::alloc::vec::Vec, + /// Only available in DetailLevel: EXTENDED + /// Known Issues + /// - Version 3: + /// Field not populated. It will be empty. + /// + /// Fixed in `Version 4`, see for information about block versions. #[prost(bytes = "vec", tag = "23")] pub return_data: ::prost::alloc::vec::Vec, + /// Only available in DetailLevel: EXTENDED #[prost(bytes = "vec", tag = "24")] pub public_key: ::prost::alloc::vec::Vec, + /// The block's global ordinal when the transaction started executing, refer to + /// \[Block\] documentation for further information about ordinals and total ordering. #[prost(uint64, tag = "25")] pub begin_ordinal: u64, + /// The block's global ordinal when the transaction finished executing, refer to + /// \[Block\] documentation for further information about ordinals and total ordering. #[prost(uint64, tag = "26")] pub end_ordinal: u64, + /// TransactionTraceStatus is the status of the transaction execution and will let you know if the transaction + /// was successful or not. + /// + /// ## Explanation relevant only for blocks with `DetailLevel: EXTENDED` + /// + /// A successful transaction has been recorded to the blockchain's state for calls in it that were successful. + /// This means it's possible only a subset of the calls were properly recorded, refer to \[calls[\].state_reverted] field + /// to determine which calls were reverted. + /// + /// A quirks of the Ethereum protocol is that a transaction `FAILED` or `REVERTED` still affects the blockchain's + /// state for **some** of the state changes. Indeed, in those cases, the transactions fees are still paid to the miner + /// which means there is a balance change for the transaction's emitter (e.g. `from`) to pay the gas fees, an optional + /// balance change for gas refunded to the transaction's emitter (e.g. `from`) and a balance change for the miner who + /// received the transaction fees. There is also a nonce change for the transaction's emitter (e.g. `from`). + /// + /// This means that to properly record the state changes for a transaction, you need to conditionally procees the + /// transaction's status. + /// + /// For a `SUCCEEDED` transaction, you iterate over the `calls` array and record the state changes for each call for + /// which `state_reverted == false` (if a transaction succeeded, the call at #0 will always `state_reverted == false` + /// because it aligns with the transaction). + /// + /// For a `FAILED` or `REVERTED` transaction, you iterate over the root call (e.g. 
at #0, will always exist) for + /// balance changes you process those where `reason` is either `REASON_GAS_BUY`, `REASON_GAS_REFUND` or + /// `REASON_REWARD_TRANSACTION_FEE` and for nonce change, still on the root call, you pick the nonce change which the + /// smallest ordinal (if more than one). #[prost(enumeration = "TransactionTraceStatus", tag = "30")] pub status: i32, #[prost(message, optional, tag = "31")] pub receipt: ::core::option::Option, + /// Only available in DetailLevel: EXTENDED #[prost(message, repeated, tag = "32")] pub calls: ::prost::alloc::vec::Vec, + /// BlobGas is the amount of gas the transaction is going to pay for the blobs, this is a computed value + /// equivalent to `self.blob_gas_fee_cap * len(self.blob_hashes)` and provided in the model for convenience. + /// + /// This is specified by + /// + /// This will is populated only if `TransactionTrace.Type == TRX_TYPE_BLOB` which is possible only + /// if Cancun fork is active on the chain. + #[prost(uint64, optional, tag = "33")] + pub blob_gas: ::core::option::Option, + /// BlobGasFeeCap is the maximum fee per data gas the user is willing to pay for the data gas used. + /// + /// This is specified by + /// + /// This will is populated only if `TransactionTrace.Type == TRX_TYPE_BLOB` which is possible only + /// if Cancun fork is active on the chain. + #[prost(message, optional, tag = "34")] + pub blob_gas_fee_cap: ::core::option::Option, + /// BlobHashes field represents a list of hash outputs from 'kzg_to_versioned_hash' which + /// essentially is a version byte + the sha256 hash of the blob commitment (e.g. + /// `BLOB_COMMITMENT_VERSION_KZG + sha256(commitment)\[1:\]`. + /// + /// This is specified by + /// + /// This will is populated only if `TransactionTrace.Type == TRX_TYPE_BLOB` which is possible only + /// if Cancun fork is active on the chain. + #[prost(bytes = "vec", repeated, tag = "35")] + pub blob_hashes: ::prost::alloc::vec::Vec<::prost::alloc::vec::Vec>, + /// SetCodeAuthorizations represents the authorizations of a transaction to set code to an EOA (Externally Owned Accounts) + /// as defined in EIP-7702. The list will contain all the authorizations as they were specified in the + /// transaction itself regardless of their validity. If you need to determined if a given authorization was + /// correctly applied on chain's state, refer to \[SetCodeAuthorization.discarded\] field that records + /// if the authorization was discarded or not by the chain due to invalidity. + /// + /// This is specified by + /// + /// This will is populated only if `TransactionTrace.Type == TRX_TYPE_SET_CODE` which is possible only + /// if Prague fork is active on the chain. + #[prost(message, repeated, tag = "36")] + pub set_code_authorizations: ::prost::alloc::vec::Vec, } /// Nested message and enum types in `TransactionTrace`. pub mod transaction_trace { @@ -264,15 +512,38 @@ pub mod transaction_trace { pub enum Type { /// All transactions that ever existed prior Berlin fork before EIP-2718 was implemented. TrxTypeLegacy = 0, - /// Field that specifies an access list of contract/storage_keys that is going to be used + /// Transaction that specicy an access list of contract/storage_keys that is going to be used /// in this transaction. /// /// Added in Berlin fork (EIP-2930). 
TrxTypeAccessList = 1, - /// Transaction that specifies an access list just like TRX_TYPE_ACCESS_LIST but in addition defines the + /// Transaction that specifis an access list just like TRX_TYPE_ACCESS_LIST but in addition defines the /// max base gas gee and max priority gas fee to pay for this transaction. Transaction's of those type are /// executed against EIP-1559 rules which dictates a dynamic gas cost based on the congestion of the network. TrxTypeDynamicFee = 2, + /// Transaction which contain a large amount of data that cannot be accessed by EVM execution, but whose commitment + /// can be accessed. The format is intended to be fully compatible with the format that will be used in full sharding. + /// + /// Transaction that defines an access list just like TRX_TYPE_ACCESS_LIST and enables dynamic fee just like + /// TRX_TYPE_DYNAMIC_FEE but in addition defines the fields 'max_fee_per_data_gas' of type 'uint256' and the fields + /// 'blob_versioned_hashes' which represents a list of hash outputs from 'kzg_to_versioned_hash'. + /// + /// Activated in Cancun fork (EIP-4844) + TrxTypeBlob = 3, + /// Transaction that sets code to an EOA (Externally Owned Accounts) + /// + /// Activated in Prague (EIP-7702) + TrxTypeSetCode = 4, + /// Arbitrum-specific transactions + TrxTypeArbitrumDeposit = 100, + TrxTypeArbitrumUnsigned = 101, + TrxTypeArbitrumContract = 102, + TrxTypeArbitrumRetry = 104, + TrxTypeArbitrumSubmitRetryable = 105, + TrxTypeArbitrumInternal = 106, + TrxTypeArbitrumLegacy = 120, + /// OPTIMISM-specific transactions + TrxTypeOptimismDeposit = 126, } impl Type { /// String value of the enum field names used in the ProtoBuf definition. @@ -284,6 +555,18 @@ pub mod transaction_trace { Self::TrxTypeLegacy => "TRX_TYPE_LEGACY", Self::TrxTypeAccessList => "TRX_TYPE_ACCESS_LIST", Self::TrxTypeDynamicFee => "TRX_TYPE_DYNAMIC_FEE", + Self::TrxTypeBlob => "TRX_TYPE_BLOB", + Self::TrxTypeSetCode => "TRX_TYPE_SET_CODE", + Self::TrxTypeArbitrumDeposit => "TRX_TYPE_ARBITRUM_DEPOSIT", + Self::TrxTypeArbitrumUnsigned => "TRX_TYPE_ARBITRUM_UNSIGNED", + Self::TrxTypeArbitrumContract => "TRX_TYPE_ARBITRUM_CONTRACT", + Self::TrxTypeArbitrumRetry => "TRX_TYPE_ARBITRUM_RETRY", + Self::TrxTypeArbitrumSubmitRetryable => { + "TRX_TYPE_ARBITRUM_SUBMIT_RETRYABLE" + } + Self::TrxTypeArbitrumInternal => "TRX_TYPE_ARBITRUM_INTERNAL", + Self::TrxTypeArbitrumLegacy => "TRX_TYPE_ARBITRUM_LEGACY", + Self::TrxTypeOptimismDeposit => "TRX_TYPE_OPTIMISM_DEPOSIT", } } /// Creates an enum from field names used in the ProtoBuf definition. 
@@ -292,6 +575,18 @@ pub mod transaction_trace { "TRX_TYPE_LEGACY" => Some(Self::TrxTypeLegacy), "TRX_TYPE_ACCESS_LIST" => Some(Self::TrxTypeAccessList), "TRX_TYPE_DYNAMIC_FEE" => Some(Self::TrxTypeDynamicFee), + "TRX_TYPE_BLOB" => Some(Self::TrxTypeBlob), + "TRX_TYPE_SET_CODE" => Some(Self::TrxTypeSetCode), + "TRX_TYPE_ARBITRUM_DEPOSIT" => Some(Self::TrxTypeArbitrumDeposit), + "TRX_TYPE_ARBITRUM_UNSIGNED" => Some(Self::TrxTypeArbitrumUnsigned), + "TRX_TYPE_ARBITRUM_CONTRACT" => Some(Self::TrxTypeArbitrumContract), + "TRX_TYPE_ARBITRUM_RETRY" => Some(Self::TrxTypeArbitrumRetry), + "TRX_TYPE_ARBITRUM_SUBMIT_RETRYABLE" => { + Some(Self::TrxTypeArbitrumSubmitRetryable) + } + "TRX_TYPE_ARBITRUM_INTERNAL" => Some(Self::TrxTypeArbitrumInternal), + "TRX_TYPE_ARBITRUM_LEGACY" => Some(Self::TrxTypeArbitrumLegacy), + "TRX_TYPE_OPTIMISM_DEPOSIT" => Some(Self::TrxTypeOptimismDeposit), _ => None, } } @@ -306,22 +601,73 @@ pub struct AccessTuple { #[prost(bytes = "vec", repeated, tag = "2")] pub storage_keys: ::prost::alloc::vec::Vec<::prost::alloc::vec::Vec>, } -/// TransactionTraceWithBlockRef +/// SetCodeAuthorization represents the authorization of a transaction to set code of an EOA (Externally Owned Account) +/// as defined in EIP-7702. +/// +/// The 'authority' field is the address that is authorizing the delegation mechanism. The 'authority' value is computed +/// from the signature contained in the message using the computation +/// `authority = ecrecover(keccak(MAGIC || rlp(\[chain_id, address, nonce\])), y_parity, r, s)` +/// where `MAGIC` is `0x5`, `||` is the bytes concatenation operator, `ecrecover` is the Ethereum signature recovery +/// and `y_parity` is the recovery ID value denoted `v` in the message below. Checking the go-ethereum implementation +/// at might prove easier to "read". +/// +/// We do extract the 'authority' value from the signature in the message and store it in the 'authority' field for +/// convenience so you don't need to perform the computation yourself. #[derive(Clone, PartialEq, ::prost::Message)] -pub struct TransactionTraceWithBlockRef { - #[prost(message, optional, tag = "1")] - pub trace: ::core::option::Option, - #[prost(message, optional, tag = "2")] - pub block_ref: ::core::option::Option, +pub struct SetCodeAuthorization { + /// Discarded determines if this authorization was skipped due to being invalid. As EIP-7702 states, + /// if the authorization is invalid (invalid signature, nonce mismatch, etc.) it must be simply + /// discarded and the transaction is processed as if the authorization was not present in the + /// authorization list. + /// + /// This boolean records if the authorization was discarded or not by the chain due to invalidity. + #[prost(bool, tag = "1")] + pub discarded: bool, + /// ChainID is the chain ID of the chain where the transaction was executed, used + /// to recover the authority from the signature. + #[prost(bytes = "vec", tag = "2")] + pub chain_id: ::prost::alloc::vec::Vec, + /// Address contains the address this account is delegating to. This address usually + /// contain code that this account essentially "delegates" to. + /// + /// Note: This was missing when EIP-7702 was first activated on Holesky, Sepolia, BSC Chapel, + /// BSC Mainnet and Arbitrum Sepolia but was ready for Ethereum Mainnet hard fork. We will backfill + /// those missing values in the near future at which point we will remove this note. 
+ #[prost(bytes = "vec", tag = "8")] + pub address: ::prost::alloc::vec::Vec, + /// Nonce is the nonce of the account that is authorizing delegation mechanism, EIP-7702 rules + /// states that nonce should be verified using this rule: + /// + /// - Verify the nonce of authority is equal to nonce. In case authority does not exist in the trie, + /// verify that nonce is equal to 0. + /// + /// Read SetCodeAuthorization to know how to recover the `authority` value. + #[prost(uint64, tag = "3")] + pub nonce: u64, + /// V is the recovery ID value for the signature Y point. While it's defined as a + /// `uint32`, it's actually bounded by a `uint8` data type withing the Ethereum protocol. + #[prost(uint32, tag = "4")] + pub v: u32, + /// R is the signature's X point on the elliptic curve (32 bytes). + #[prost(bytes = "vec", tag = "5")] + pub r: ::prost::alloc::vec::Vec, + /// S is the signature's Y point on the elliptic curve (32 bytes). + #[prost(bytes = "vec", tag = "6")] + pub s: ::prost::alloc::vec::Vec, + /// Authority is the address of the account that is authorizing delegation mechanism, it + /// is computed from the signature contained in the message and stored for convenience. + /// + /// If the authority cannot be recovered from the signature, this field will be empty and + /// the `discarded` field will be set to `true`. + #[prost(bytes = "vec", optional, tag = "7")] + pub authority: ::core::option::Option<::prost::alloc::vec::Vec>, } #[derive(Clone, PartialEq, ::prost::Message)] pub struct TransactionReceipt { /// State root is an intermediate state_root hash, computed in-between transactions to make /// **sure** you could build a proof and point to state in the middle of a block. Geth client /// uses `PostState + root + PostStateOrStatus`` while Parity used `status_code, root...`` this piles - /// hardforks, see (read the EIPs first): - /// - - /// - + /// hard forks, see (read the EIPs first): /// - /// /// Moreover, the notion of `Outcome`` in parity, which segregates the two concepts, which are @@ -337,6 +683,23 @@ pub struct TransactionReceipt { pub logs_bloom: ::prost::alloc::vec::Vec, #[prost(message, repeated, tag = "4")] pub logs: ::prost::alloc::vec::Vec, + /// BlobGasUsed is the amount of blob gas that has been used within this transaction. At time + /// of writing, this is equal to `self.blob_gas_fee_cap * len(self.blob_hashes)`. + /// + /// This is specified by + /// + /// This will is populated only if `TransactionTrace.Type == TRX_TYPE_BLOB` which is possible only + /// if Cancun fork is active on the chain. + #[prost(uint64, optional, tag = "5")] + pub blob_gas_used: ::core::option::Option, + /// BlobGasPrice is the amount to pay per blob item in the transaction. + /// + /// This is specified by + /// + /// This will is populated only if `TransactionTrace.Type == TRX_TYPE_BLOB` which is possible only + /// if Cancun fork is active on the chain. + #[prost(message, optional, tag = "6")] + pub blob_gas_price: ::core::option::Option, } #[derive(Clone, PartialEq, ::prost::Message)] pub struct Log { @@ -347,8 +710,10 @@ pub struct Log { #[prost(bytes = "vec", tag = "3")] pub data: ::prost::alloc::vec::Vec, /// Index is the index of the log relative to the transaction. This index - /// is always populated regardless of the state reversion of the call + /// is always populated regardless of the state reversion of the the call /// that emitted this log. 
+ /// + /// Only available in DetailLevel: EXTENDED #[prost(uint32, tag = "4")] pub index: u32, /// BlockIndex represents the index of the log relative to the Block. @@ -356,7 +721,7 @@ pub struct Log { /// An **important** notice is that this field will be 0 when the call /// that emitted the log has been reverted by the chain. /// - /// Currently, there are two locations where a Log can be obtained: + /// Currently, there is two locations where a Log can be obtained: /// - block.transaction_traces\[\].receipt.logs\[\] /// - block.transaction_traces\[\].calls\[\].logs\[\] /// @@ -368,6 +733,8 @@ pub struct Log { /// the `blockIndex` value will always be 0. #[prost(uint32, tag = "6")] pub block_index: u32, + /// The block's global ordinal when the log was recorded, refer to \[Block\] + /// documentation for further information about ordinals and total ordering. #[prost(uint64, tag = "7")] pub ordinal: u64, } @@ -385,6 +752,25 @@ pub struct Call { pub caller: ::prost::alloc::vec::Vec, #[prost(bytes = "vec", tag = "6")] pub address: ::prost::alloc::vec::Vec, + /// AddressDelegatesTo contains the address from which the actual code to execute will be loaded + /// as defined per EIP-7702 rules. If the Call's address value resolves to a code + /// that delegates to another address, this field will be populated with the address + /// that the call is delegated to. It will be empty in all other situations. + /// + /// Assumes that a 'SetCode' transaction set address `0xA` to delegates to address `0xB`, + /// then when a call is made to `0xA`, the Call object would have: + /// + /// - caller = + /// - address = 0xA + /// - address_delegates_to = 0xB + /// + /// Again, it's important to emphasize that this field relates to EIP-7702, if the call is + /// a DELEGATE or CALLCODE type, this field will not be populated and will remain empty. + /// + /// It will be populated only if EIP-7702 is active on the chain (Prague fork) and if the + /// 'address' of the call was pointing to another address at time of execution. + #[prost(bytes = "vec", optional, tag = "34")] + pub address_delegates_to: ::core::option::Option<::prost::alloc::vec::Vec>, #[prost(message, optional, tag = "7")] pub value: ::core::option::Option, #[prost(uint64, tag = "8")] @@ -393,8 +779,25 @@ pub struct Call { pub gas_consumed: u64, #[prost(bytes = "vec", tag = "13")] pub return_data: ::prost::alloc::vec::Vec, + /// Known Issues + /// - Version 3: + /// When call is `CREATE` or `CREATE2`, this field is not populated. A couple of suggestions: + /// 1. You can get the contract's code in the `code_changes` field. + /// 2. In the root `CREATE` call, you can directly use the `TransactionTrace`'s input field. + /// + /// Fixed in `Version 4`, see for information about block versions. #[prost(bytes = "vec", tag = "14")] pub input: ::prost::alloc::vec::Vec, + /// Indicates whether the call executed code. + /// + /// Known Issues + /// - Version 3: + /// This may be incorrectly set to `false` for accounts with code handling native value transfers, + /// as well as for certain precompiles with no input. + /// The value is initially set based on `call.type != CREATE && len(call.input) > 0` + /// and later adjusted if the tracer detects an account without code. + /// + /// Fixed in `Version 4`, see for information about block versions. 
#[prost(bool, tag = "15")] pub executed_code: bool, #[prost(bool, tag = "16")] @@ -405,6 +808,11 @@ pub struct Call { ::prost::alloc::string::String, ::prost::alloc::string::String, >, + /// Known Issues + /// - Version 3: + /// The data might be not be in order. + /// + /// Fixed in `Version 4`, see for information about block versions. #[prost(message, repeated, tag = "21")] pub storage_changes: ::prost::alloc::vec::Vec, #[prost(message, repeated, tag = "22")] @@ -415,6 +823,13 @@ pub struct Call { pub logs: ::prost::alloc::vec::Vec, #[prost(message, repeated, tag = "26")] pub code_changes: ::prost::alloc::vec::Vec, + /// Known Issues + /// - Version 3: + /// Some gas changes are not correctly tracked: + /// 1. Gas refunded due to data returned to the chain (occurs at the end of a transaction, before buyback). + /// 2. Initial gas allocation (0 -> GasLimit) at the start of a call. + /// 3. Final gas deduction (LeftOver -> 0) at the end of a call (if applicable). + /// Fixed in `Version 4`, see for information about block versions. #[prost(message, repeated, tag = "28")] pub gas_changes: ::prost::alloc::vec::Vec, /// In Ethereum, a call can be either: @@ -453,13 +868,42 @@ pub struct Call { /// ``` /// /// In the transaction above, while Call #2 and Call #3 would have the - /// status `EXECUTED` + /// status `EXECUTED`. + /// + /// If you check all calls and check only `state_reverted` flag, you might be missing + /// some balance changes and nonce changes. This is because when a full transaction fails + /// in ethereum (e.g. `calls.all(x.state_reverted == true)`), there is still the transaction + /// fee that are recorded to the chain. + /// + /// Refer to \[TransactionTrace#status\] field for more details about the handling you must + /// perform. #[prost(bool, tag = "30")] pub state_reverted: bool, + /// Known Issues + /// - Version 3: + /// 1. The block's global ordinal when the call started executing, refer to + /// \[Block\] documentation for further information about ordinals and total ordering. + /// 2. The transaction root call `begin_ordial` is always `0` (also in the GENESIS block), which can cause issues + /// when sorting by this field. To ensure proper execution order, set it as follows: + /// `trx.Calls\[0\].BeginOrdinal = trx.BeginOrdinal`. + /// + /// Fixed in `Version 4`, see for information about block versions. #[prost(uint64, tag = "31")] pub begin_ordinal: u64, + /// Known Issues + /// - Version 3: + /// 1. The block's global ordinal when the call finished executing, refer to + /// \[Block\] documentation for further information about ordinals and total ordering. + /// 2. The root call of the GENESIS block is always `0`. To fix it, you can set it as follows: + /// `rx.Calls\[0\].EndOrdinal = max.Uint64`. + /// + /// Fixed in `Version 4`, see for information about block versions. #[prost(uint64, tag = "32")] pub end_ordinal: u64, + /// Known Issues + /// - Version 4: + /// AccountCreations are NOT SUPPORTED anymore. DO NOT rely on them. + #[deprecated] #[prost(message, repeated, tag = "33")] pub account_creations: ::prost::alloc::vec::Vec, } @@ -473,29 +917,50 @@ pub struct StorageChange { pub old_value: ::prost::alloc::vec::Vec, #[prost(bytes = "vec", tag = "4")] pub new_value: ::prost::alloc::vec::Vec, + /// The block's global ordinal when the storage change was recorded, refer to \[Block\] + /// documentation for further information about ordinals and total ordering. 
#[prost(uint64, tag = "5")] pub ordinal: u64, } #[derive(Clone, PartialEq, ::prost::Message)] pub struct BalanceChange { + /// Address is the address of the account that has changed balance. #[prost(bytes = "vec", tag = "1")] pub address: ::prost::alloc::vec::Vec, + /// OldValue is the balance of the address before the change. This value + /// can be **nil/null/None** if there was no previous balance for the address. + /// It is safe in those case(s) to consider the balance as being 0. + /// + /// If you consume this from a Substreams, you can safely use: + /// + /// ```ignore + /// let old_value = old_value.unwrap_or_default(); + /// ``` #[prost(message, optional, tag = "2")] pub old_value: ::core::option::Option, + /// NewValue is the balance of the address after the change. This value + /// can be **nil/null/None** if there was no previous balance for the address + /// after the change. It is safe in those case(s) to consider the balance as being + /// 0. + /// + /// If you consume this from a Substreams, you can safely use: + /// + /// ```ignore + /// let new_value = new_value.unwrap_or_default(); + /// ``` #[prost(message, optional, tag = "3")] pub new_value: ::core::option::Option, + /// Reason is the reason why the balance has changed. This is useful to determine + /// why the balance has changed and what is the context of the change. #[prost(enumeration = "balance_change::Reason", tag = "4")] pub reason: i32, + /// The block's global ordinal when the balance change was recorded, refer to \[Block\] + /// documentation for further information about ordinals and total ordering. #[prost(uint64, tag = "5")] pub ordinal: u64, } /// Nested message and enum types in `BalanceChange`. pub mod balance_change { - /// Obtain all balance change reasons under deep mind repository: - /// - /// ```shell - /// ack -ho 'BalanceChangeReason\(".*"\)' | grep -Eo '".*"' | sort | uniq - /// ``` #[derive( Clone, Copy, @@ -526,6 +991,14 @@ pub mod balance_change { CallBalanceOverride = 12, /// Used on chain(s) where some Ether burning happens Burn = 15, + Withdrawal = 16, + /// Rewards for Blob processing on BNB chain added in Tycho hard-fork, refers + /// to BNB documentation to check the timestamp at which it was activated. + RewardBlobFee = 17, + /// This reason is used only on Optimism chain. + IncreaseMint = 18, + /// This reason is used only on Optimism chain. + Revert = 19, } impl Reason { /// String value of the enum field names used in the ProtoBuf definition. @@ -550,6 +1023,10 @@ pub mod balance_change { Self::SuicideWithdraw => "REASON_SUICIDE_WITHDRAW", Self::CallBalanceOverride => "REASON_CALL_BALANCE_OVERRIDE", Self::Burn => "REASON_BURN", + Self::Withdrawal => "REASON_WITHDRAWAL", + Self::RewardBlobFee => "REASON_REWARD_BLOB_FEE", + Self::IncreaseMint => "REASON_INCREASE_MINT", + Self::Revert => "REASON_REVERT", } } /// Creates an enum from field names used in the ProtoBuf definition. 
@@ -571,6 +1048,10 @@ pub mod balance_change { "REASON_SUICIDE_WITHDRAW" => Some(Self::SuicideWithdraw), "REASON_CALL_BALANCE_OVERRIDE" => Some(Self::CallBalanceOverride), "REASON_BURN" => Some(Self::Burn), + "REASON_WITHDRAWAL" => Some(Self::Withdrawal), + "REASON_REWARD_BLOB_FEE" => Some(Self::RewardBlobFee), + "REASON_INCREASE_MINT" => Some(Self::IncreaseMint), + "REASON_REVERT" => Some(Self::Revert), _ => None, } } @@ -584,6 +1065,8 @@ pub struct NonceChange { pub old_value: u64, #[prost(uint64, tag = "3")] pub new_value: u64, + /// The block's global ordinal when the nonce change was recorded, refer to \[Block\] + /// documentation for further information about ordinals and total ordering. #[prost(uint64, tag = "4")] pub ordinal: u64, } @@ -591,6 +1074,8 @@ pub struct NonceChange { pub struct AccountCreation { #[prost(bytes = "vec", tag = "1")] pub account: ::prost::alloc::vec::Vec, + /// The block's global ordinal when the account creation was recorded, refer to \[Block\] + /// documentation for further information about ordinals and total ordering. #[prost(uint64, tag = "2")] pub ordinal: u64, } @@ -606,6 +1091,8 @@ pub struct CodeChange { pub new_hash: ::prost::alloc::vec::Vec, #[prost(bytes = "vec", tag = "5")] pub new_code: ::prost::alloc::vec::Vec, + /// The block's global ordinal when the code change was recorded, refer to \[Block\] + /// documentation for further information about ordinals and total ordering. #[prost(uint64, tag = "6")] pub ordinal: u64, } @@ -613,7 +1100,7 @@ pub struct CodeChange { /// The gas is computed per actual op codes. Doing them completely might prove /// overwhelming in most cases. /// -/// Hence, we only index some of them, those that are costly like all the calls +/// Hence, we only index some of them, those that are costy like all the calls /// one, log events, return data, etc. #[derive(Clone, Copy, PartialEq, ::prost::Message)] pub struct GasChange { @@ -623,16 +1110,13 @@ pub struct GasChange { pub new_value: u64, #[prost(enumeration = "gas_change::Reason", tag = "3")] pub reason: i32, + /// The block's global ordinal when the gas change was recorded, refer to \[Block\] + /// documentation for further information about ordinals and total ordering. #[prost(uint64, tag = "4")] pub ordinal: u64, } /// Nested message and enum types in `GasChange`. 
pub mod gas_change { - /// Obtain all gas change reasons under deep mind repository: - /// - /// ```shell - /// ack -ho 'GasChangeReason\(".*"\)' | grep -Eo '".*"' | sort | uniq - /// ``` #[derive( Clone, Copy, @@ -647,27 +1131,88 @@ pub mod gas_change { #[repr(i32)] pub enum Reason { Unknown = 0, + /// REASON_CALL is the amount of gas that will be charged for a 'CALL' opcode executed by the EVM Call = 1, + /// REASON_CALL_CODE is the amount of gas that will be charged for a 'CALLCODE' opcode executed by the EVM CallCode = 2, + /// REASON_CALL_DATA_COPY is the amount of gas that will be charged for a 'CALLDATACOPY' opcode executed by the EVM CallDataCopy = 3, + /// REASON_CODE_COPY is the amount of gas that will be charged for a 'CALLDATACOPY' opcode executed by the EVM CodeCopy = 4, + /// REASON_CODE_STORAGE is the amount of gas that will be charged for code storage CodeStorage = 5, + /// REASON_CONTRACT_CREATION is the amount of gas that will be charged for a 'CREATE' opcode executed by the EVM and for the gas + /// burned for a CREATE, today controlled by EIP150 rules ContractCreation = 6, + /// REASON_CONTRACT_CREATION2 is the amount of gas that will be charged for a 'CREATE2' opcode executed by the EVM and for the gas + /// burned for a CREATE2, today controlled by EIP150 rules ContractCreation2 = 7, + /// REASON_DELEGATE_CALL is the amount of gas that will be charged for a 'DELEGATECALL' opcode executed by the EVM DelegateCall = 8, + /// REASON_EVENT_LOG is the amount of gas that will be charged for a 'LOG' opcode executed by the EVM EventLog = 9, + /// REASON_EXT_CODE_COPY is the amount of gas that will be charged for a 'LOG' opcode executed by the EVM ExtCodeCopy = 10, + /// REASON_FAILED_EXECUTION is the burning of the remaining gas when the execution failed without a revert FailedExecution = 11, + /// REASON_INTRINSIC_GAS is the amount of gas that will be charged for the intrinsic cost of the transaction, there is + /// always exactly one of those per transaction IntrinsicGas = 12, + /// GasChangePrecompiledContract is the amount of gas that will be charged for a precompiled contract execution PrecompiledContract = 13, + /// REASON_REFUND_AFTER_EXECUTION is the amount of gas that will be refunded to the caller after the execution of the call, + /// if there is left over at the end of execution RefundAfterExecution = 14, + /// REASON_RETURN is the amount of gas that will be charged for a 'RETURN' opcode executed by the EVM Return = 15, + /// REASON_RETURN_DATA_COPY is the amount of gas that will be charged for a 'RETURNDATACOPY' opcode executed by the EVM ReturnDataCopy = 16, + /// REASON_REVERT is the amount of gas that will be charged for a 'REVERT' opcode executed by the EVM Revert = 17, + /// REASON_SELF_DESTRUCT is the amount of gas that will be charged for a 'SELFDESTRUCT' opcode executed by the EVM SelfDestruct = 18, + /// REASON_STATIC_CALL is the amount of gas that will be charged for a 'STATICALL' opcode executed by the EVM StaticCall = 19, + /// REASON_STATE_COLD_ACCESS is the amount of gas that will be charged for a cold storage access as controlled by EIP2929 rules + /// /// Added in Berlin fork (Geth 1.10+) StateColdAccess = 20, + /// REASON_TX_INITIAL_BALANCE is the initial balance for the call which will be equal to the gasLimit of the call + /// + /// Added as new tracing reason in Geth, available only on some chains + TxInitialBalance = 21, + /// REASON_TX_REFUNDS is the sum of all refunds which happened during the tx execution (e.g. 
storage slot being cleared) + /// this generates an increase in gas. There is only one such gas change per transaction. + /// + /// Added as new tracing reason in Geth, available only on some chains + TxRefunds = 22, + /// REASON_TX_LEFT_OVER_RETURNED is the amount of gas left over at the end of transaction's execution that will be returned + /// to the chain. This change will always be a negative change as we "drain" left over gas towards 0. If there was no gas + /// left at the end of execution, no such even will be emitted. The returned gas's value in Wei is returned to caller. + /// There is at most one of such gas change per transaction. + /// + /// Added as new tracing reason in Geth, available only on some chains + TxLeftOverReturned = 23, + /// REASON_CALL_INITIAL_BALANCE is the initial balance for the call which will be equal to the gasLimit of the call. There is only + /// one such gas change per call. + /// + /// Added as new tracing reason in Geth, available only on some chains + CallInitialBalance = 24, + /// REASON_CALL_LEFT_OVER_RETURNED is the amount of gas left over that will be returned to the caller, this change will always + /// be a negative change as we "drain" left over gas towards 0. If there was no gas left at the end of execution, no such even + /// will be emitted. + CallLeftOverReturned = 25, + /// REASON_WITNESS_CONTRACT_INIT flags the event of adding to the witness during the contract creation initialization step. + WitnessContractInit = 26, + /// REASON_WITNESS_CONTRACT_CREATION flags the event of adding to the witness during the contract creation finalization step. + WitnessContractCreation = 27, + /// REASON_WITNESS_CODE_CHUNK flags the event of adding one or more contract code chunks to the witness. + WitnessCodeChunk = 28, + /// REASON_WITNESS_CONTRACT_COLLISION_CHECK flags the event of adding to the witness when checking for contract address collision. + WitnessContractCollisionCheck = 29, + /// REASON_TX_DATA_FLOOR is the amount of extra gas the transaction has to pay to reach the minimum gas requirement for the + /// transaction data. This change will always be a negative change. + TxDataFloor = 30, } impl Reason { /// String value of the enum field names used in the ProtoBuf definition. @@ -697,6 +1242,18 @@ pub mod gas_change { Self::SelfDestruct => "REASON_SELF_DESTRUCT", Self::StaticCall => "REASON_STATIC_CALL", Self::StateColdAccess => "REASON_STATE_COLD_ACCESS", + Self::TxInitialBalance => "REASON_TX_INITIAL_BALANCE", + Self::TxRefunds => "REASON_TX_REFUNDS", + Self::TxLeftOverReturned => "REASON_TX_LEFT_OVER_RETURNED", + Self::CallInitialBalance => "REASON_CALL_INITIAL_BALANCE", + Self::CallLeftOverReturned => "REASON_CALL_LEFT_OVER_RETURNED", + Self::WitnessContractInit => "REASON_WITNESS_CONTRACT_INIT", + Self::WitnessContractCreation => "REASON_WITNESS_CONTRACT_CREATION", + Self::WitnessCodeChunk => "REASON_WITNESS_CODE_CHUNK", + Self::WitnessContractCollisionCheck => { + "REASON_WITNESS_CONTRACT_COLLISION_CHECK" + } + Self::TxDataFloor => "REASON_TX_DATA_FLOOR", } } /// Creates an enum from field names used in the ProtoBuf definition. 
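// Illustrative sketch, not part of this patch: netting the generated `GasChange`
// records attached to a `Call`. Each record moves the gas counter from
// `old_value` to `new_value`, so a negative delta is gas spent and a positive
// delta is gas credited back (e.g. the refund and left-over reasons documented
// above). The helper name is hypothetical.
fn net_gas_delta(call: &Call) -> i128 {
    call.gas_changes
        .iter()
        .map(|change| change.new_value as i128 - change.old_value as i128)
        .sum()
}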
@@ -723,11 +1280,84 @@ pub mod gas_change { "REASON_SELF_DESTRUCT" => Some(Self::SelfDestruct), "REASON_STATIC_CALL" => Some(Self::StaticCall), "REASON_STATE_COLD_ACCESS" => Some(Self::StateColdAccess), + "REASON_TX_INITIAL_BALANCE" => Some(Self::TxInitialBalance), + "REASON_TX_REFUNDS" => Some(Self::TxRefunds), + "REASON_TX_LEFT_OVER_RETURNED" => Some(Self::TxLeftOverReturned), + "REASON_CALL_INITIAL_BALANCE" => Some(Self::CallInitialBalance), + "REASON_CALL_LEFT_OVER_RETURNED" => Some(Self::CallLeftOverReturned), + "REASON_WITNESS_CONTRACT_INIT" => Some(Self::WitnessContractInit), + "REASON_WITNESS_CONTRACT_CREATION" => Some(Self::WitnessContractCreation), + "REASON_WITNESS_CODE_CHUNK" => Some(Self::WitnessCodeChunk), + "REASON_WITNESS_CONTRACT_COLLISION_CHECK" => { + Some(Self::WitnessContractCollisionCheck) + } + "REASON_TX_DATA_FLOOR" => Some(Self::TxDataFloor), _ => None, } } } } +/// HeaderOnlyBlock is used to optimally unpack the \[Block\] structure (note the +/// corresponding message number for the `header` field) while consuming less +/// memory, when only the `header` is desired. +/// +/// WARN: this is a client-side optimization pattern and should be moved in the +/// consuming code. +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct HeaderOnlyBlock { + #[prost(message, optional, tag = "5")] + pub header: ::core::option::Option, +} +/// BlockWithRefs is a lightweight block, with traces and transactions +/// purged from the `block` within, and only. It is used in transports +/// to pass block data around. +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct BlockWithRefs { + #[prost(string, tag = "1")] + pub id: ::prost::alloc::string::String, + #[prost(message, optional, tag = "2")] + pub block: ::core::option::Option, + #[prost(message, optional, tag = "3")] + pub transaction_trace_refs: ::core::option::Option, + #[prost(bool, tag = "4")] + pub irreversible: bool, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct TransactionTraceWithBlockRef { + #[prost(message, optional, tag = "1")] + pub trace: ::core::option::Option, + #[prost(message, optional, tag = "2")] + pub block_ref: ::core::option::Option, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct TransactionRefs { + #[prost(bytes = "vec", repeated, tag = "1")] + pub hashes: ::prost::alloc::vec::Vec<::prost::alloc::vec::Vec>, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct BlockRef { + #[prost(bytes = "vec", tag = "1")] + pub hash: ::prost::alloc::vec::Vec, + #[prost(uint64, tag = "2")] + pub number: u64, +} +/// Withdrawal represents a validator withdrawal from the beacon chain to the EVM. +/// Introduced in EIP-4895 (Shanghai hard fork). 
+#[derive(Clone, PartialEq, ::prost::Message)] +pub struct Withdrawal { + /// Index is the monotonically increasing identifier of the withdrawal + #[prost(uint64, tag = "1")] + pub index: u64, + /// ValidatorIndex is the index of the validator that is withdrawing + #[prost(uint64, tag = "2")] + pub validator_index: u64, + /// Address is the Ethereum address receiving the withdrawn funds + #[prost(bytes = "vec", tag = "3")] + pub address: ::prost::alloc::vec::Vec, + /// Amount is the value of the withdrawal in gwei (1 gwei = 1e9 wei) + #[prost(uint64, tag = "4")] + pub amount: u64, +} #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] #[repr(i32)] pub enum TransactionTraceStatus { diff --git a/chain/ethereum/src/runtime/abi.rs b/chain/ethereum/src/runtime/abi.rs index 7a772caccec..7ac13752671 100644 --- a/chain/ethereum/src/runtime/abi.rs +++ b/chain/ethereum/src/runtime/abi.rs @@ -2,16 +2,16 @@ use super::runtime_adapter::UnresolvedContractCall; use crate::trigger::{ EthereumBlockData, EthereumCallData, EthereumEventData, EthereumTransactionData, }; +use anyhow::anyhow; use async_trait::async_trait; +use graph::abi; +use graph::prelude::alloy; +use graph::prelude::alloy::consensus::TxReceipt; +use graph::prelude::alloy::network::ReceiptResponse; +use graph::prelude::alloy::rpc::types::{Log, TransactionReceipt}; +use graph::prelude::alloy::serde::WithOtherFields; use graph::{ - prelude::{ - ethabi, - web3::{ - self, - types::{Log, TransactionReceipt, H256}, - }, - BigInt, - }, + prelude::BigInt, runtime::{ asc_get, asc_new, asc_new_or_null, gas::GasCounter, AscHeap, AscIndexId, AscPtr, AscType, DeterministicHostError, FromAscObj, HostExportError, IndexForAscTypeId, ToAscObj, @@ -24,7 +24,7 @@ use graph_runtime_wasm::asc_abi::class::{ }; use semver::Version; -type AscH256 = Uint8Array; +type AscB256 = Uint8Array; type AscH2048 = Uint8Array; pub struct AscLogParamArray(Array>); @@ -42,7 +42,7 @@ impl AscType for AscLogParamArray { } #[async_trait] -impl ToAscObj for &[ethabi::LogParam] { +impl ToAscObj for &[abi::DynSolParam] { async fn to_asc_obj( &self, heap: &mut H, @@ -60,7 +60,7 @@ impl AscIndexId for AscLogParamArray { const INDEX_ASC_TYPE_ID: IndexForAscTypeId = IndexForAscTypeId::ArrayEventParam; } -pub struct AscTopicArray(Array>); +pub struct AscTopicArray(Array>); impl AscType for AscTopicArray { fn to_asc_bytes(&self) -> Result, DeterministicHostError> { @@ -76,14 +76,14 @@ impl AscType for AscTopicArray { } #[async_trait] -impl ToAscObj for Vec { +impl ToAscObj for &[alloy::primitives::B256] { async fn to_asc_obj( &self, heap: &mut H, gas: &GasCounter, ) -> Result { let mut topics = Vec::with_capacity(self.len()); - for topic in self { + for topic in *self { topics.push(asc_new(heap, topic, gas).await?); } Ok(AscTopicArray(Array::new(&topics, heap, gas).await?)) @@ -91,7 +91,7 @@ impl ToAscObj for Vec { } impl AscIndexId for AscTopicArray { - const INDEX_ASC_TYPE_ID: IndexForAscTypeId = IndexForAscTypeId::ArrayH256; + const INDEX_ASC_TYPE_ID: IndexForAscTypeId = IndexForAscTypeId::ArrayB256; } pub struct AscLogArray(Array>); @@ -110,14 +110,14 @@ impl AscType for AscLogArray { } #[async_trait] -impl ToAscObj for Vec { +impl ToAscObj for &[Log] { async fn to_asc_obj( &self, heap: &mut H, gas: &GasCounter, ) -> Result { let mut logs = Vec::with_capacity(self.len()); - for log in self { + for log in *self { logs.push(asc_new(heap, log, gas).await?); } @@ -190,13 +190,13 @@ impl FromAscObj for UnresolvedContractCall { #[repr(C)] 
#[derive(AscType)] pub(crate) struct AscEthereumBlock { - pub hash: AscPtr, - pub parent_hash: AscPtr, - pub uncles_hash: AscPtr, + pub hash: AscPtr, + pub parent_hash: AscPtr, + pub uncles_hash: AscPtr, pub author: AscPtr, - pub state_root: AscPtr, - pub transactions_root: AscPtr, - pub receipts_root: AscPtr, + pub state_root: AscPtr, + pub transactions_root: AscPtr, + pub receipts_root: AscPtr, pub number: AscPtr, pub gas_used: AscPtr, pub gas_limit: AscPtr, @@ -214,13 +214,13 @@ impl AscIndexId for AscEthereumBlock { #[derive(AscType)] #[allow(non_camel_case_types)] pub(crate) struct AscEthereumBlock_0_0_6 { - pub hash: AscPtr, - pub parent_hash: AscPtr, - pub uncles_hash: AscPtr, + pub hash: AscPtr, + pub parent_hash: AscPtr, + pub uncles_hash: AscPtr, pub author: AscPtr, - pub state_root: AscPtr, - pub transactions_root: AscPtr, - pub receipts_root: AscPtr, + pub state_root: AscPtr, + pub transactions_root: AscPtr, + pub receipts_root: AscPtr, pub number: AscPtr, pub gas_used: AscPtr, pub gas_limit: AscPtr, @@ -239,7 +239,7 @@ impl AscIndexId for AscEthereumBlock_0_0_6 { #[derive(AscType)] #[allow(non_camel_case_types)] pub(crate) struct AscEthereumTransaction_0_0_1 { - pub hash: AscPtr, + pub hash: AscPtr, pub index: AscPtr, pub from: AscPtr, pub to: AscPtr, @@ -256,7 +256,7 @@ impl AscIndexId for AscEthereumTransaction_0_0_1 { #[derive(AscType)] #[allow(non_camel_case_types)] pub(crate) struct AscEthereumTransaction_0_0_2 { - pub hash: AscPtr, + pub hash: AscPtr, pub index: AscPtr, pub from: AscPtr, pub to: AscPtr, @@ -274,7 +274,7 @@ impl AscIndexId for AscEthereumTransaction_0_0_2 { #[derive(AscType)] #[allow(non_camel_case_types)] pub(crate) struct AscEthereumTransaction_0_0_6 { - pub hash: AscPtr, + pub hash: AscPtr, pub index: AscPtr, pub from: AscPtr, pub to: AscPtr, @@ -323,9 +323,9 @@ pub(crate) struct AscEthereumLog { pub address: AscPtr, pub topics: AscPtr, pub data: AscPtr, - pub block_hash: AscPtr, - pub block_number: AscPtr, - pub transaction_hash: AscPtr, + pub block_hash: AscPtr, + pub block_number: AscPtr, + pub transaction_hash: AscPtr, pub transaction_index: AscPtr, pub log_index: AscPtr, pub transaction_log_index: AscPtr, @@ -340,16 +340,16 @@ impl AscIndexId for AscEthereumLog { #[repr(C)] #[derive(AscType)] pub(crate) struct AscEthereumTransactionReceipt { - pub transaction_hash: AscPtr, + pub transaction_hash: AscPtr, pub transaction_index: AscPtr, - pub block_hash: AscPtr, + pub block_hash: AscPtr, pub block_number: AscPtr, pub cumulative_gas_used: AscPtr, pub gas_used: AscPtr, pub contract_address: AscPtr, pub logs: AscPtr, pub status: AscPtr, - pub root: AscPtr, + pub root: AscPtr, pub logs_bloom: AscPtr, } @@ -437,10 +437,7 @@ impl<'a> ToAscObj for EthereumBlockData<'a> { heap: &mut H, gas: &GasCounter, ) -> Result { - let size = match self.size() { - Some(size) => asc_new(heap, &BigInt::from_unsigned_u256(size), gas).await?, - None => AscPtr::null(), - }; + let size = asc_new_or_null_u256(heap, self.size(), gas).await?; Ok(AscEthereumBlock { hash: asc_new(heap, self.hash(), gas).await?, @@ -451,9 +448,9 @@ impl<'a> ToAscObj for EthereumBlockData<'a> { transactions_root: asc_new(heap, self.transactions_root(), gas).await?, receipts_root: asc_new(heap, self.receipts_root(), gas).await?, number: asc_new(heap, &BigInt::from(self.number()), gas).await?, - gas_used: asc_new(heap, &BigInt::from_unsigned_u256(self.gas_used()), gas).await?, - gas_limit: asc_new(heap, &BigInt::from_unsigned_u256(self.gas_limit()), gas).await?, - timestamp: asc_new(heap, 
&BigInt::from_unsigned_u256(self.timestamp()), gas).await?, + gas_used: asc_new(heap, &BigInt::from(self.gas_used()), gas).await?, + gas_limit: asc_new(heap, &BigInt::from(self.gas_limit()), gas).await?, + timestamp: asc_new(heap, &BigInt::from(self.timestamp()), gas).await?, difficulty: asc_new(heap, &BigInt::from_unsigned_u256(self.difficulty()), gas).await?, total_difficulty: asc_new( heap, @@ -473,14 +470,8 @@ impl<'a> ToAscObj for EthereumBlockData<'a> { heap: &mut H, gas: &GasCounter, ) -> Result { - let size = match self.size() { - Some(size) => asc_new(heap, &BigInt::from_unsigned_u256(size), gas).await?, - None => AscPtr::null(), - }; - let base_fee_per_block = match self.base_fee_per_gas() { - Some(base_fee) => asc_new(heap, &BigInt::from_unsigned_u256(base_fee), gas).await?, - None => AscPtr::null(), - }; + let size = asc_new_or_null_u256(heap, self.size(), gas).await?; + let base_fee_per_block = asc_new_or_null_u64(heap, self.base_fee_per_gas(), gas).await?; Ok(AscEthereumBlock_0_0_6 { hash: asc_new(heap, self.hash(), gas).await?, @@ -491,9 +482,9 @@ impl<'a> ToAscObj for EthereumBlockData<'a> { transactions_root: asc_new(heap, self.transactions_root(), gas).await?, receipts_root: asc_new(heap, self.receipts_root(), gas).await?, number: asc_new(heap, &BigInt::from(self.number()), gas).await?, - gas_used: asc_new(heap, &BigInt::from_unsigned_u256(self.gas_used()), gas).await?, - gas_limit: asc_new(heap, &BigInt::from_unsigned_u256(self.gas_limit()), gas).await?, - timestamp: asc_new(heap, &BigInt::from_unsigned_u256(self.timestamp()), gas).await?, + gas_used: asc_new(heap, &BigInt::from(self.gas_used()), gas).await?, + gas_limit: asc_new(heap, &BigInt::from(self.gas_limit()), gas).await?, + timestamp: asc_new(heap, &BigInt::from(self.timestamp()), gas).await?, difficulty: asc_new(heap, &BigInt::from_unsigned_u256(self.difficulty()), gas).await?, total_difficulty: asc_new( heap, @@ -515,13 +506,13 @@ impl<'a> ToAscObj for EthereumTransactionData<'a> gas: &GasCounter, ) -> Result { Ok(AscEthereumTransaction_0_0_1 { - hash: asc_new(heap, self.hash(), gas).await?, - index: asc_new(heap, &BigInt::from_unsigned_u128(self.index()), gas).await?, - from: asc_new(heap, self.from(), gas).await?, - to: asc_new_or_null(heap, self.to(), gas).await?, - value: asc_new(heap, &BigInt::from_unsigned_u256(self.value()), gas).await?, - gas_limit: asc_new(heap, &BigInt::from_unsigned_u256(self.gas_limit()), gas).await?, - gas_price: asc_new(heap, &BigInt::from_unsigned_u256(self.gas_price()), gas).await?, + hash: asc_new(heap, &self.hash(), gas).await?, + index: asc_new(heap, &BigInt::from(self.index()), gas).await?, + from: asc_new(heap, &self.from(), gas).await?, + to: asc_new_or_null(heap, &self.to(), gas).await?, + value: asc_new(heap, &BigInt::from_unsigned_u256(&self.value()), gas).await?, + gas_limit: asc_new(heap, &BigInt::from(self.gas_limit()), gas).await?, + gas_price: asc_new(heap, &BigInt::from(self.gas_price()), gas).await?, }) } } @@ -534,13 +525,13 @@ impl<'a> ToAscObj for EthereumTransactionData<'a> gas: &GasCounter, ) -> Result { Ok(AscEthereumTransaction_0_0_2 { - hash: asc_new(heap, self.hash(), gas).await?, - index: asc_new(heap, &BigInt::from_unsigned_u128(self.index()), gas).await?, - from: asc_new(heap, self.from(), gas).await?, - to: asc_new_or_null(heap, self.to(), gas).await?, - value: asc_new(heap, &BigInt::from_unsigned_u256(self.value()), gas).await?, - gas_limit: asc_new(heap, &BigInt::from_unsigned_u256(self.gas_limit()), gas).await?, - gas_price: asc_new(heap, 
&BigInt::from_unsigned_u256(self.gas_price()), gas).await?, + hash: asc_new(heap, &self.hash(), gas).await?, + index: asc_new(heap, &BigInt::from(self.index()), gas).await?, + from: asc_new(heap, &self.from(), gas).await?, + to: asc_new_or_null(heap, &self.to(), gas).await?, + value: asc_new(heap, &BigInt::from_unsigned_u256(&self.value()), gas).await?, + gas_limit: asc_new(heap, &BigInt::from(self.gas_limit()), gas).await?, + gas_price: asc_new(heap, &BigInt::from(self.gas_price()), gas).await?, input: asc_new(heap, self.input(), gas).await?, }) } @@ -554,15 +545,15 @@ impl<'a> ToAscObj for EthereumTransactionData<'a> gas: &GasCounter, ) -> Result { Ok(AscEthereumTransaction_0_0_6 { - hash: asc_new(heap, self.hash(), gas).await?, - index: asc_new(heap, &BigInt::from_unsigned_u128(self.index()), gas).await?, - from: asc_new(heap, self.from(), gas).await?, - to: asc_new_or_null(heap, self.to(), gas).await?, - value: asc_new(heap, &BigInt::from_unsigned_u256(self.value()), gas).await?, - gas_limit: asc_new(heap, &BigInt::from_unsigned_u256(self.gas_limit()), gas).await?, - gas_price: asc_new(heap, &BigInt::from_unsigned_u256(self.gas_price()), gas).await?, + hash: asc_new(heap, &self.hash(), gas).await?, + index: asc_new(heap, &BigInt::from(self.index()), gas).await?, + from: asc_new(heap, &self.from(), gas).await?, + to: asc_new_or_null(heap, &self.to(), gas).await?, + value: asc_new(heap, &BigInt::from_unsigned_u256(&self.value()), gas).await?, + gas_limit: asc_new(heap, &BigInt::from(self.gas_limit()), gas).await?, + gas_price: asc_new(heap, &BigInt::from(self.gas_price()), gas).await?, input: asc_new(heap, self.input(), gas).await?, - nonce: asc_new(heap, &BigInt::from_unsigned_u256(self.nonce()), gas).await?, + nonce: asc_new(heap, &BigInt::from(self.nonce()), gas).await?, }) } } @@ -582,14 +573,10 @@ where ) -> Result, HostExportError> { Ok(AscEthereumEvent { address: asc_new(heap, self.address(), gas).await?, - log_index: asc_new(heap, &BigInt::from_unsigned_u256(self.log_index()), gas).await?, - transaction_log_index: asc_new( - heap, - &BigInt::from_unsigned_u256(self.transaction_log_index()), - gas, - ) - .await?, - log_type: asc_new_or_null(heap, self.log_type(), gas).await?, + log_index: asc_new(heap, &BigInt::from(self.log_index()), gas).await?, + transaction_log_index: asc_new(heap, &BigInt::from(self.transaction_log_index()), gas) + .await?, + log_type: asc_new_or_null(heap, &self.log_type().as_ref(), gas).await?, block: asc_new::(heap, &self.block, gas).await?, transaction: asc_new::(heap, &self.transaction, gas) .await?, @@ -599,13 +586,18 @@ where } #[async_trait] -impl<'a, T, B> ToAscObj> - for (EthereumEventData<'a>, Option<&TransactionReceipt>) +impl<'a, T, B, Inner> ToAscObj> + for ( + EthereumEventData<'a>, + Option<&WithOtherFields>>, + ) where T: AscType + AscIndexId + Send, B: AscType + AscIndexId + Send, EthereumTransactionData<'a>: ToAscObj, EthereumBlockData<'a>: ToAscObj, + Inner: Send + Sync, + TransactionReceipt: ToAscObj, { async fn to_asc_obj( &self, @@ -623,7 +615,7 @@ where params, } = event_data.to_asc_obj(heap, gas).await?; let receipt = if let Some(receipt_data) = optional_receipt { - asc_new(heap, receipt_data, gas).await? + asc_new(heap, &receipt_data.inner(), gas).await? 
} else { AscPtr::null() }; @@ -642,7 +634,7 @@ where async fn asc_new_or_null_u256( heap: &mut H, - value: &Option, + value: &Option, gas: &GasCounter, ) -> Result, HostExportError> { match value { @@ -653,7 +645,7 @@ async fn asc_new_or_null_u256( async fn asc_new_or_null_u64( heap: &mut H, - value: &Option, + value: &Option, gas: &GasCounter, ) -> Result, HostExportError> { match value { @@ -669,51 +661,60 @@ impl ToAscObj for Log { heap: &mut H, gas: &GasCounter, ) -> Result { - let removed = match self.removed { - Some(removed) => asc_new(heap, &AscWrapped { inner: removed }, gas).await?, - None => AscPtr::null(), - }; Ok(AscEthereumLog { - address: asc_new(heap, &self.address, gas).await?, - topics: asc_new(heap, &self.topics, gas).await?, - data: asc_new(heap, self.data.0.as_slice(), gas).await?, + address: asc_new(heap, &self.address(), gas).await?, + topics: asc_new(heap, &self.topics(), gas).await?, + data: asc_new(heap, self.data().data.as_ref(), gas).await?, block_hash: asc_new_or_null(heap, &self.block_hash, gas).await?, block_number: asc_new_or_null_u64(heap, &self.block_number, gas).await?, transaction_hash: asc_new_or_null(heap, &self.transaction_hash, gas).await?, transaction_index: asc_new_or_null_u64(heap, &self.transaction_index, gas).await?, - log_index: asc_new_or_null_u256(heap, &self.log_index, gas).await?, - transaction_log_index: asc_new_or_null_u256(heap, &self.transaction_log_index, gas) - .await?, - log_type: asc_new_or_null(heap, &self.log_type, gas).await?, - removed, + log_index: asc_new_or_null_u64(heap, &self.log_index, gas).await?, + transaction_log_index: AscPtr::null(), // Non-standard field, not available in alloy + log_type: AscPtr::null(), // Non-standard field, not available in alloy + removed: asc_new( + heap, + &AscWrapped { + inner: self.removed, + }, + gas, + ) + .await?, }) } } #[async_trait] -impl ToAscObj for &TransactionReceipt { +impl ToAscObj + for TransactionReceipt> +{ async fn to_asc_obj( &self, heap: &mut H, gas: &GasCounter, ) -> Result { + let transaction_index = self + .transaction_index + .ok_or(HostExportError::Unknown(anyhow!( + "Transaction index is missing" + )))?; + let status = match self.inner.status_or_post_state().as_eip658() { + Some(success) => asc_new(heap, &BigInt::from(success as u64), gas).await?, + None => AscPtr::null(), // Pre-EIP-658 (pre-Byzantium) receipt + }; Ok(AscEthereumTransactionReceipt { transaction_hash: asc_new(heap, &self.transaction_hash, gas).await?, - transaction_index: asc_new(heap, &BigInt::from(self.transaction_index), gas).await?, + transaction_index: asc_new(heap, &BigInt::from(transaction_index), gas).await?, block_hash: asc_new_or_null(heap, &self.block_hash, gas).await?, block_number: asc_new_or_null_u64(heap, &self.block_number, gas).await?, - cumulative_gas_used: asc_new( - heap, - &BigInt::from_unsigned_u256(&self.cumulative_gas_used), - gas, - ) - .await?, - gas_used: asc_new_or_null_u256(heap, &self.gas_used, gas).await?, + cumulative_gas_used: asc_new(heap, &BigInt::from(self.cumulative_gas_used()), gas) + .await?, + gas_used: asc_new(heap, &BigInt::from(self.gas_used), gas).await?, contract_address: asc_new_or_null(heap, &self.contract_address, gas).await?, - logs: asc_new(heap, &self.logs, gas).await?, - status: asc_new_or_null_u64(heap, &self.status, gas).await?, - root: asc_new_or_null(heap, &self.root, gas).await?, - logs_bloom: asc_new(heap, self.logs_bloom.as_bytes(), gas).await?, + logs: asc_new(heap, &self.logs(), gas).await?, + status, + root: asc_new_or_null(heap, 
&self.state_root(), gas).await?, + logs_bloom: asc_new(heap, self.inner.bloom().as_slice(), gas).await?, }) } } @@ -782,7 +783,7 @@ impl<'a> ToAscObj for ethabi::LogParam { +impl ToAscObj for abi::DynSolParam { async fn to_asc_obj( &self, heap: &mut H, diff --git a/chain/ethereum/src/runtime/runtime_adapter.rs b/chain/ethereum/src/runtime/runtime_adapter.rs index 2d06b28733e..a5597efcd4d 100644 --- a/chain/ethereum/src/runtime/runtime_adapter.rs +++ b/chain/ethereum/src/runtime/runtime_adapter.rs @@ -7,6 +7,8 @@ use crate::{ }; use anyhow::{anyhow, Context, Error}; use blockchain::HostFn; +use graph::abi; +use graph::abi::DynSolValueExt; use graph::blockchain::ChainIdentifier; use graph::components::subgraph::HostMetrics; use graph::data::store::ethereum::call; @@ -14,18 +16,14 @@ use graph::data::store::scalar::BigInt; use graph::data::subgraph::{API_VERSION_0_0_4, API_VERSION_0_0_9}; use graph::data_source; use graph::data_source::common::{ContractCall, MappingABI}; -use graph::futures03::FutureExt as _; -use graph::prelude::web3::types::H160; use graph::runtime::gas::Gas; use graph::runtime::{AscIndexId, IndexForAscTypeId}; use graph::slog::debug; use graph::{ blockchain::{self, BlockPtr, HostFnCtx}, cheap_clone::CheapClone, - prelude::{ - ethabi::{self, Address, Token}, - EthereumCallCache, - }, + futures03::FutureExt, + prelude::{alloy::primitives::Address, EthereumCallCache}, runtime::{asc_get, asc_new, AscPtr, HostExportError}, slog::Logger, }; @@ -244,7 +242,7 @@ async fn eth_get_balance( let logger = &ctx.logger; let block_ptr = &ctx.block_ptr; - let address: H160 = asc_get(ctx.heap, wasm_ptr.into(), &ctx.gas, 0)?; + let address: Address = asc_get(ctx.heap, wasm_ptr.into(), &ctx.gas, 0)?; let result = eth_adapter .get_balance(logger, address, block_ptr.clone()) @@ -256,7 +254,7 @@ async fn eth_get_balance( Ok(asc_new(ctx.heap, &bigint, &ctx.gas).await?) 
} // Retry on any kind of error - Err(EthereumRpcError::Web3Error(e)) => Err(HostExportError::PossibleReorg(e.into())), + Err(EthereumRpcError::AlloyError(e)) => Err(HostExportError::PossibleReorg(e.into())), Err(EthereumRpcError::Timeout) => Err(HostExportError::PossibleReorg( EthereumRpcError::Timeout.into(), )), @@ -280,7 +278,7 @@ async fn eth_has_code( let logger = &ctx.logger; let block_ptr = &ctx.block_ptr; - let address: H160 = asc_get(ctx.heap, wasm_ptr.into(), &ctx.gas, 0)?; + let address: Address = asc_get(ctx.heap, wasm_ptr.into(), &ctx.gas, 0)?; let result = eth_adapter .get_code(logger, address, block_ptr.clone()) @@ -290,7 +288,7 @@ async fn eth_has_code( match result { Ok(v) => Ok(asc_new(ctx.heap, &AscWrapped { inner: v }, &ctx.gas).await?), // Retry on any kind of error - Err(EthereumRpcError::Web3Error(e)) => Err(HostExportError::PossibleReorg(e.into())), + Err(EthereumRpcError::AlloyError(e)) => Err(HostExportError::PossibleReorg(e.into())), Err(EthereumRpcError::Timeout) => Err(HostExportError::PossibleReorg( EthereumRpcError::Timeout.into(), )), @@ -307,20 +305,7 @@ async fn eth_call( abis: &[Arc], eth_call_gas: Option, metrics: Arc, -) -> Result>, HostExportError> { - // Helpers to log the result of the call at the end - fn tokens_as_string(tokens: &[Token]) -> String { - tokens.iter().map(|arg| arg.to_string()).join(", ") - } - - fn result_as_string(result: &Result>, HostExportError>) -> String { - match result { - Ok(Some(tokens)) => format!("({})", tokens_as_string(tokens)), - Ok(None) => "none".to_string(), - Err(_) => "error".to_string(), - } - } - +) -> Result>, HostExportError> { let start_time = Instant::now(); // Obtain the path to the contract ABI @@ -366,7 +351,7 @@ async fn eth_call( // Any error reported by the Ethereum node could be due to the block no longer being on // the main chain. This is very unespecific but we don't want to risk failing a // subgraph due to a transient error such as a reorg. 
- Err(ContractCallError::Web3Error(e)) => Err(HostExportError::PossibleReorg(anyhow::anyhow!( + Err(ContractCallError::AlloyError(e)) => Err(HostExportError::PossibleReorg(anyhow::anyhow!( "Ethereum node returned an error when calling function \"{}\" of contract \"{}\": {}", unresolved_call.function_name, unresolved_call.contract_name, @@ -398,16 +383,26 @@ async fn eth_call( ); } - debug!(logger, "Contract call finished"; - "address" => format!("0x{:x}", &unresolved_call.contract_address), - "contract" => &unresolved_call.contract_name, - "signature" => &unresolved_call.function_signature, - "args" => format!("[{}]", tokens_as_string(&unresolved_call.function_args)), - "time_ms" => format!("{}ms", elapsed.as_millis()), - "result" => result_as_string(&result), - "block_hash" => block_ptr.hash_hex(), - "block_number" => block_ptr.block_number(), - "source" => source.to_string()); + let args_as_string = format!("[{}]", values_to_string(&unresolved_call.function_args)); + + let result_as_string = match &result { + Ok(Some(values)) => format!("({})", values_to_string(values)), + Ok(None) => "none".to_owned(), + Err(_err) => "error".to_owned(), + }; + + debug!( + logger, "Contract call finished"; + "address" => format!("0x{:x}", &unresolved_call.contract_address), + "contract" => &unresolved_call.contract_name, + "signature" => &unresolved_call.function_signature, + "args" => args_as_string, + "time_ms" => format!("{}ms", elapsed.as_millis()), + "result" => result_as_string, + "block_hash" => block_ptr.hash_hex(), + "block_number" => block_ptr.block_number(), + "source" => source.to_string(), + ); result } @@ -418,9 +413,18 @@ pub struct UnresolvedContractCall { pub contract_address: Address, pub function_name: String, pub function_signature: Option, - pub function_args: Vec, + pub function_args: Vec, } impl AscIndexId for AscUnresolvedContractCall { const INDEX_ASC_TYPE_ID: IndexForAscTypeId = IndexForAscTypeId::SmartContractCall; } + +#[inline] +fn values_to_string(values: &[abi::DynSolValue]) -> String { + values + .iter() + .map(|x| x.to_string()) + .collect_vec() + .join(", ") +} diff --git a/chain/ethereum/src/tests.rs b/chain/ethereum/src/tests.rs index 6ed05265cf3..ab5907a7c40 100644 --- a/chain/ethereum/src/tests.rs +++ b/chain/ethereum/src/tests.rs @@ -3,7 +3,12 @@ use std::sync::Arc; use graph::{ blockchain::{block_stream::BlockWithTriggers, BlockPtr, Trigger}, prelude::{ - web3::types::{Address, Bytes, Log, H160, H256, U64}, + alloy::{ + self, + primitives::{Address, Bytes, LogData, B256}, + rpc::types::{Block, Log}, + }, + rand::{self, Rng}, EthereumCall, LightEthereumBlock, }, slog::{self, o, Logger}, @@ -14,15 +19,55 @@ use crate::{ trigger::{EthereumBlockTriggerType, EthereumTrigger, LogRef}, }; +pub trait Random { + fn random() -> Self; +} + +impl Random for B256 { + fn random() -> Self { + let mut rng = rand::rng(); + let mut bytes = [0u8; 32]; + rng.fill(&mut bytes); + Self::from(bytes) + } +} + +impl Random for Address { + fn random() -> Self { + let mut rng = rand::rng(); + let mut bytes = [0u8; 20]; + rng.fill(&mut bytes); + Self::from(bytes) + } +} + +fn create_log(tx_index: u64, log_index: u64) -> Arc { + let log = Log { + inner: alloy::primitives::Log { + address: Address::default(), + data: LogData::new_unchecked(vec![], Bytes::from(vec![])), + }, + block_hash: Some(B256::ZERO), + block_number: Some(0), + block_timestamp: Some(0), + transaction_hash: Some(B256::ZERO), + transaction_index: Some(tx_index), + log_index: Some(log_index), + removed: false, + }; + + 
Arc::new(log) +} + #[test] fn test_trigger_ordering() { let block1 = EthereumTrigger::Block( - BlockPtr::from((H256::random(), 1u64)), + BlockPtr::from((B256::random(), 1u64)), EthereumBlockTriggerType::End, ); let block2 = EthereumTrigger::Block( - BlockPtr::from((H256::random(), 0u64)), + BlockPtr::from((B256::random(), 0u64)), EthereumBlockTriggerType::WithCallTo(Address::random()), ); @@ -32,11 +77,9 @@ fn test_trigger_ordering() { }; let call1 = EthereumTrigger::Call(Arc::new(call1)); - let call2 = EthereumCall { - transaction_index: 2, - input: Bytes(vec![0]), - ..Default::default() - }; + let mut call2 = EthereumCall::default(); + call2.transaction_index = 2; + call2.input = Bytes::from(vec![0]); let call2 = EthereumTrigger::Call(Arc::new(call2)); let call3 = EthereumCall { @@ -46,29 +89,12 @@ fn test_trigger_ordering() { let call3 = EthereumTrigger::Call(Arc::new(call3)); // Call with the same tx index as call2 - let call4 = EthereumCall { - transaction_index: 2, - input: Bytes(vec![1]), - ..Default::default() - }; + let mut call4 = EthereumCall::default(); + call4.transaction_index = 2; + // different than call2 so they don't get mistaken as the same + call4.input = Bytes::from(vec![1]); let call4 = EthereumTrigger::Call(Arc::new(call4)); - fn create_log(tx_index: u64, log_index: u64) -> Arc { - Arc::new(Log { - address: H160::default(), - topics: vec![], - data: Bytes::default(), - block_hash: Some(H256::zero()), - block_number: Some(U64::zero()), - transaction_hash: Some(H256::zero()), - transaction_index: Some(tx_index.into()), - log_index: Some(log_index.into()), - transaction_log_index: Some(log_index.into()), - log_type: Some("".into()), - removed: Some(false), - }) - } - // Event with transaction_index 1 and log_index 0; // should be the first element after sorting let log1 = EthereumTrigger::Log(LogRef::FullLog(create_log(1, 0), None)); @@ -99,14 +125,9 @@ fn test_trigger_ordering() { let logger = Logger::root(slog::Discard, o!()); - // The field initializers are necessary because inside of - // BlockWithTriggers::new there's a log for both fields. So just using - // Default above gives None on them. - let b: LightEthereumBlock = LightEthereumBlock { - number: Some(Default::default()), - hash: Some(Default::default()), - ..Default::default() - }; + let b = Block::default(); + + let b = LightEthereumBlock::new(graph::components::ethereum::AnyBlock::from(b)); // Test that `BlockWithTriggers` sorts the triggers. 
let block_with_triggers = BlockWithTriggers::::new( @@ -126,12 +147,12 @@ fn test_trigger_ordering() { #[test] fn test_trigger_dedup() { let block1 = EthereumTrigger::Block( - BlockPtr::from((H256::random(), 1u64)), + BlockPtr::from((B256::random(), 1u64)), EthereumBlockTriggerType::End, ); let block2 = EthereumTrigger::Block( - BlockPtr::from((H256::random(), 0u64)), + BlockPtr::from((B256::random(), 0u64)), EthereumBlockTriggerType::WithCallTo(Address::random()), ); @@ -163,22 +184,6 @@ fn test_trigger_dedup() { }; let call4 = EthereumTrigger::Call(Arc::new(call4)); - fn create_log(tx_index: u64, log_index: u64) -> Arc { - Arc::new(Log { - address: H160::default(), - topics: vec![], - data: Bytes::default(), - block_hash: Some(H256::zero()), - block_number: Some(U64::zero()), - transaction_hash: Some(H256::zero()), - transaction_index: Some(tx_index.into()), - log_index: Some(log_index.into()), - transaction_log_index: Some(log_index.into()), - log_type: Some("".into()), - removed: Some(false), - }) - } - let log1 = EthereumTrigger::Log(LogRef::FullLog(create_log(1, 0), None)); let log2 = EthereumTrigger::Log(LogRef::FullLog(create_log(1, 1), None)); let log3 = EthereumTrigger::Log(LogRef::FullLog(create_log(2, 5), None)); @@ -206,14 +211,11 @@ fn test_trigger_dedup() { let logger = Logger::root(slog::Discard, o!()); - // The field initializers are necessary because inside of - // BlockWithTriggers::new there's a log for both fields. So just using - // Default above gives None on them. - let b: LightEthereumBlock = LightEthereumBlock { - number: Some(Default::default()), - hash: Some(Default::default()), - ..Default::default() - }; + #[allow(unused_variables)] + let b = Block::default(); + + #[allow(unreachable_code)] + let b = LightEthereumBlock::new(graph::components::ethereum::AnyBlock::from(b)); // Test that `BlockWithTriggers` sorts the triggers. let block_with_triggers = BlockWithTriggers::::new( diff --git a/chain/ethereum/src/transport.rs b/chain/ethereum/src/transport.rs index ef571efacb8..d5fac8523bb 100644 --- a/chain/ethereum/src/transport.rs +++ b/chain/ethereum/src/transport.rs @@ -1,35 +1,31 @@ use graph::components::network_provider::ProviderName; -use graph::endpoint::{EndpointMetrics, RequestLabels}; -use jsonrpc_core::types::Call; -use jsonrpc_core::Value; - -use web3::transports::{http, ipc, ws}; -use web3::RequestId; - +use graph::endpoint::{ConnectionType, EndpointMetrics, RequestLabels}; +use graph::prelude::alloy::rpc::json_rpc::{RequestPacket, ResponsePacket}; use graph::prelude::*; use graph::url::Url; -use std::future::Future; +use std::sync::Arc; +use std::task::{Context, Poll}; +use tower::Service; + +use alloy::transports::{TransportError, TransportFut}; + +use graph::prelude::alloy::transports::{http::Http, ipc::IpcConnect, ws::WsConnect}; -/// Abstraction over the different web3 transports. +/// Abstraction over different transport types for Alloy providers. #[derive(Clone, Debug)] pub enum Transport { - RPC { - client: http::Http, - metrics: Arc, - provider: ProviderName, - }, - IPC(ipc::Ipc), - WS(ws::WebSocket), + RPC(alloy::rpc::client::RpcClient), + IPC(IpcConnect), + WS(WsConnect), } impl Transport { /// Creates an IPC transport. 
#[cfg(unix)] pub async fn new_ipc(ipc: &str) -> Self { - ipc::Ipc::new(ipc) - .await - .map(Transport::IPC) - .expect("Failed to connect to Ethereum IPC") + let transport = IpcConnect::new(ipc.to_string()); + + Transport::IPC(transport) } #[cfg(not(unix))] @@ -39,107 +35,93 @@ impl Transport { /// Creates a WebSocket transport. pub async fn new_ws(ws: &str) -> Self { - ws::WebSocket::new(ws) - .await - .map(Transport::WS) - .expect("Failed to connect to Ethereum WS") + let transport = WsConnect::new(ws.to_string()); + + Transport::WS(transport) } /// Creates a JSON-RPC over HTTP transport. - /// - /// Note: JSON-RPC over HTTP doesn't always support subscribing to new - /// blocks (one such example is Infura's HTTP endpoint). pub fn new_rpc( rpc: Url, headers: graph::http::HeaderMap, metrics: Arc, provider: impl AsRef, ) -> Self { - // Unwrap: This only fails if something is wrong with the system's TLS config. let client = reqwest::Client::builder() .default_headers(headers) .build() - .unwrap(); + .expect("Failed to build HTTP client"); + + let http_transport = Http::with_client(client, rpc); + let metrics_transport = MetricsHttp::new(http_transport, metrics, provider.as_ref().into()); + let rpc_client = alloy::rpc::client::RpcClient::new(metrics_transport, false); + + Transport::RPC(rpc_client) + } +} - Transport::RPC { - client: http::Http::with_client(client, rpc), +/// Custom HTTP transport wrapper that collects metrics +#[derive(Clone)] +pub struct MetricsHttp { + inner: Http, + metrics: Arc, + provider: ProviderName, +} + +impl MetricsHttp { + pub fn new( + inner: Http, + metrics: Arc, + provider: ProviderName, + ) -> Self { + Self { + inner, metrics, - provider: provider.as_ref().into(), + provider, } } } -impl web3::Transport for Transport { - type Out = Pin> + Send + 'static>>; - - fn prepare(&self, method: &str, params: Vec) -> (RequestId, Call) { - match self { - Transport::RPC { - client, - metrics: _, - provider: _, - } => client.prepare(method, params), - Transport::IPC(ipc) => ipc.prepare(method, params), - Transport::WS(ws) => ws.prepare(method, params), - } +// Implement tower::Service trait for MetricsHttp to intercept RPC calls +impl Service for MetricsHttp { + type Response = ResponsePacket; + type Error = TransportError; + type Future = TransportFut<'static>; + + fn poll_ready(&mut self, cx: &mut Context<'_>) -> Poll> { + self.inner.poll_ready(cx) } - fn send(&self, id: RequestId, request: Call) -> Self::Out { - match self { - Transport::RPC { - client, - metrics, + fn call(&mut self, request: RequestPacket) -> Self::Future { + let metrics = self.metrics.clone(); + let provider = self.provider.clone(); + let mut inner = self.inner.clone(); + + Box::pin(async move { + // Extract method name from request + let method = match &request { + RequestPacket::Single(req) => req.method().to_string(), + RequestPacket::Batch(reqs) => reqs + .first() + .map(|r| r.method().to_string()) + .unwrap_or_else(|| "batch".to_string()), + }; + + let labels = RequestLabels { provider, - } => { - let metrics = metrics.cheap_clone(); - let client = client.clone(); - let method = match request { - Call::MethodCall(ref m) => m.method.as_str(), - _ => "unknown", - }; - - let labels = RequestLabels { - provider: provider.clone(), - req_type: method.into(), - conn_type: graph::endpoint::ConnectionType::Rpc, - }; - let out = async move { - let out = client.send(id, request).await; - match out { - Ok(_) => metrics.success(&labels), - Err(_) => metrics.failure(&labels), - } - - out - }; - - 
Box::pin(out) + req_type: method.into(), + conn_type: ConnectionType::Rpc, + }; + + // Call inner transport and track metrics + let result = inner.call(request).await; + + match &result { + Ok(_) => metrics.success(&labels), + Err(_) => metrics.failure(&labels), } - Transport::IPC(ipc) => Box::pin(ipc.send(id, request)), - Transport::WS(ws) => Box::pin(ws.send(id, request)), - } - } -} -impl web3::BatchTransport for Transport { - type Batch = Box< - dyn Future>, web3::error::Error>> - + Send - + Unpin, - >; - - fn send_batch(&self, requests: T) -> Self::Batch - where - T: IntoIterator, - { - match self { - Transport::RPC { - client, - metrics: _, - provider: _, - } => Box::new(client.send_batch(requests)), - Transport::IPC(ipc) => Box::new(ipc.send_batch(requests)), - Transport::WS(ws) => Box::new(ws.send_batch(requests)), - } + result + }) } } diff --git a/chain/ethereum/src/trigger.rs b/chain/ethereum/src/trigger.rs index c9225cc3ce9..f969fa7063c 100644 --- a/chain/ethereum/src/trigger.rs +++ b/chain/ethereum/src/trigger.rs @@ -1,23 +1,20 @@ use async_trait::async_trait; +use graph::abi; use graph::blockchain::MappingTriggerTrait; use graph::blockchain::TriggerData; +use graph::components::ethereum::AnyTransaction; use graph::data::subgraph::API_VERSION_0_0_2; use graph::data::subgraph::API_VERSION_0_0_6; use graph::data::subgraph::API_VERSION_0_0_7; use graph::data_source::common::DeclaredCall; -use graph::prelude::ethabi::ethereum_types::H160; -use graph::prelude::ethabi::ethereum_types::H256; -use graph::prelude::ethabi::ethereum_types::U128; -use graph::prelude::ethabi::ethereum_types::U256; -use graph::prelude::ethabi::ethereum_types::U64; -use graph::prelude::ethabi::Address; -use graph::prelude::ethabi::LogParam; -use graph::prelude::web3::types::Block; -use graph::prelude::web3::types::Log; -use graph::prelude::web3::types::Transaction; -use graph::prelude::web3::types::TransactionReceipt; +use graph::prelude::alloy::consensus::Transaction as TransactionTrait; +use graph::prelude::alloy::network::AnyTransactionReceipt as AlloyTransactionReceipt; +use graph::prelude::alloy::network::TransactionResponse; +use graph::prelude::alloy::primitives::{Address, B256, U256}; +use graph::prelude::alloy::rpc::types::Log; use graph::prelude::BlockNumber; use graph::prelude::BlockPtr; +use graph::prelude::LightEthereumBlock; use graph::prelude::{CheapClone, EthereumCall}; use graph::runtime::asc_new; use graph::runtime::gas::GasCounter; @@ -38,26 +35,23 @@ use crate::runtime::abi::AscEthereumTransaction_0_0_1; use crate::runtime::abi::AscEthereumTransaction_0_0_2; use crate::runtime::abi::AscEthereumTransaction_0_0_6; -// ETHDEP: This should be defined in only one place. -type LightEthereumBlock = Block; - -static U256_DEFAULT: U256 = U256::zero(); +static U256_DEFAULT: U256 = U256::ZERO; pub enum MappingTrigger { Log { block: Arc, - transaction: Arc, + transaction: Arc, log: Arc, - params: Vec, - receipt: Option>, + params: Vec, + receipt: Option>, calls: Vec, }, Call { block: Arc, - transaction: Arc, + transaction: Arc, call: Arc, - inputs: Vec, - outputs: Vec, + inputs: Vec, + outputs: Vec, }, Block { block: Arc, @@ -65,7 +59,7 @@ pub enum MappingTrigger { } impl MappingTriggerTrait for MappingTrigger { - fn error_context(&self) -> std::string::String { + fn error_context(&self) -> String { let transaction_id = match self { MappingTrigger::Log { log, .. } => log.transaction_hash, MappingTrigger::Call { call, .. 
} => call.transaction_hash, @@ -85,15 +79,15 @@ impl std::fmt::Debug for MappingTrigger { #[derive(Debug)] enum MappingTriggerWithoutBlock { Log { - _transaction: Arc, + _transaction: Arc, _log: Arc, - _params: Vec, + _params: Vec, }, Call { - _transaction: Arc, + _transaction: Arc, _call: Arc, - _inputs: Vec, - _outputs: Vec, + _inputs: Vec, + _outputs: Vec, }, Block, } @@ -238,13 +232,13 @@ impl ToAscPtr for MappingTrigger { #[derive(Clone, Debug)] pub struct LogPosition { pub index: usize, - pub receipt: Arc, + pub receipt: Arc, pub requires_transaction_receipt: bool, } #[derive(Clone, Debug)] pub enum LogRef { - FullLog(Arc, Option>), + FullLog(Arc, Option>), LogPosition(LogPosition), } @@ -252,7 +246,7 @@ impl LogRef { pub fn log(&self) -> &Log { match self { LogRef::FullLog(log, _) => log.as_ref(), - LogRef::LogPosition(pos) => pos.receipt.logs.get(pos.index).unwrap(), + LogRef::LogPosition(pos) => pos.receipt.logs().get(pos.index).unwrap(), } } @@ -262,7 +256,7 @@ impl LogRef { /// For `LogPosition` variants, only returns the receipt if the /// `requires_transaction_receipt` flag is true, otherwise returns None /// even though the receipt is stored internally. - pub fn receipt(&self) -> Option<&Arc> { + pub fn receipt(&self) -> Option<&Arc> { match self { LogRef::FullLog(_, receipt) => receipt.as_ref(), LogRef::LogPosition(pos) => { @@ -275,28 +269,28 @@ impl LogRef { } } - pub fn log_index(&self) -> Option { + pub fn log_index(&self) -> Option { self.log().log_index } - pub fn transaction_index(&self) -> Option { + pub fn transaction_index(&self) -> Option { self.log().transaction_index } - fn transaction_hash(&self) -> Option { + fn transaction_hash(&self) -> Option { self.log().transaction_hash } - pub fn block_hash(&self) -> Option { + pub fn block_hash(&self) -> Option { self.log().block_hash } - pub fn block_number(&self) -> Option { + pub fn block_number(&self) -> Option { self.log().block_number } - pub fn address(&self) -> &H160 { - &self.log().address + pub fn address(&self) -> &Address { + &self.log().inner.address } } @@ -339,14 +333,14 @@ impl EthereumTrigger { EthereumTrigger::Block(block_ptr, _) => block_ptr.number, EthereumTrigger::Call(call) => call.block_number, EthereumTrigger::Log(log_ref) => { - i32::try_from(log_ref.block_number().unwrap().as_u64()).unwrap() + i32::try_from(log_ref.block_number().unwrap()).unwrap() } } } - pub fn block_hash(&self) -> H256 { + pub fn block_hash(&self) -> B256 { match self { - EthereumTrigger::Block(block_ptr, _) => block_ptr.hash_as_h256(), + EthereumTrigger::Block(block_ptr, _) => block_ptr.hash.as_b256(), EthereumTrigger::Call(call) => call.block_hash, EthereumTrigger::Log(log_ref) => log_ref.block_hash().unwrap(), } @@ -390,23 +384,21 @@ impl Ord for EthereumTrigger { // Calls vs. 
events are logged by their tx index; // if they are from the same transaction, events come first (Self::Call(a), Self::Log(b)) - if a.transaction_index == b.transaction_index().unwrap().as_u64() => + if a.transaction_index == b.transaction_index().unwrap() => { Ordering::Greater } (Self::Log(a), Self::Call(b)) - if a.transaction_index().unwrap().as_u64() == b.transaction_index => + if a.transaction_index().unwrap() == b.transaction_index => { Ordering::Less } - (Self::Call(a), Self::Log(b)) => a - .transaction_index - .cmp(&b.transaction_index().unwrap().as_u64()), - (Self::Log(a), Self::Call(b)) => a - .transaction_index() - .unwrap() - .as_u64() - .cmp(&b.transaction_index), + (Self::Call(a), Self::Log(b)) => { + a.transaction_index.cmp(&b.transaction_index().unwrap()) + } + (Self::Log(a), Self::Call(b)) => { + a.transaction_index().unwrap().cmp(&b.transaction_index) + } } } } @@ -437,137 +429,140 @@ impl TriggerData for EthereumTrigger { } fn address_match(&self) -> Option<&[u8]> { - self.address().map(|address| address.as_bytes()) + self.address().map(|address| address.as_slice()) } } /// Ethereum block data. #[derive(Clone, Debug)] pub struct EthereumBlockData<'a> { - block: &'a Block, + block: &'a LightEthereumBlock, } -impl<'a> From<&'a Block> for EthereumBlockData<'a> { - fn from(block: &'a Block) -> EthereumBlockData<'a> { +impl<'a> From<&'a LightEthereumBlock> for EthereumBlockData<'a> { + fn from(block: &'a LightEthereumBlock) -> EthereumBlockData<'a> { EthereumBlockData { block } } } impl<'a> EthereumBlockData<'a> { - pub fn hash(&self) -> &H256 { - self.block.hash.as_ref().unwrap() + pub fn hash(&self) -> &B256 { + &self.block.inner().header.hash } - pub fn parent_hash(&self) -> &H256 { - &self.block.parent_hash + pub fn parent_hash(&self) -> &B256 { + &self.block.inner().header.parent_hash } - pub fn uncles_hash(&self) -> &H256 { - &self.block.uncles_hash + pub fn uncles_hash(&self) -> &B256 { + &self.block.inner().header.ommers_hash } - pub fn author(&self) -> &H160 { - &self.block.author + pub fn author(&self) -> &Address { + &self.block.inner().header.beneficiary } - pub fn state_root(&self) -> &H256 { - &self.block.state_root + pub fn state_root(&self) -> &B256 { + &self.block.inner().header.state_root } - pub fn transactions_root(&self) -> &H256 { - &self.block.transactions_root + pub fn transactions_root(&self) -> &B256 { + &self.block.inner().header.transactions_root } - pub fn receipts_root(&self) -> &H256 { - &self.block.receipts_root + pub fn receipts_root(&self) -> &B256 { + &self.block.inner().header.receipts_root } - pub fn number(&self) -> U64 { - self.block.number.unwrap() + pub fn number(&self) -> u64 { + self.block.number_u64() } - pub fn gas_used(&self) -> &U256 { - &self.block.gas_used + pub fn gas_used(&self) -> u64 { + self.block.inner().header.gas_used } - pub fn gas_limit(&self) -> &U256 { - &self.block.gas_limit + pub fn gas_limit(&self) -> u64 { + self.block.inner().header.gas_limit } - pub fn timestamp(&self) -> &U256 { - &self.block.timestamp + pub fn timestamp(&self) -> u64 { + self.block.inner().header.timestamp } pub fn difficulty(&self) -> &U256 { - &self.block.difficulty + &self.block.inner().header.difficulty } pub fn total_difficulty(&self) -> &U256 { self.block + .inner() + .header .total_difficulty .as_ref() .unwrap_or(&U256_DEFAULT) } pub fn size(&self) -> &Option { - &self.block.size + &self.block.inner().header.size } - pub fn base_fee_per_gas(&self) -> &Option { - &self.block.base_fee_per_gas + pub fn base_fee_per_gas(&self) -> &Option 
{ + &self.block.inner().header.base_fee_per_gas } } /// Ethereum transaction data. #[derive(Clone, Debug)] pub struct EthereumTransactionData<'a> { - tx: &'a Transaction, + tx: &'a AnyTransaction, + base_fee_per_gas: Option, } impl<'a> EthereumTransactionData<'a> { // We don't implement `From` because it causes confusion with the `from` // accessor method - fn new(tx: &'a Transaction) -> EthereumTransactionData<'a> { - EthereumTransactionData { tx } + fn new(tx: &'a AnyTransaction, base_fee_per_gas: Option) -> EthereumTransactionData<'a> { + EthereumTransactionData { + tx, + base_fee_per_gas, + } } - pub fn hash(&self) -> &H256 { - &self.tx.hash + pub fn hash(&self) -> B256 { + self.tx.tx_hash() } - pub fn index(&self) -> U128 { - self.tx.transaction_index.unwrap().as_u64().into() + pub fn index(&self) -> u64 { + self.tx.transaction_index.unwrap() } - pub fn from(&self) -> &H160 { - // unwrap: this is always `Some` for txns that have been mined - // (see https://github.com/tomusdrw/rust-web3/pull/407) - self.tx.from.as_ref().unwrap() + pub fn from(&self) -> Address { + self.tx.from() } - pub fn to(&self) -> &Option { - &self.tx.to + pub fn to(&self) -> Option
{ + self.tx.to() } - pub fn value(&self) -> &U256 { - &self.tx.value + pub fn value(&self) -> U256 { + self.tx.value() } - pub fn gas_limit(&self) -> &U256 { - &self.tx.gas + pub fn gas_limit(&self) -> u64 { + self.tx.gas_limit() } - pub fn gas_price(&self) -> &U256 { - // EIP-1559 made this optional. - self.tx.gas_price.as_ref().unwrap_or(&U256_DEFAULT) + pub fn gas_price(&self) -> u128 { + self.tx.effective_gas_price(self.base_fee_per_gas) } pub fn input(&self) -> &[u8] { - &self.tx.input.0 + self.tx.input() } - pub fn nonce(&self) -> &U256 { - &self.tx.nonce + pub fn nonce(&self) -> u64 { + self.tx.nonce() } } @@ -576,45 +571,46 @@ impl<'a> EthereumTransactionData<'a> { pub struct EthereumEventData<'a> { pub block: EthereumBlockData<'a>, pub transaction: EthereumTransactionData<'a>, - pub params: &'a [LogParam], + pub params: &'a [abi::DynSolParam], log: &'a Log, } impl<'a> EthereumEventData<'a> { pub fn new( - block: &'a Block, - tx: &'a Transaction, + block: &'a LightEthereumBlock, + tx: &'a AnyTransaction, log: &'a Log, - params: &'a [LogParam], + params: &'a [abi::DynSolParam], ) -> Self { EthereumEventData { block: EthereumBlockData::from(block), - transaction: EthereumTransactionData::new(tx), + transaction: EthereumTransactionData::new(tx, block.base_fee_per_gas()), log, params, } } pub fn address(&self) -> &Address { - &self.log.address + &self.log.inner.address } - pub fn log_index(&self) -> &U256 { - self.log.log_index.as_ref().unwrap_or(&U256_DEFAULT) + pub fn log_index(&self) -> u64 { + self.log.log_index.unwrap_or(0) } - pub fn transaction_log_index(&self) -> &U256 { + pub fn transaction_log_index(&self) -> u64 { // We purposely use the `log_index` here. Geth does not support // `transaction_log_index`, and subgraphs that use it only care that // it identifies the log, the specific value is not important. Still // this will change the output of subgraphs that use this field. // // This was initially changed in commit b95c6953 - self.log.log_index.as_ref().unwrap_or(&U256_DEFAULT) + self.log.log_index.unwrap_or(0) } - pub fn log_type(&self) -> &Option { - &self.log.log_type + pub fn log_type(&self) -> Option { + // This field was present in old rust-web3 Block, but alloy doesn't have it. 
+ None } } @@ -623,22 +619,22 @@ impl<'a> EthereumEventData<'a> { pub struct EthereumCallData<'a> { pub block: EthereumBlockData<'a>, pub transaction: EthereumTransactionData<'a>, - pub inputs: &'a [LogParam], - pub outputs: &'a [LogParam], + pub inputs: &'a [abi::DynSolParam], + pub outputs: &'a [abi::DynSolParam], call: &'a EthereumCall, } impl<'a> EthereumCallData<'a> { fn new( - block: &'a Block, - transaction: &'a Transaction, + block: &'a LightEthereumBlock, + transaction: &'a AnyTransaction, call: &'a EthereumCall, - inputs: &'a [LogParam], - outputs: &'a [LogParam], + inputs: &'a [abi::DynSolParam], + outputs: &'a [abi::DynSolParam], ) -> EthereumCallData<'a> { EthereumCallData { block: EthereumBlockData::from(block), - transaction: EthereumTransactionData::new(transaction), + transaction: EthereumTransactionData::new(transaction, block.base_fee_per_gas()), inputs, outputs, call, diff --git a/chain/near/src/codec.rs b/chain/near/src/codec.rs index bbcfd6646a4..fca0a4f4c9e 100644 --- a/chain/near/src/codec.rs +++ b/chain/near/src/codec.rs @@ -3,18 +3,17 @@ pub mod pbcodec; use graph::{ - blockchain::Block as BlockchainBlock, - blockchain::{BlockPtr, BlockTime}, - prelude::{hex, web3::types::H256, BlockNumber}, + blockchain::{Block as BlockchainBlock, BlockPtr, BlockTime}, + prelude::{alloy::primitives::B256, hex, BlockNumber}, }; use std::convert::TryFrom; use std::fmt::LowerHex; pub use pbcodec::*; -impl From<&CryptoHash> for H256 { +impl From<&CryptoHash> for B256 { fn from(input: &CryptoHash) -> Self { - H256::from_slice(&input.bytes) + B256::from_slice(&input.bytes) } } @@ -27,7 +26,7 @@ impl LowerHex for &CryptoHash { impl BlockHeader { pub fn parent_ptr(&self) -> Option { match (self.prev_hash.as_ref(), self.prev_height) { - (Some(hash), number) => Some(BlockPtr::from((H256::from(hash), number))), + (Some(hash), number) => Some(BlockPtr::from((B256::from(hash), number))), _ => None, } } @@ -35,7 +34,7 @@ impl BlockHeader { impl<'a> From<&'a BlockHeader> for BlockPtr { fn from(b: &'a BlockHeader) -> BlockPtr { - BlockPtr::from((H256::from(b.hash.as_ref().unwrap()), b.height)) + BlockPtr::from((B256::from(b.hash.as_ref().unwrap()), b.height)) } } diff --git a/chain/near/src/trigger.rs b/chain/near/src/trigger.rs index c929d7caa19..ab68ab71beb 100644 --- a/chain/near/src/trigger.rs +++ b/chain/near/src/trigger.rs @@ -3,8 +3,8 @@ use graph::blockchain::Block; use graph::blockchain::MappingTriggerTrait; use graph::blockchain::TriggerData; use graph::derive::CheapClone; +use graph::prelude::alloy::primitives::B256; use graph::prelude::hex; -use graph::prelude::web3::types::H256; use graph::prelude::BlockNumber; use graph::runtime::HostExportError; use graph::runtime::{asc_new, gas::GasCounter, AscHeap, AscPtr}; @@ -80,10 +80,10 @@ impl NearTrigger { } } - pub fn block_hash(&self) -> H256 { + pub fn block_hash(&self) -> B256 { match self { - NearTrigger::Block(block) => block.ptr().hash_as_h256(), - NearTrigger::Receipt(receipt) => receipt.block.ptr().hash_as_h256(), + NearTrigger::Block(block) => block.ptr().hash.as_b256(), + NearTrigger::Receipt(receipt) => receipt.block.ptr().hash.as_b256(), } } diff --git a/graph/Cargo.toml b/graph/Cargo.toml index 7fa31765100..8e1703cb65b 100644 --- a/graph/Cargo.toml +++ b/graph/Cargo.toml @@ -4,6 +4,7 @@ version.workspace = true edition.workspace = true [dependencies] +alloy = { workspace = true } base64 = "=0.21.7" anyhow = "1.0" async-trait = { workspace = true } @@ -83,19 +84,12 @@ portable-atomic = { version = "1.11", features = 
["fallback"] } itertools = "0.14.0" defer = "0.2" -# Our fork contains patches to make some fields optional for Celo and Fantom compatibility. -# Without the "arbitrary_precision" feature, we get the error `data did not match any variant of untagged enum Response`. -web3 = { git = "https://github.com/graphprotocol/rust-web3", branch = "graph-patches-onto-0.18", features = [ - "arbitrary_precision", - "test", -] } serde_plain = "1.0.2" csv = "1.4.0" object_store = { version = "0.12.4", features = ["gcp"] } # Dependencies related to Amp subgraphs ahash.workspace = true -alloy.workspace = true arrow-flight.workspace = true arrow.workspace = true half.workspace = true diff --git a/graph/src/abi/event_ext.rs b/graph/src/abi/event_ext.rs new file mode 100644 index 00000000000..94088dfcaae --- /dev/null +++ b/graph/src/abi/event_ext.rs @@ -0,0 +1,169 @@ +use std::collections::VecDeque; + +use alloy::json_abi::Event; +use alloy::rpc::types::Log; +use anyhow::anyhow; +use anyhow::Result; + +use crate::abi::{DynSolParam, DynSolValue}; + +pub trait EventExt { + fn decode_log(&self, log: &Log) -> Result>; +} + +impl EventExt for Event { + fn decode_log(&self, log: &Log) -> Result> { + let log_data = log.data(); + let decoded_event = alloy::dyn_abi::EventExt::decode_log(self, &log_data)?; + let mut indexed: VecDeque = decoded_event.indexed.into(); + let mut body: VecDeque = decoded_event.body.into(); + + if self.inputs.len() != indexed.len() + body.len() { + return Err(anyhow!( + "unexpected number of decoded event inputs; expected {}, got {}", + self.inputs.len(), + indexed.len() + body.len(), + )); + } + + let mut decoded_params = Vec::with_capacity(self.inputs.len()); + + for input in &self.inputs { + decoded_params.push(DynSolParam { + name: input.name.clone(), + value: { + if input.indexed { + indexed.pop_front().unwrap() + } else { + body.pop_front().unwrap() + } + }, + }); + } + + Ok(decoded_params) + } +} + +#[cfg(test)] +mod tests { + use alloy::dyn_abi::DynSolValue; + use alloy::primitives::{LogData, U256}; + + use super::*; + + fn make_log(topics: &[[u8; 32]], data: Vec) -> Log { + Log { + inner: alloy::primitives::Log { + address: [1; 20].into(), + data: LogData::new_unchecked(topics.iter().map(Into::into).collect(), data.into()), + }, + block_hash: None, + block_number: None, + block_timestamp: None, + transaction_hash: None, + transaction_index: None, + log_index: None, + removed: false, + } + } + + #[test] + fn decode_log_no_topic_0() { + let event = Event::parse("event X(uint256 indexed a, bytes32 b)").unwrap(); + let a = U256::from(10).to_be_bytes::<32>(); + let b = DynSolValue::FixedBytes([10; 32].into(), 32).abi_encode(); + + let log = make_log(&[a], b); + let err = event.decode_log(&log).unwrap_err(); + + assert_eq!( + err.to_string(), + "invalid log topic list length: expected 2 topics, got 1", + ); + } + + #[test] + fn decode_log_invalid_topic_0() { + let event = Event::parse("event X(uint256 indexed a, bytes32 b)").unwrap(); + let a = U256::from(10).to_be_bytes::<32>(); + let b = DynSolValue::FixedBytes([10; 32].into(), 32).abi_encode(); + + let log = make_log(&[[0; 32], a], b); + let err = event.decode_log(&log).unwrap_err(); + + assert!(err.to_string().starts_with("invalid event signature:")); + } + + #[test] + fn decode_log_success() { + let event = Event::parse("event X(uint256 indexed a, bytes32 b)").unwrap(); + let topic_0 = event.selector().0; + let a = U256::from(10).to_be_bytes::<32>(); + let b = DynSolValue::FixedBytes([10; 32].into(), 32).abi_encode(); + + let log = 
make_log(&[topic_0, a], b); + let resp = event.decode_log(&log).unwrap(); + + assert_eq!( + resp, + vec![ + DynSolParam { + name: "a".to_owned(), + value: DynSolValue::Uint(U256::from(10), 256), + }, + DynSolParam { + name: "b".to_owned(), + value: DynSolValue::FixedBytes([10; 32].into(), 32), + } + ], + ); + } + + #[test] + fn decode_log_too_many_topics() { + let event = Event::parse("event X(uint256 indexed a, bytes32 b)").unwrap(); + let topic_0 = event.selector().0; + let a = U256::from(10).to_be_bytes::<32>(); + let b = DynSolValue::FixedBytes([10; 32].into(), 32).abi_encode(); + + let log = make_log(&[topic_0, a, a, a, a], b); + let err = event.decode_log(&log).unwrap_err(); + + assert_eq!( + err.to_string(), + "invalid log topic list length: expected 2 topics, got 5" + ); + } + + #[test] + fn decode_log_when_indexed_param_is_not_the_first() { + let event = Event::parse("event X(uint256 a, uint256 indexed b, bytes32 c)").unwrap(); + let topic_0 = event.selector().0; + let a = DynSolValue::Uint(U256::from(10), 32); + let b = U256::from(20).to_be_bytes::<32>(); + let c = DynSolValue::FixedBytes([30; 32].into(), 32); + let data = DynSolValue::Tuple(vec![a, c]).abi_encode(); + + let log = make_log(&[topic_0, b], data); + let resp = event.decode_log(&log).unwrap(); + + assert_eq!( + resp, + vec![ + DynSolParam { + name: "a".to_owned(), + value: DynSolValue::Uint(U256::from(10), 256), + }, + DynSolParam { + name: "b".to_owned(), + value: DynSolValue::Uint(U256::from(20), 256), + }, + DynSolParam { + name: "c".to_owned(), + value: DynSolValue::FixedBytes([30; 32].into(), 32), + } + ], + ); + } +} diff --git a/graph/src/abi/function_ext.rs b/graph/src/abi/function_ext.rs new file mode 100644 index 00000000000..3264dd10a35 --- /dev/null +++ b/graph/src/abi/function_ext.rs @@ -0,0 +1,303 @@ +use std::borrow::Cow; + +use alloy::dyn_abi::DynSolType; +use alloy::dyn_abi::DynSolValue; +use alloy::dyn_abi::Specifier; +use alloy::json_abi::Function; +use alloy::json_abi::Param; +use anyhow::anyhow; +use anyhow::Result; +use itertools::Itertools; + +use crate::abi::DynSolValueExt; + +pub trait FunctionExt { + /// Returns the signature of this function in the following formats: + /// - if the function has no outputs: `$name($($inputs),*)` + /// - if the function has outputs: `$name($($inputs),*):($(outputs),*)` + /// + /// Examples: + /// - `functionName()` + /// - `functionName():(uint256)` + /// - `functionName(bool):(uint256,string)` + /// - `functionName(uint256,bytes32):(string,uint256)` + fn signature_compat(&self) -> String; + + /// ABI-decodes the given data according to the function's input types. + fn abi_decode_input(&self, data: &[u8]) -> Result>; + + /// ABI-decodes the given data according to the function's output types. + fn abi_decode_output(&self, data: &[u8]) -> Result>; + + /// ABI-encodes the given values, prefixed by the function's selector, if any. + /// + /// This behaviour is to ensure consistency with `ethabi`. + fn abi_encode_input(&self, values: &[DynSolValue]) -> Result>; +} + +impl FunctionExt for Function { + fn signature_compat(&self) -> String { + let name = &self.name; + let inputs = &self.inputs; + let outputs = &self.outputs; + + // This is what `alloy` uses internally when creating signatures. 
+ const MAX_SOL_TYPE_LEN: usize = 32; + + let mut sig_cap = name.len() + 1 + inputs.len() * MAX_SOL_TYPE_LEN + 1; + + if !outputs.is_empty() { + sig_cap = sig_cap + 2 + outputs.len() * MAX_SOL_TYPE_LEN + 1; + } + + let mut sig = String::with_capacity(sig_cap); + + sig.push_str(&name); + signature_part(&inputs, &mut sig); + + if !outputs.is_empty() { + sig.push(':'); + signature_part(&outputs, &mut sig); + } + + sig + } + + fn abi_decode_input(&self, data: &[u8]) -> Result> { + (self as &dyn alloy::dyn_abi::FunctionExt) + .abi_decode_input(data) + .map_err(Into::into) + } + + fn abi_decode_output(&self, data: &[u8]) -> Result> { + (self as &dyn alloy::dyn_abi::FunctionExt) + .abi_decode_output(data) + .map_err(Into::into) + } + + fn abi_encode_input(&self, values: &[DynSolValue]) -> Result> { + let inputs = &self.inputs; + + if inputs.len() != values.len() { + return Err(anyhow!( + "unexpected number of values; expected {}, got {}", + inputs.len(), + values.len(), + )); + } + + let mut fixed_values = Vec::with_capacity(values.len()); + + for (i, input) in inputs.iter().enumerate() { + let ty = input.resolve()?; + let val = &values[i]; + + fixed_values.push(fix_type_size(&ty, val)?); + } + + if fixed_values.iter().all(|x| matches!(x, Cow::Borrowed(_))) { + return (self as &dyn alloy::dyn_abi::JsonAbiExt) + .abi_encode_input(values) + .map_err(Into::into); + } + + // Required because of `alloy::dyn_abi::JsonAbiExt::abi_encode_input` API; + let owned_fixed_values = fixed_values + .into_iter() + .map(|x| x.into_owned()) + .collect_vec(); + + (self as &dyn alloy::dyn_abi::JsonAbiExt) + .abi_encode_input(&owned_fixed_values) + .map_err(Into::into) + } +} + +// An efficient way to compute a part of the signature without new allocations. +fn signature_part(params: &[Param], out: &mut String) { + out.push('('); + + match params.len() { + 0 => {} + 1 => { + params[0].selector_type_raw(out); + } + n => { + params[0].selector_type_raw(out); + + for i in 1..n { + out.push(','); + params[i].selector_type_raw(out); + } + } + } + + out.push(')'); +} + +// Alloy is stricter in type checking than `ehtabi` and requires that the decoded values have +// exactly the same number of bits / bytes as the type used for checking. +// +// This is a problem because in some ASC conversions we lose the original number of bits / bytes +// if the actual data takes less memory. +// +// This method fixes that in a simple but not very cheap way, by encoding the value and trying +// to decode it again using the given type. The result fixes the number of bits / bytes in the +// decoded values, so we can use `alloy` methods that have strict type checking internally. 
+fn fix_type_size<'a>(ty: &DynSolType, val: &'a DynSolValue) -> Result> { + if val.matches(ty) { + return Ok(Cow::Borrowed(val)); + } + + if !val.type_check(ty) { + return Err(anyhow!( + "invalid value type; expected '{}', got '{:?}'", + ty.sol_type_name(), + val.sol_type_name(), + )); + } + + let bytes = val.abi_encode(); + let new_val = ty.abi_decode(&bytes)?; + + Ok(Cow::Owned(new_val)) +} + +#[cfg(test)] +mod tests { + use alloy::primitives::I256; + use alloy::primitives::U256; + + use super::*; + + fn s(f: &str) -> String { + Function::parse(f).unwrap().signature_compat() + } + + fn u256(u: u64) -> U256 { + U256::from(u) + } + + fn i256(i: i32) -> I256 { + I256::try_from(i).unwrap() + } + + #[test] + fn signature_compat_no_inputs_no_outputs() { + assert_eq!(s("x()"), "x()"); + } + + #[test] + fn signature_compat_one_input_no_outputs() { + assert_eq!(s("x(uint256 a)"), "x(uint256)"); + } + + #[test] + fn signature_compat_multiple_inputs_no_outputs() { + assert_eq!(s("x(uint256 a, bytes32 b)"), "x(uint256,bytes32)"); + } + + #[test] + fn signature_compat_no_inputs_one_output() { + assert_eq!(s("x() returns (uint256)"), "x():(uint256)"); + } + + #[test] + fn signature_compat_no_inputs_multiple_outputs() { + assert_eq!(s("x() returns (uint256, bytes32)"), "x():(uint256,bytes32)"); + } + + #[test] + fn signature_compat_multiple_inputs_multiple_outputs() { + assert_eq!( + s("x(bytes32 a, uint256 b) returns (uint256, bytes32)"), + "x(bytes32,uint256):(uint256,bytes32)", + ); + } + + #[test] + fn abi_decode_input() { + use DynSolValue::{Int, Tuple, Uint}; + + let f = Function::parse("x(uint256 a, int256 b)").unwrap(); + let data = Tuple(vec![Uint(u256(10), 256), Int(i256(-10), 256)]).abi_encode_params(); + let inputs = f.abi_decode_input(&data).unwrap(); + + assert_eq!(inputs, vec![Uint(u256(10), 256), Int(i256(-10), 256)]); + } + + #[test] + fn abi_decode_output() { + use DynSolValue::{Int, Tuple, Uint}; + + let f = Function::parse("x() returns (uint256 a, int256 b)").unwrap(); + let data = Tuple(vec![Uint(u256(10), 256), Int(i256(-10), 256)]).abi_encode_params(); + let outputs = f.abi_decode_output(&data).unwrap(); + + assert_eq!(outputs, vec![Uint(u256(10), 256), Int(i256(-10), 256)]); + } + + #[test] + fn abi_encode_input_no_values() { + let f = Function::parse("x(uint256 a, int256 b)").unwrap(); + let err = f.abi_encode_input(&[]).unwrap_err(); + + assert_eq!( + err.to_string(), + "unexpected number of values; expected 2, got 0", + ); + } + + #[test] + fn abi_encode_input_too_many_values() { + use DynSolValue::Bool; + + let f = Function::parse("x(uint256 a, int256 b)").unwrap(); + + let err = f + .abi_encode_input(&[Bool(true), Bool(false), Bool(true)]) + .unwrap_err(); + + assert_eq!( + err.to_string(), + "unexpected number of values; expected 2, got 3", + ); + } + + #[test] + fn abi_encode_input_invalid_types() { + use DynSolValue::Bool; + + let f = Function::parse("x(uint256 a, int256 b)").unwrap(); + let err = f.abi_encode_input(&[Bool(true), Bool(false)]).unwrap_err(); + assert!(err.to_string().starts_with("invalid value type;")); + } + + #[test] + fn abi_encode_success() { + use DynSolValue::{Bool, Uint}; + + let f = Function::parse("x(uint256 a, bool b)").unwrap(); + let a = Uint(u256(10), 256); + let b = Bool(true); + + let data = f.abi_encode_input(&[a.clone(), b.clone()]).unwrap(); + let inputs = f.abi_decode_input(&data[4..]).unwrap(); + + assert_eq!(inputs, vec![a, b]); + } + + #[test] + fn abi_encode_success_with_size_fix() { + use DynSolValue::{Int, Uint}; + + let f = 
Function::parse("x(uint256 a, int256 b)").unwrap(); + let a = Uint(u256(10), 32); + let b = Int(i256(-10), 32); + + let data = f.abi_encode_input(&[a, b]).unwrap(); + let inputs = f.abi_decode_input(&data[4..]).unwrap(); + + assert_eq!(inputs, vec![Uint(u256(10), 256), Int(i256(-10), 256)]); + } +} diff --git a/graph/src/abi/mod.rs b/graph/src/abi/mod.rs new file mode 100644 index 00000000000..9bedeb0e3b2 --- /dev/null +++ b/graph/src/abi/mod.rs @@ -0,0 +1,20 @@ +mod event_ext; +mod function_ext; +mod param; +mod value_ext; + +pub use alloy::dyn_abi::DynSolType; +pub use alloy::dyn_abi::DynSolValue; + +pub use alloy::json_abi::Event; +pub use alloy::json_abi::Function; +pub use alloy::json_abi::JsonAbi; +pub use alloy::json_abi::StateMutability; + +pub use alloy::primitives::I256; +pub use alloy::primitives::U256 as AlloyU256; + +pub use self::event_ext::EventExt; +pub use self::function_ext::FunctionExt; +pub use self::param::DynSolParam; +pub use self::value_ext::DynSolValueExt; diff --git a/graph/src/abi/param.rs b/graph/src/abi/param.rs new file mode 100644 index 00000000000..49e0f0878ea --- /dev/null +++ b/graph/src/abi/param.rs @@ -0,0 +1,7 @@ +use alloy::dyn_abi::DynSolValue; + +#[derive(Clone, Debug, PartialEq)] +pub struct DynSolParam { + pub name: String, + pub value: DynSolValue, +} diff --git a/graph/src/abi/value_ext.rs b/graph/src/abi/value_ext.rs new file mode 100644 index 00000000000..cb0f220e036 --- /dev/null +++ b/graph/src/abi/value_ext.rs @@ -0,0 +1,277 @@ +use alloy::dyn_abi::DynSolType; +use alloy::dyn_abi::DynSolValue; +use anyhow::anyhow; +use anyhow::Result; +use itertools::Itertools; + +pub trait DynSolValueExt { + /// Creates a fixed-byte decoded value from a slice. + /// + /// Fails if the source slice exceeds 32 bytes. + fn fixed_bytes_from_slice(s: &[u8]) -> Result; + + /// Returns the decoded value as a string. + /// + /// The resulting string contains no type information. + fn to_string(&self) -> String; + + /// Checks whether the value is of the specified type. + /// + /// For types with additional size information, returns true if the size of the value is less + /// than or equal to the size of the specified type. + #[must_use] + fn type_check(&self, ty: &DynSolType) -> bool; +} + +impl DynSolValueExt for DynSolValue { + fn fixed_bytes_from_slice(s: &[u8]) -> Result { + let num_bytes = s.len(); + + if num_bytes > 32 { + return Err(anyhow!( + "input slice must contain a maximum of 32 bytes, got {num_bytes}" + )); + } + + let mut bytes = [0u8; 32]; + + // Access: If `x` is of type `bytesI`, then `x[k]` for `0 <= k < I` returns the `k`th byte. 
+ // Ref: + bytes[..num_bytes].copy_from_slice(s); + + Ok(Self::FixedBytes(bytes.into(), num_bytes)) + } + + fn to_string(&self) -> String { + let s = |v: &[Self]| v.iter().map(|x| x.to_string()).collect_vec().join(","); + + // Output format is taken from `ethabi`; + // See: + match self { + Self::Bool(v) => v.to_string(), + Self::Int(v, _) => format!("{v:x}"), + Self::Uint(v, _) => format!("{v:x}"), + Self::FixedBytes(v, _) => hex::encode(v), + Self::Address(v) => format!("{v:x}"), + Self::Function(v) => format!("{v:x}"), + Self::Bytes(v) => hex::encode(v), + Self::String(v) => v.to_owned(), + Self::Array(v) => format!("[{}]", s(v)), + Self::FixedArray(v) => format!("[{}]", s(v)), + Self::Tuple(v) => format!("({})", s(v)), + } + } + + fn type_check(&self, ty: &DynSolType) -> bool { + match self { + Self::Bool(_) => *ty == DynSolType::Bool, + Self::Int(_, a) => { + if let DynSolType::Int(b) = ty { + b >= a + } else { + false + } + } + Self::Uint(_, a) => { + if let DynSolType::Uint(b) = ty { + b >= a + } else { + false + } + } + Self::FixedBytes(_, a) => { + if let DynSolType::FixedBytes(b) = ty { + b >= a + } else { + false + } + } + Self::Address(_) => *ty == DynSolType::Address, + Self::Function(_) => *ty == DynSolType::Function, + Self::Bytes(_) => *ty == DynSolType::Bytes, + Self::String(_) => *ty == DynSolType::String, + Self::Array(values) => { + if let DynSolType::Array(ty) = ty { + values.iter().all(|x| x.type_check(ty)) + } else { + false + } + } + Self::FixedArray(values) => { + if let DynSolType::FixedArray(ty, size) = ty { + *size == values.len() && values.iter().all(|x| x.type_check(ty)) + } else { + false + } + } + Self::Tuple(values) => { + if let DynSolType::Tuple(types) = ty { + types.len() == values.len() + && values + .iter() + .enumerate() + .all(|(i, x)| x.type_check(&types[i])) + } else { + false + } + } + } + } +} + +#[cfg(test)] +mod tests { + use alloy::primitives::I256; + use alloy::primitives::U256; + + use super::*; + + #[test] + fn fixed_bytes_from_slice_empty_slice() { + let val = DynSolValue::fixed_bytes_from_slice(&[]).unwrap(); + let bytes = [0; 32]; + + assert_eq!(val, DynSolValue::FixedBytes(bytes.into(), 0)); + } + + #[test] + fn fixed_bytes_from_slice_one_byte() { + let val = DynSolValue::fixed_bytes_from_slice(&[10]).unwrap(); + let mut bytes = [0; 32]; + bytes[0] = 10; + + assert_eq!(val, DynSolValue::FixedBytes(bytes.into(), 1)); + } + + #[test] + fn fixed_bytes_from_slice_multiple_bytes() { + let val = DynSolValue::fixed_bytes_from_slice(&[10, 20, 30]).unwrap(); + let mut bytes = [0; 32]; + bytes[0] = 10; + bytes[1] = 20; + bytes[2] = 30; + + assert_eq!(val, DynSolValue::FixedBytes(bytes.into(), 3)); + } + + #[test] + fn fixed_bytes_from_slice_max_bytes() { + let val = DynSolValue::fixed_bytes_from_slice(&[10; 32]).unwrap(); + let bytes = [10; 32]; + + assert_eq!(val, DynSolValue::FixedBytes(bytes.into(), 32)); + } + + #[test] + fn fixed_bytes_from_slice_too_many_bytes() { + DynSolValue::fixed_bytes_from_slice(&[10; 33]).unwrap_err(); + } + + #[test] + fn to_string() { + use DynSolValue::*; + + assert_eq!(Bool(false).to_string(), "false"); + assert_eq!(Bool(true).to_string(), "true"); + + assert_eq!( + Int(I256::try_from(-10).unwrap(), 256).to_string(), + "fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff6", + ); + + assert_eq!(Uint(U256::from(10), 256).to_string(), "a"); + + assert_eq!( + FixedBytes([10; 32].into(), 32).to_string(), + "0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a", + ); + + assert_eq!( + 
Address([10; 20].into()).to_string(), + "0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a", + ); + + assert_eq!( + Function([10; 24].into()).to_string(), + "0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a", + ); + + assert_eq!(Bytes(vec![10, 20, 30]).to_string(), "0a141e"); + + assert_eq!( + String("one two three".to_owned()).to_string(), + "one two three" + ); + + assert_eq!( + Array(vec![String("one".to_owned()), String("two".to_owned())]).to_string(), + "[one,two]", + ); + + assert_eq!( + FixedArray(vec![String("one".to_owned()), String("two".to_owned())]).to_string(), + "[one,two]" + ); + + assert_eq!( + Tuple(vec![String("one".to_owned()), String("two".to_owned())]).to_string(), + "(one,two)" + ); + } + + #[test] + fn type_check() { + use DynSolType as T; + use DynSolValue::*; + + assert!(Bool(true).type_check(&T::Bool)); + assert!(!Bool(true).type_check(&T::Int(256))); + + assert!(!Int(I256::try_from(-10).unwrap(), 32).type_check(&T::Int(24))); + assert!(Int(I256::try_from(-10).unwrap(), 32).type_check(&T::Int(32))); + assert!(Int(I256::try_from(-10).unwrap(), 32).type_check(&T::Int(256))); + assert!(!Int(I256::try_from(-10).unwrap(), 32).type_check(&T::Uint(256))); + + assert!(!Uint(U256::from(10), 32).type_check(&T::Uint(24))); + assert!(Uint(U256::from(10), 32).type_check(&T::Uint(32))); + assert!(Uint(U256::from(10), 32).type_check(&T::Uint(256))); + assert!(!Uint(U256::from(10), 32).type_check(&T::FixedBytes(32))); + + assert!(!FixedBytes([0; 32].into(), 16).type_check(&T::FixedBytes(8))); + assert!(FixedBytes([0; 32].into(), 16).type_check(&T::FixedBytes(16))); + assert!(FixedBytes([0; 32].into(), 16).type_check(&T::FixedBytes(32))); + assert!(!FixedBytes([0; 32].into(), 32).type_check(&T::Address)); + + assert!(Address([0; 20].into()).type_check(&T::Address)); + assert!(!Address([0; 20].into()).type_check(&T::Function)); + + assert!(Function([0; 24].into()).type_check(&T::Function)); + assert!(!Function([0; 24].into()).type_check(&T::Bytes)); + + assert!(Bytes(vec![0, 0, 0]).type_check(&T::Bytes)); + assert!(!Bytes(vec![0, 0, 0]).type_check(&T::String)); + + assert!(String("".to_owned()).type_check(&T::String)); + assert!(!String("".to_owned()).type_check(&T::Array(Box::new(T::Bool)))); + + assert!(Array(vec![Bool(true)]).type_check(&T::Array(Box::new(T::Bool)))); + assert!(!Array(vec![Bool(true)]).type_check(&T::Array(Box::new(T::String)))); + assert!(!Array(vec![Bool(true)]).type_check(&T::FixedArray(Box::new(T::Bool), 1))); + + assert!(!FixedArray(vec![String("".to_owned())]) + .type_check(&T::FixedArray(Box::new(T::Bool), 1))); + assert!(!FixedArray(vec![Bool(true), Bool(false)]) + .type_check(&T::FixedArray(Box::new(T::Bool), 1))); + assert!(FixedArray(vec![Bool(true), Bool(false)]) + .type_check(&T::FixedArray(Box::new(T::Bool), 2))); + assert!(!FixedArray(vec![Bool(true), Bool(false)]) + .type_check(&T::FixedArray(Box::new(T::Bool), 3))); + assert!(!FixedArray(vec![Bool(true), Bool(false)]) + .type_check(&T::Tuple(vec![T::Bool, T::Bool]))); + + assert!(!Tuple(vec![Bool(true), Bool(false)]).type_check(&T::Tuple(vec![T::Bool]))); + assert!(Tuple(vec![Bool(true), Bool(false)]).type_check(&T::Tuple(vec![T::Bool, T::Bool]))); + assert!(!Tuple(vec![Bool(true)]).type_check(&T::Tuple(vec![T::Bool, T::Bool]))); + assert!(!Tuple(vec![Bool(true)]).type_check(&T::Bool)); + } +} diff --git a/graph/src/blockchain/mock.rs b/graph/src/blockchain/mock.rs index a4a411d8e5c..31980a996f2 100644 --- a/graph/src/blockchain/mock.rs +++ b/graph/src/blockchain/mock.rs @@ -16,6 +16,7 @@ use crate::{ 
DataSourceTemplateInfo, StoreError, }, }; +use alloy::primitives::{B256, U256}; use anyhow::{Error, Result}; use async_trait::async_trait; use serde::Deserialize; @@ -26,7 +27,6 @@ use std::{ convert::TryFrom, sync::Arc, }; -use web3::types::H256; use super::{ block_stream::{self, BlockStream, FirehoseCursor}, @@ -73,7 +73,7 @@ pub fn test_ptr(n: BlockNumber) -> BlockPtr { } pub fn test_ptr_reorged(n: BlockNumber, reorg_n: u32) -> BlockPtr { - let mut hash = H256::from_low_u64_be(n as u64); + let mut hash = B256::from(U256::from(n as u64)); hash[0..4].copy_from_slice(&reorg_n.to_be_bytes()); BlockPtr { hash: hash.into(), @@ -524,7 +524,7 @@ impl ChainStore for MockChainStore { async fn attempt_chain_head_update( self: Arc, _ancestor_count: BlockNumber, - ) -> Result, Error> { + ) -> Result, Error> { unimplemented!() } async fn blocks(self: Arc, _hashes: Vec) -> Result, Error> { @@ -571,7 +571,7 @@ impl ChainStore for MockChainStore { } async fn transaction_receipts_in_block( &self, - _block_ptr: &H256, + _block_ptr: &B256, ) -> Result, StoreError> { unimplemented!() } diff --git a/graph/src/blockchain/mod.rs b/graph/src/blockchain/mod.rs index d71f54f7779..f65fcea2e5b 100644 --- a/graph/src/blockchain/mod.rs +++ b/graph/src/blockchain/mod.rs @@ -31,6 +31,7 @@ use crate::{ components::store::BlockNumber, prelude::{thiserror::Error, LinkResolver}, }; +use alloy::primitives::B256; use anyhow::{anyhow, Context, Error}; use async_trait::async_trait; use futures03::future::BoxFuture; @@ -45,7 +46,6 @@ use std::{ str::FromStr, sync::Arc, }; -use web3::types::H256; pub use block_stream::{ChainHeadUpdateListener, ChainHeadUpdateStream, TriggersAdapter}; pub use empty_node_capabilities::EmptyNodeCapabilities; @@ -222,32 +222,26 @@ pub enum IngestorError { /// The Ethereum node does not know about this block for some reason, probably because it /// disappeared in a chain reorg. #[error("Block data unavailable, block was likely uncled (block hash = {0:?})")] - BlockUnavailable(H256), + BlockUnavailable(B256), /// The Ethereum node does not know about this block for some reason, probably because it /// disappeared in a chain reorg. #[error("Receipt for tx {1:?} unavailable, block was likely uncled (block hash = {0:?})")] - ReceiptUnavailable(H256, H256), + ReceiptUnavailable(B256, B256), /// The Ethereum node does not know about this block for some reason #[error("Transaction receipts for block (block hash = {0:?}) is unavailable")] - BlockReceiptsUnavailable(H256), + BlockReceiptsUnavailable(B256), /// The Ethereum node does not know about this block for some reason #[error("Received confliciting block receipts for block (block hash = {0:?})")] - BlockReceiptsMismatched(H256), + BlockReceiptsMismatched(B256), /// An unexpected error occurred. #[error("Ingestor error: {0:#}")] Unknown(#[from] Error), } -impl From for IngestorError { - fn from(e: web3::Error) -> Self { - IngestorError::Unknown(anyhow::anyhow!(e)) - } -} - /// The `TriggerFilterWrapper` is a higher-level wrapper around the chain-specific `TriggerFilter`, /// enabling subgraph-based trigger filtering for subgraph datasources. This abstraction is necessary /// because subgraph filtering operates at a higher level than chain-based filtering. 
By using this wrapper, diff --git a/graph/src/blockchain/types.rs b/graph/src/blockchain/types.rs index 34b44f2723e..f3f3fff58f1 100644 --- a/graph/src/blockchain/types.rs +++ b/graph/src/blockchain/types.rs @@ -1,3 +1,4 @@ +use alloy::primitives::B256; use anyhow::anyhow; use diesel::deserialize::FromSql; use diesel::pg::Pg; @@ -9,7 +10,8 @@ use serde::{Deserialize, Deserializer}; use std::convert::TryFrom; use std::time::Duration; use std::{fmt, str::FromStr}; -use web3::types::{Block, H256, U256, U64}; + +use crate::components::ethereum::LightEthereumBlock; use crate::cheap_clone::CheapClone; use crate::components::store::BlockNumber; @@ -32,8 +34,8 @@ impl BlockHash { &self.0 } - pub fn as_h256(&self) -> H256 { - H256::from_slice(self.as_slice()) + pub fn as_b256(&self) -> B256 { + B256::from_slice(self.as_slice()) } /// Encodes the block hash into a hexadecimal string **without** a "0x" @@ -45,7 +47,7 @@ impl BlockHash { } pub fn zero() -> Self { - Self::from(H256::zero()) + Self::from(B256::ZERO) } } @@ -83,18 +85,18 @@ impl fmt::LowerHex for BlockHash { } } -impl From for BlockHash { - fn from(hash: H256) -> Self { - BlockHash(hash.as_bytes().into()) - } -} - impl From> for BlockHash { fn from(bytes: Vec) -> Self { BlockHash(bytes.as_slice().into()) } } +impl From for BlockHash { + fn from(hash: B256) -> Self { + BlockHash(hash.as_slice().into()) + } +} + impl TryFrom<&str> for BlockHash { type Error = anyhow::Error; @@ -170,13 +172,6 @@ impl BlockPtr { self.number } - // FIXME: - // - // workaround for arweave - pub fn hash_as_h256(&self) -> H256 { - H256::from_slice(&self.hash_slice()[..32]) - } - pub fn hash_slice(&self) -> &[u8] { self.hash.0.as_ref() } @@ -205,15 +200,15 @@ impl slog::Value for BlockPtr { } } -impl From> for BlockPtr { - fn from(b: Block) -> BlockPtr { - BlockPtr::from((b.hash.unwrap(), b.number.unwrap().as_u64())) +impl From for BlockPtr { + fn from(b: LightEthereumBlock) -> BlockPtr { + BlockPtr::from((b.hash(), b.number_u64())) } } -impl<'a, T> From<&'a Block> for BlockPtr { - fn from(b: &'a Block) -> BlockPtr { - BlockPtr::from((b.hash.unwrap(), b.number.unwrap().as_u64())) +impl From<&LightEthereumBlock> for BlockPtr { + fn from(b: &LightEthereumBlock) -> BlockPtr { + BlockPtr::from((b.hash(), b.number_u64())) } } @@ -226,50 +221,51 @@ impl From<(Vec, i32)> for BlockPtr { } } -impl From<(H256, i32)> for BlockPtr { - fn from((hash, number): (H256, i32)) -> BlockPtr { +impl From<(B256, i32)> for BlockPtr { + fn from((hash, number): (B256, i32)) -> BlockPtr { BlockPtr { hash: hash.into(), number, } } } - -impl From<(Vec, u64)> for BlockPtr { - fn from((bytes, number): (Vec, u64)) -> Self { +impl From<(B256, u64)> for BlockPtr { + fn from((hash, number): (B256, u64)) -> BlockPtr { let number = i32::try_from(number).unwrap(); BlockPtr { - hash: BlockHash::from(bytes), + hash: hash.into(), number, } } } -impl From<(Vec, i64)> for BlockPtr { - fn from((bytes, number): (Vec, i64)) -> Self { +impl From<(B256, i64)> for BlockPtr { + fn from((hash, number): (B256, i64)) -> BlockPtr { let number = i32::try_from(number).unwrap(); BlockPtr { - hash: BlockHash::from(bytes), + hash: hash.into(), number, } } } -impl From<(H256, u64)> for BlockPtr { - fn from((hash, number): (H256, u64)) -> BlockPtr { +impl From<(Vec, u64)> for BlockPtr { + fn from((bytes, number): (Vec, u64)) -> Self { let number = i32::try_from(number).unwrap(); - - BlockPtr::from((hash, number)) + BlockPtr { + hash: BlockHash::from(bytes), + number, + } } } -impl From<(H256, i64)> for BlockPtr { - fn 
from((hash, number): (H256, i64)) -> BlockPtr { - if number < 0 { - panic!("block number out of range: {}", number); +impl From<(Vec, i64)> for BlockPtr { + fn from((bytes, number): (Vec, i64)) -> Self { + let number = i32::try_from(number).unwrap(); + BlockPtr { + hash: BlockHash::from(bytes), + number, } - - BlockPtr::from((hash, number as u64)) } } @@ -288,14 +284,14 @@ impl TryFrom<(&[u8], i64)> for BlockPtr { type Error = anyhow::Error; fn try_from((bytes, number): (&[u8], i64)) -> Result { - let hash = if bytes.len() == H256::len_bytes() { - H256::from_slice(bytes) + let hash = if bytes.len() == B256::len_bytes() { + B256::from_slice(bytes) } else { return Err(anyhow!( - "invalid H256 value `{}` has {} bytes instead of {}", + "invalid B256 value `{}` has {} bytes instead of {}", hex::encode(bytes), bytes.len(), - H256::len_bytes() + B256::len_bytes() )); }; Ok(BlockPtr::from((hash, number))) @@ -312,9 +308,9 @@ impl IntoValue for BlockPtr { } } -impl From for H256 { +impl From for B256 { fn from(ptr: BlockPtr) -> Self { - ptr.hash_as_h256() + ptr.hash.as_b256() } } @@ -398,22 +394,6 @@ impl ExtendedBlockPtr { pub fn block_number(&self) -> BlockNumber { self.number } - - pub fn hash_as_h256(&self) -> H256 { - H256::from_slice(&self.hash_slice()[..32]) - } - - pub fn parent_hash_as_h256(&self) -> H256 { - H256::from_slice(&self.parent_hash_slice()[..32]) - } - - pub fn hash_slice(&self) -> &[u8] { - self.hash.0.as_ref() - } - - pub fn parent_hash_slice(&self) -> &[u8] { - self.parent_hash.0.as_ref() - } } impl fmt::Display for ExtendedBlockPtr { @@ -463,44 +443,15 @@ impl IntoValue for ExtendedBlockPtr { } } -impl TryFrom<(Option, Option, H256, U256)> for ExtendedBlockPtr { - type Error = anyhow::Error; - - fn try_from(tuple: (Option, Option, H256, U256)) -> Result { - let (hash_opt, number_opt, parent_hash, timestamp_u256) = tuple; - - let hash = hash_opt.ok_or_else(|| anyhow!("Block hash is missing"))?; - let number = number_opt - .ok_or_else(|| anyhow!("Block number is missing"))? 
- .as_u64(); - - let block_number = - i32::try_from(number).map_err(|_| anyhow!("Block number out of range"))?; - - // Convert `U256` to `BlockTime` - let secs = - i64::try_from(timestamp_u256).map_err(|_| anyhow!("Timestamp out of range for i64"))?; - let block_time = BlockTime::since_epoch(secs, 0); - - Ok(ExtendedBlockPtr { - hash: hash.into(), - number: block_number, - parent_hash: parent_hash.into(), - timestamp: block_time, - }) - } -} - -impl TryFrom<(H256, i32, H256, U256)> for ExtendedBlockPtr { +impl TryFrom<(B256, i32, B256, u64)> for ExtendedBlockPtr { type Error = anyhow::Error; - fn try_from(tuple: (H256, i32, H256, U256)) -> Result { - let (hash, block_number, parent_hash, timestamp_u256) = tuple; + fn try_from(tuple: (B256, i32, B256, u64)) -> Result { + let (hash, block_number, parent_hash, timestamp) = tuple; - // Convert `U256` to `BlockTime` - let secs = - i64::try_from(timestamp_u256).map_err(|_| anyhow!("Timestamp out of range for i64"))?; - let block_time = BlockTime::since_epoch(secs, 0); + // Convert timestamp to `BlockTime` + let secs = timestamp; + let block_time = BlockTime::since_epoch(secs as i64, 0); Ok(ExtendedBlockPtr { hash: hash.into(), @@ -510,11 +461,6 @@ impl TryFrom<(H256, i32, H256, U256)> for ExtendedBlockPtr { }) } } -impl From for H256 { - fn from(ptr: ExtendedBlockPtr) -> Self { - ptr.hash_as_h256() - } -} impl From for BlockNumber { fn from(ptr: ExtendedBlockPtr) -> Self { @@ -539,7 +485,7 @@ impl Default for ChainIdentifier { fn default() -> Self { Self { net_version: String::default(), - genesis_block_hash: BlockHash::from(H256::zero()), + genesis_block_hash: BlockHash::from(B256::ZERO), } } } diff --git a/graph/src/cheap_clone.rs b/graph/src/cheap_clone.rs index adcb823c303..a8250bd6fb0 100644 --- a/graph/src/cheap_clone.rs +++ b/graph/src/cheap_clone.rs @@ -119,6 +119,6 @@ cheap_clone_is_copy!( &'static str, std::time::Duration ); -cheap_clone_is_copy!(ethabi::Address); +cheap_clone_is_copy!(alloy::primitives::Address); cheap_clone_is_clone!(tokio_util::sync::CancellationToken); diff --git a/graph/src/components/ethereum/mod.rs b/graph/src/components/ethereum/mod.rs index 45f1f5d98ad..0c068c24dee 100644 --- a/graph/src/components/ethereum/mod.rs +++ b/graph/src/components/ethereum/mod.rs @@ -1,6 +1,9 @@ mod types; pub use self::types::{ - evaluate_transaction_status, EthereumBlock, EthereumBlockWithCalls, EthereumCall, + AnyBlock, AnyTransaction, EthereumBlock, EthereumBlockWithCalls, EthereumCall, LightEthereumBlock, LightEthereumBlockExt, }; + +// Re-export Alloy network types for convenience +pub use alloy::network::{AnyHeader, AnyRpcBlock, AnyRpcHeader, AnyRpcTransaction, AnyTxEnvelope}; diff --git a/graph/src/components/ethereum/types.rs b/graph/src/components/ethereum/types.rs index b43730590d4..c9a4174bf0a 100644 --- a/graph/src/components/ethereum/types.rs +++ b/graph/src/components/ethereum/types.rs @@ -1,68 +1,157 @@ -use serde::{Deserialize, Serialize}; -use std::{convert::TryFrom, sync::Arc}; -use web3::types::{ - Action, Address, Block, Bytes, Log, Res, Trace, Transaction, TransactionReceipt, H256, U256, - U64, +use alloy::{ + network::{AnyRpcBlock, AnyRpcHeader, AnyRpcTransaction, ReceiptResponse, TransactionResponse}, + primitives::{Address, Bytes, B256, U256}, + rpc::types::{ + trace::parity::{Action, LocalizedTransactionTrace, TraceOutput}, + Log, + }, }; +use serde::{Deserialize, Serialize}; +use std::sync::Arc; use crate::{ blockchain::{BlockPtr, BlockTime}, prelude::BlockNumber, }; -pub type LightEthereumBlock = Block; 
+// Use Alloy's official types for handling any transaction type +pub type AnyTransaction = AnyRpcTransaction; +pub type AnyBlock = AnyRpcBlock; + +#[allow(dead_code)] +#[derive(Debug, Deserialize, Serialize)] +pub struct LightEthereumBlock(AnyBlock); + +impl Default for LightEthereumBlock { + fn default() -> Self { + use alloy::rpc::types::{Block, BlockTransactions}; + use alloy::serde::WithOtherFields; + + let default_block = Block { + header: AnyRpcHeader::default(), + transactions: BlockTransactions::Full(vec![]), + uncles: vec![], + withdrawals: None, + }; + Self(AnyBlock::new(WithOtherFields::new(default_block))) + } +} + +impl LightEthereumBlock { + pub fn new(block: AnyBlock) -> Self { + Self(block) + } + + pub fn hash(&self) -> B256 { + self.0.header.hash + } + + pub fn number_u64(&self) -> u64 { + self.0.header.number + } + + pub fn timestamp_u64(&self) -> u64 { + self.0.header.timestamp + } + + pub fn transactions(&self) -> Option<&[AnyTransaction]> { + self.0.transactions.as_transactions() + } + + pub fn inner(&self) -> &AnyBlock { + &self.0 + } + + pub fn base_fee_per_gas(&self) -> Option { + self.0.header.base_fee_per_gas + } +} pub trait LightEthereumBlockExt { fn number(&self) -> BlockNumber; - fn transaction_for_log(&self, log: &Log) -> Option; - fn transaction_for_call(&self, call: &EthereumCall) -> Option; + fn transaction_for_log(&self, log: &Log) -> Option; + fn transaction_for_call(&self, call: &EthereumCall) -> Option; fn parent_ptr(&self) -> Option; fn format(&self) -> String; fn block_ptr(&self) -> BlockPtr; fn timestamp(&self) -> BlockTime; } -impl LightEthereumBlockExt for LightEthereumBlock { +impl LightEthereumBlockExt for AnyBlock { fn number(&self) -> BlockNumber { - BlockNumber::try_from(self.number.unwrap().as_u64()).unwrap() + BlockNumber::try_from(self.header.number).unwrap() } - fn transaction_for_log(&self, log: &Log) -> Option { - log.transaction_hash - .and_then(|hash| self.transactions.iter().find(|tx| tx.hash == hash)) - .cloned() + fn timestamp(&self) -> BlockTime { + let time = self.header.timestamp; + let time = i64::try_from(time).unwrap(); + BlockTime::since_epoch(time, 0) } - fn transaction_for_call(&self, call: &EthereumCall) -> Option { - call.transaction_hash - .and_then(|hash| self.transactions.iter().find(|tx| tx.hash == hash)) - .cloned() + fn transaction_for_log(&self, log: &Log) -> Option { + log.transaction_hash.and_then(|hash| { + self.transactions + .txns() + .find(|tx| &tx.tx_hash() == &hash) + .cloned() + }) + } + + fn transaction_for_call(&self, call: &EthereumCall) -> Option { + call.transaction_hash.and_then(|hash| { + self.transactions + .txns() + .find(|tx| &tx.tx_hash() == &hash) + .cloned() + }) } fn parent_ptr(&self) -> Option { - match self.number() { + match self.header.number { 0 => None, - n => Some(BlockPtr::from((self.parent_hash, n - 1))), + n => { + let number = i32::try_from(n - 1).unwrap(); + Some(BlockPtr::new(self.header.parent_hash.into(), number)) + } } } fn format(&self) -> String { - format!( - "{} ({})", - self.number - .map_or(String::from("none"), |number| format!("#{}", number)), - self.hash - .map_or(String::from("-"), |hash| format!("{:x}", hash)) - ) + format!("{} ({})", self.header.number, self.header.hash) } fn block_ptr(&self) -> BlockPtr { - BlockPtr::from((self.hash.unwrap(), self.number.unwrap().as_u64())) + BlockPtr::from((self.header.hash, self.header.number)) + } +} + +impl LightEthereumBlockExt for LightEthereumBlock { + fn number(&self) -> BlockNumber { + 
self.0.header.number.try_into().unwrap() + } + + fn transaction_for_log(&self, log: &alloy::rpc::types::Log) -> Option { + self.0.transaction_for_log(log) + } + + fn transaction_for_call(&self, call: &EthereumCall) -> Option { + self.0.transaction_for_call(call) + } + + fn parent_ptr(&self) -> Option { + self.0.parent_ptr() + } + + fn format(&self) -> String { + self.0.format() + } + + fn block_ptr(&self) -> BlockPtr { + self.0.block_ptr() } fn timestamp(&self) -> BlockTime { - let ts = i64::try_from(self.timestamp.as_u64()).unwrap(); - BlockTime::since_epoch(ts, 0) + self.0.timestamp() } } @@ -90,24 +179,14 @@ impl EthereumBlockWithCalls { "failed to find the receipt for this transaction" ))?; - Ok(evaluate_transaction_status(receipt.status)) + Ok(receipt.status()) } } -/// Evaluates if a given transaction was successful. -/// -/// Returns `true` on success and `false` on failure. -/// If a receipt does not have a status value (EIP-658), assume the transaction was successful. -pub fn evaluate_transaction_status(receipt_status: Option) -> bool { - receipt_status - .map(|status| !status.is_zero()) - .unwrap_or(true) -} - -#[derive(Clone, Debug, Default, Deserialize, Serialize, PartialEq)] +#[derive(Clone, Debug, Default, Deserialize, Serialize)] pub struct EthereumBlock { pub block: Arc, - pub transaction_receipts: Vec>, + pub transaction_receipts: Vec>, } #[derive(Debug, Default, Clone, PartialEq, Eq)] @@ -115,31 +194,34 @@ pub struct EthereumCall { pub from: Address, pub to: Address, pub value: U256, - pub gas_used: U256, + pub gas_used: u64, pub input: Bytes, pub output: Bytes, pub block_number: BlockNumber, - pub block_hash: H256, - pub transaction_hash: Option, + pub block_hash: B256, + pub transaction_hash: Option, pub transaction_index: u64, } impl EthereumCall { - pub fn try_from_trace(trace: &Trace) -> Option { + pub fn try_from_trace(trace: &LocalizedTransactionTrace) -> Option { // The parity-ethereum tracing api returns traces for operations which had execution errors. // Filter errorful traces out, since call handlers should only run on successful CALLs. - if trace.error.is_some() { + + let tx_trace = &trace.trace; + + if tx_trace.error.is_some() { return None; } // We are only interested in traces from CALLs - let call = match &trace.action { + let call = match &tx_trace.action { // Contract to contract value transfers compile to the CALL opcode // and have no input. Call handlers are for triggering on explicit method calls right now. 
Action::Call(call) if call.input.0.len() >= 4 => call, _ => return None, }; - let (output, gas_used) = match &trace.result { - Some(Res::Call(result)) => (result.output.clone(), result.gas_used), + let (output, gas_used) = match &tx_trace.result { + Some(TraceOutput::Call(result)) => (result.output.clone(), result.gas_used), _ => return None, }; @@ -151,29 +233,24 @@ impl EthereumCall { from: call.from, to: call.to, value: call.value, - gas_used, + gas_used: gas_used, input: call.input.clone(), - output, - block_number: trace.block_number as BlockNumber, - block_hash: trace.block_hash, + output: output, + block_number: BlockNumber::try_from( + trace + .block_number + .expect("localized trace must have block_number"), + ) + .unwrap(), + block_hash: trace + .block_hash + .expect("localized trace must have block_hash"), transaction_hash: trace.transaction_hash, transaction_index, }) } } -impl From for BlockPtr { - fn from(b: EthereumBlock) -> BlockPtr { - BlockPtr::from((b.block.hash.unwrap(), b.block.number.unwrap().as_u64())) - } -} - -impl<'a> From<&'a EthereumBlock> for BlockPtr { - fn from(b: &'a EthereumBlock) -> BlockPtr { - BlockPtr::from((b.block.hash.unwrap(), b.block.number.unwrap().as_u64())) - } -} - impl<'a> From<&'a EthereumCall> for BlockPtr { fn from(call: &'a EthereumCall) -> BlockPtr { BlockPtr::from((call.block_hash, call.block_number)) diff --git a/graph/src/components/store/mod.rs b/graph/src/components/store/mod.rs index a9b65e01f9a..b2b6783349b 100644 --- a/graph/src/components/store/mod.rs +++ b/graph/src/components/store/mod.rs @@ -3,6 +3,7 @@ mod err; mod traits; pub mod write; +use alloy::primitives::Address; use diesel::deserialize::FromSql; use diesel::pg::Pg; use diesel::serialize::{Output, ToSql}; @@ -1138,7 +1139,7 @@ pub struct CachedEthereumCall { pub block_ptr: BlockPtr, /// The address to the called contract. - pub contract_address: ethabi::Address, + pub contract_address: Address, /// The encoded return value of this call. pub return_value: Vec, diff --git a/graph/src/components/store/traits.rs b/graph/src/components/store/traits.rs index 8eb915790f4..658baa8be3e 100644 --- a/graph/src/components/store/traits.rs +++ b/graph/src/components/store/traits.rs @@ -3,7 +3,6 @@ use std::ops::Range; use anyhow::Error; use async_trait::async_trait; -use web3::types::{Address, H256}; use super::*; use crate::blockchain::block_stream::{EntitySourceOperation, FirehoseCursor}; @@ -19,7 +18,10 @@ use crate::data::store::ethereum::call; use crate::data::store::{QueryObject, SqlQueryObject}; use crate::data::subgraph::{status, DeploymentFeatures}; use crate::data::{query::QueryTarget, subgraph::schema::*}; -use crate::prelude::{DeploymentState, NodeId, QueryExecutionError, SubgraphName}; +use crate::prelude::{ + alloy::primitives::{Address, B256}, + DeploymentState, NodeId, QueryExecutionError, SubgraphName, +}; use crate::schema::{ApiSchema, InputSchema}; pub trait SubscriptionManager: Send + Sync + 'static { @@ -549,7 +551,7 @@ pub trait ChainStore: ChainHeadStore { async fn attempt_chain_head_update( self: Arc, ancestor_count: BlockNumber, - ) -> Result, Error>; + ) -> Result, Error>; /// Returns the blocks present in the store. async fn blocks( @@ -626,7 +628,7 @@ pub trait ChainStore: ChainHeadStore { /// Tries to retrieve all transactions receipts for a given block. async fn transaction_receipts_in_block( &self, - block_ptr: &H256, + block_ptr: &B256, ) -> Result, StoreError>; /// Clears call cache of the chain for the given `from` and `to` block number. 
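For reference, below is a minimal usage sketch of the new `graph::abi` helpers introduced earlier in this patch (`FunctionExt::signature_compat`, `abi_encode_input`, `abi_decode_output`). It is illustrative only and not part of the diff; it assumes the re-exports shown in `graph/src/abi/mod.rs` and a caller crate that depends on `graph` and `anyhow`.

// Illustrative sketch (not part of the patch): exercising the new `graph::abi`
// helpers defined above. Assumes the re-exports in `graph/src/abi/mod.rs`.
use graph::abi::{AlloyU256, DynSolValue, Function, FunctionExt};

fn main() -> anyhow::Result<()> {
    // `signature_compat` keeps the old ethabi-style signature, including outputs.
    let f = Function::parse("balanceOf(address owner) returns (uint256)")?;
    assert_eq!(f.signature_compat(), "balanceOf(address):(uint256)");

    // `abi_encode_input` prefixes the 4-byte selector, matching ethabi behaviour.
    let owner = DynSolValue::Address([0u8; 20].into());
    let call_data = f.abi_encode_input(&[owner])?;
    assert_eq!(&call_data[..4], f.selector().as_slice());

    // Return data from an eth_call is decoded against the declared output types.
    let ret = DynSolValue::Uint(AlloyU256::from(42u64), 256).abi_encode();
    let outputs = f.abi_decode_output(&ret)?;
    println!("decoded outputs: {outputs:?}");
    Ok(())
}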
diff --git a/graph/src/components/subgraph/proof_of_indexing/mod.rs b/graph/src/components/subgraph/proof_of_indexing/mod.rs index b3861c0cea6..78bbaf6255f 100644 --- a/graph/src/components/subgraph/proof_of_indexing/mod.rs +++ b/graph/src/components/subgraph/proof_of_indexing/mod.rs @@ -87,6 +87,7 @@ mod tests { prelude::{BlockPtr, DeploymentHash, Value}, schema::InputSchema, }; + use alloy::primitives::{Address, B256}; use maplit::hashmap; use online::ProofOfIndexingFinisher; use reference::*; @@ -96,7 +97,6 @@ mod tests { use stable_hash_legacy::utils::stable_hash as stable_hash_legacy; use std::collections::HashMap; use std::convert::TryInto; - use web3::types::{Address, H256}; /// The PoI is the StableHash of this struct. This reference implementation is /// mostly here just to make sure that the online implementation is @@ -106,22 +106,22 @@ mod tests { pub struct PoI<'a> { pub causality_regions: HashMap>, pub subgraph_id: DeploymentHash, - pub block_hash: H256, + pub block_hash: B256, pub indexer: Option
, } - fn h256_as_bytes(val: &H256) -> AsBytes<&[u8]> { - AsBytes(val.as_bytes()) + fn b256_as_bytes(val: &B256) -> AsBytes<&[u8]> { + AsBytes(val.as_slice()) } fn indexer_opt_as_bytes(val: &Option<Address>
) -> Option> { - val.as_ref().map(|v| AsBytes(v.as_bytes())) + val.as_ref().map(|v| AsBytes(v.as_slice())) } impl_stable_hash!(PoI<'_> { causality_regions, subgraph_id, - block_hash: h256_as_bytes, + block_hash: b256_as_bytes, indexer: indexer_opt_as_bytes }); @@ -246,7 +246,7 @@ mod tests { fast: "dced49c45eac68e8b3d8f857928e7be6c270f2db8b56b0d7f27ce725100bae01", data: PoI { subgraph_id: DeploymentHash::new("test").unwrap(), - block_hash: H256::repeat_byte(1), + block_hash: B256::repeat_byte(1), causality_regions: HashMap::new(), indexer: None, }, @@ -258,7 +258,7 @@ mod tests { fast: "8bb3373fb55e02bde3202bac0eeecf1bd9a676856a4dd6667bd809aceda41885", data: PoI { subgraph_id: DeploymentHash::new("test").unwrap(), - block_hash: H256::repeat_byte(1), + block_hash: B256::repeat_byte(1), causality_regions: hashmap! { "eth".to_owned() => PoICausalityRegion { blocks: vec! [ @@ -285,7 +285,7 @@ mod tests { fast: "8b0097ad96b21f7e4bd8dcc41985e6e5506b808f1185016ab1073dd8745238ce", data: PoI { subgraph_id: DeploymentHash::new("b").unwrap(), - block_hash: H256::repeat_byte(3), + block_hash: B256::repeat_byte(3), causality_regions: hashmap! { "eth".to_owned() => PoICausalityRegion { blocks: vec! [ @@ -323,7 +323,7 @@ mod tests { fast: "2041af28678e68406247a5cfb5fe336947da75256c79b35c2f61fc7985091c0e", data: PoI { subgraph_id: DeploymentHash::new("b").unwrap(), - block_hash: H256::repeat_byte(3), + block_hash: B256::repeat_byte(3), causality_regions: hashmap! { "eth".to_owned() => PoICausalityRegion { blocks: vec! [ @@ -385,7 +385,7 @@ mod tests { fast: "421ef30a03be64014b9eef2b999795dcabfc601368040df855635e7886eb3822", data: PoI { subgraph_id: DeploymentHash::new("test").unwrap(), - block_hash: H256::repeat_byte(1), + block_hash: B256::repeat_byte(1), causality_regions: hashmap! { "eth".to_owned() => PoICausalityRegion { blocks: vec! [ diff --git a/graph/src/components/subgraph/proof_of_indexing/online.rs b/graph/src/components/subgraph/proof_of_indexing/online.rs index ebf7a65e2f9..f9bd8b5f18d 100644 --- a/graph/src/components/subgraph/proof_of_indexing/online.rs +++ b/graph/src/components/subgraph/proof_of_indexing/online.rs @@ -9,6 +9,7 @@ use crate::{ prelude::{debug, BlockNumber, DeploymentHash, Logger, ENV_VARS}, util::stable_hash_glue::AsBytes, }; +use alloy::primitives::Address; use sha2::{Digest, Sha256}; use stable_hash::{fast::FastStableHasher, FieldAddress, StableHash, StableHasher}; use stable_hash_legacy::crypto::{Blake3SeqNo, SetHasher}; @@ -18,7 +19,6 @@ use stable_hash_legacy::prelude::{ use std::collections::HashMap; use std::convert::TryInto; use std::fmt; -use web3::types::Address; pub struct BlockEventStream { vec_length: u64, @@ -278,7 +278,7 @@ impl ProofOfIndexingFinisher { state.write(&AsBytes(block.hash_slice()), &[2]); // Add PoI.indexer - state.write(&indexer.as_ref().map(|i| AsBytes(i.as_bytes())), &[3]); + state.write(&indexer.as_ref().map(|i| AsBytes(i.as_slice())), &[3]); ProofOfIndexingFinisher { block_number: block.number, diff --git a/graph/src/components/transaction_receipt.rs b/graph/src/components/transaction_receipt.rs index dc8eaf6a730..526a0487180 100644 --- a/graph/src/components/transaction_receipt.rs +++ b/graph/src/components/transaction_receipt.rs @@ -3,37 +3,28 @@ //! This module exposes the [`LightTransactionReceipt`] type, which holds basic information about //! the retrieved transaction receipts. 
-use web3::types::{TransactionReceipt, H256, U256, U64}; +use alloy::network::ReceiptResponse; +use alloy::primitives::B256; -/// Like web3::types::Receipt, but with fewer fields. #[derive(Debug, PartialEq, Eq)] pub struct LightTransactionReceipt { - pub transaction_hash: H256, - pub transaction_index: U64, - pub block_hash: Option, - pub block_number: Option, - pub gas_used: Option, - pub status: Option, + pub transaction_hash: B256, + pub transaction_index: u64, + pub block_hash: Option, + pub block_number: Option, + pub gas_used: u64, + pub status: bool, } -impl From for LightTransactionReceipt { - fn from(receipt: TransactionReceipt) -> Self { - let TransactionReceipt { - transaction_hash, - transaction_index, - block_hash, - block_number, - gas_used, - status, - .. - } = receipt; +impl From for LightTransactionReceipt { + fn from(receipt: alloy::network::AnyTransactionReceipt) -> Self { LightTransactionReceipt { - transaction_hash, - transaction_index, - block_hash, - block_number, - gas_used, - status, + transaction_hash: receipt.transaction_hash, + transaction_index: receipt.transaction_index.unwrap(), // unwrap is safe because its None only for pending transactions, graph-node does not ingest pending transactions + block_hash: receipt.block_hash, + block_number: receipt.block_number, + gas_used: receipt.gas_used, + status: receipt.status(), } } } diff --git a/graph/src/data/graphql/values.rs b/graph/src/data/graphql/values.rs index 7f15d26dc98..b4923d6ce26 100644 --- a/graph/src/data/graphql/values.rs +++ b/graph/src/data/graphql/values.rs @@ -1,3 +1,4 @@ +use alloy::primitives::Address; use anyhow::{anyhow, Error}; use std::collections::HashMap; use std::convert::TryFrom; @@ -6,7 +7,6 @@ use std::str::FromStr; use crate::blockchain::BlockHash; use crate::data::value::Object; use crate::prelude::{r, BigInt}; -use web3::types::H160; pub trait TryFromValue: Sized { fn try_from_value(value: &r::Value) -> Result; @@ -74,16 +74,11 @@ impl TryFromValue for i32 { } } -impl TryFromValue for H160 { +impl TryFromValue for Address { fn try_from_value(value: &r::Value) -> Result { match value { - r::Value::String(s) => { - // `H160::from_str` takes a hex string with no leading `0x`. - let string = s.trim_start_matches("0x"); - H160::from_str(string).map_err(|e| { - anyhow!("Cannot parse Address/H160 value from string `{}`: {}", s, e) - }) - } + r::Value::String(s) => Address::from_str(s) + .map_err(|e| anyhow!("Cannot parse Address/H160 value from string `{}`: {}", s, e)), _ => Err(anyhow!( "Cannot parse value into an Address/H160: {:?}", value diff --git a/graph/src/data/store/ethereum.rs b/graph/src/data/store/ethereum.rs index 12d48f992df..469b4b14551 100644 --- a/graph/src/data/store/ethereum.rs +++ b/graph/src/data/store/ethereum.rs @@ -1,7 +1,7 @@ use super::scalar; use crate::derive::CheapClone; use crate::prelude::*; -use web3::types::{Address, Bytes, H2048, H256, H64, U64}; +use alloy::primitives::{aliases::B2048, Address, Bytes, B256, B64, U64}; impl From
for Value { fn from(address: Address) -> Value { @@ -9,27 +9,27 @@ impl From
for Value { } } -impl From for Value { - fn from(hash: H64) -> Value { +impl From for Value { + fn from(hash: B64) -> Value { Value::Bytes(scalar::Bytes::from(hash.as_ref())) } } -impl From for Value { - fn from(hash: H256) -> Value { +impl From for Value { + fn from(hash: B256) -> Value { Value::Bytes(scalar::Bytes::from(hash.as_ref())) } } -impl From for Value { - fn from(hash: H2048) -> Value { +impl From for Value { + fn from(hash: B2048) -> Value { Value::Bytes(scalar::Bytes::from(hash.as_ref())) } } impl From for Value { fn from(bytes: Bytes) -> Value { - Value::Bytes(scalar::Bytes::from(bytes.0.as_slice())) + Value::Bytes(scalar::Bytes::from(bytes.as_ref())) } } @@ -43,6 +43,8 @@ impl From for Value { pub mod call { use std::sync::Arc; + use alloy::primitives::Address; + use crate::data::store::scalar::Bytes; use super::CheapClone; @@ -104,7 +106,7 @@ pub mod call { /// on the call's return value #[derive(Debug, Clone, CheapClone)] pub struct Request { - pub address: ethabi::Address, + pub address: Address, pub encoded_call: Arc, /// The index is set by the caller and is used to identify the /// request in related data structures that the caller might have @@ -112,7 +114,7 @@ pub mod call { } impl Request { - pub fn new(address: ethabi::Address, encoded_call: Vec, index: u32) -> Self { + pub fn new(address: Address, encoded_call: Vec, index: u32) -> Self { Request { address, encoded_call: Arc::new(Bytes::from(encoded_call)), diff --git a/graph/src/data/store/scalar/bigint.rs b/graph/src/data/store/scalar/bigint.rs index 696a1fd49ec..0b6ae34e025 100644 --- a/graph/src/data/store/scalar/bigint.rs +++ b/graph/src/data/store/scalar/bigint.rs @@ -4,8 +4,8 @@ use serde::{self, Deserialize, Serialize}; use stable_hash::utils::AsInt; use stable_hash::StableHash; use thiserror::Error; -use web3::types::*; +use crate::prelude::alloy::primitives::{U128, U256, U64}; use std::convert::{TryFrom, TryInto}; use std::fmt; use std::ops::{Add, BitAnd, BitOr, Div, Mul, Rem, Shl, Shr, Sub}; @@ -174,22 +174,19 @@ impl BigInt { } pub fn from_unsigned_u128(n: U128) -> Self { - let mut bytes: [u8; 16] = [0; 16]; - n.to_little_endian(&mut bytes); + let bytes: [u8; U128::BYTES] = n.to_le_bytes(); // Unwrap: 128 bits is much less than BigInt::MAX_BITS BigInt::from_unsigned_bytes_le(&bytes).unwrap() } pub fn from_unsigned_u256(n: &U256) -> Self { - let mut bytes: [u8; 32] = [0; 32]; - n.to_little_endian(&mut bytes); + let bytes: [u8; U256::BYTES] = n.to_le_bytes(); // Unwrap: 256 bits is much less than BigInt::MAX_BITS BigInt::from_unsigned_bytes_le(&bytes).unwrap() } pub fn from_signed_u256(n: &U256) -> Self { - let mut bytes: [u8; 32] = [0; 32]; - n.to_little_endian(&mut bytes); + let bytes: [u8; U256::BYTES] = n.to_le_bytes(); BigInt::from_signed_bytes_le(&bytes).unwrap() } @@ -202,9 +199,9 @@ impl BigInt { ); let mut i_bytes: [u8; 32] = [255; 32]; i_bytes[..bytes.len()].copy_from_slice(&bytes); - U256::from_little_endian(&i_bytes) + U256::from_le_slice(&i_bytes) } else { - U256::from_little_endian(&bytes) + U256::from_le_slice(&bytes) } } @@ -216,7 +213,7 @@ impl BigInt { self ); } - Ok(U256::from_little_endian(&bytes)) + Ok(U256::from_le_slice(&bytes)) } pub fn pow(self, exponent: u8) -> Result { @@ -256,6 +253,12 @@ impl From for BigInt { } } +impl From for BigInt { + fn from(i: u128) -> BigInt { + BigInt::unchecked_new(i.into()) + } +} + impl From for BigInt { fn from(i: u8) -> BigInt { BigInt::unchecked_new(i.into()) @@ -287,7 +290,7 @@ impl From for BigInt { /// handle signed U64s, we should add the 
same /// `{to,from}_{signed,unsigned}_u64` methods that we have for U64. fn from(n: U64) -> BigInt { - BigInt::from(n.as_u64()) + BigInt::from(n.to::()) } } @@ -403,8 +406,9 @@ impl GasSizeOf for BigInt { #[cfg(test)] mod test { + use alloy::primitives::U64; + use super::{super::test::same_stable_hash, BigInt}; - use web3::types::U64; #[test] fn bigint_to_from_u64() { diff --git a/graph/src/data/store/scalar/bytes.rs b/graph/src/data/store/scalar/bytes.rs index 8c5f4f1fe08..0df3aa3bdae 100644 --- a/graph/src/data/store/scalar/bytes.rs +++ b/graph/src/data/store/scalar/bytes.rs @@ -1,9 +1,9 @@ +use alloy::primitives::Address; use diesel::deserialize::FromSql; use diesel::pg::PgValue; use diesel::serialize::ToSql; use hex; use serde::{self, Deserialize, Serialize}; -use web3::types::*; use std::fmt::{self, Display, Formatter}; use std::ops::Deref; @@ -64,12 +64,6 @@ impl From
for Bytes { } } -impl From for Bytes { - fn from(bytes: web3::types::Bytes) -> Bytes { - Bytes::from(bytes.0.as_slice()) - } -} - impl From for Bytes { fn from(hash: BlockHash) -> Self { Bytes(hash.0) @@ -123,3 +117,9 @@ impl FromSql for Bytes { as FromSql>::from_sql(value).map(Bytes::from) } } + +impl From for Bytes { + fn from(bytes: alloy::primitives::Bytes) -> Bytes { + Bytes::from(bytes.as_ref()) + } +} diff --git a/graph/src/data/subgraph/mod.rs b/graph/src/data/subgraph/mod.rs index ed0acc894a2..0f621f9b2dd 100644 --- a/graph/src/data/subgraph/mod.rs +++ b/graph/src/data/subgraph/mod.rs @@ -3,6 +3,7 @@ pub mod schema; /// API version and spec version. pub mod api_version; +use alloy::primitives::Address; pub use api_version::*; pub mod features; @@ -29,7 +30,6 @@ use std::{ }; use thiserror::Error; use wasmparser; -use web3::types::Address; use crate::{ amp, bail, diff --git a/graph/src/data_source/common.rs b/graph/src/data_source/common.rs index 511d18f3de7..8f38fa2e94d 100644 --- a/graph/src/data_source/common.rs +++ b/graph/src/data_source/common.rs @@ -1,3 +1,6 @@ +use crate::abi; +use crate::abi::DynSolValueExt; +use crate::abi::FunctionExt; use crate::blockchain::block_stream::EntitySourceOperation; use crate::data::subgraph::SPEC_VERSION_1_4_0; use crate::prelude::{BlockPtr, Value}; @@ -7,8 +10,9 @@ use crate::{ data::value::Word, prelude::Link, }; +use alloy::primitives::{Address, U256}; +use alloy::rpc::types::Log; use anyhow::{anyhow, Context, Error}; -use ethabi::{Address, Contract, Function, LogParam, ParamType, Token}; use graph_derive::CheapClone; use lazy_static::lazy_static; use num_bigint::Sign; @@ -19,12 +23,85 @@ use serde_json; use slog::Logger; use std::collections::HashMap; use std::{str::FromStr, sync::Arc}; -use web3::types::{Log, H160}; + +/// Normalizes ABI JSON to handle compatibility issues between the legacy `ethabi`/`rust-web3` +/// parser and the stricter `alloy` parser. +/// +/// Some deployed subgraph ABIs contain non-standard constructs that `ethabi` accepted but +/// `alloy` rejects. This function patches these issues to maintain backward compatibility: +/// +/// 1. **`stateMutability: "undefined"`** - Some ABIs use "undefined" which is not a valid +/// Solidity state mutability. We replace it with "nonpayable". +/// +/// 2. **Duplicate constructors** - Some ABIs contain multiple constructor definitions. +/// We keep only the first one. +/// +/// 3. **Duplicate fallback functions** - Similar to constructors, some ABIs have multiple +/// fallback definitions. We keep only the first one. +/// +/// 4. **`indexed` field in non-event params** - The `indexed` field is only valid for event +/// parameters, but some ABIs include it on function inputs/outputs. We strip it from +/// non-event items. +/// +/// These issues were identified by validating ABIs across deployed subgraphs in production +/// before the migration to alloy. 
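A minimal sketch of rule 1 above in action; the ABI fragment is made up for illustration, and the full behavior (duplicate constructors/fallbacks, stripped `indexed` fields) is exercised by the tests added further down in this patch:

#[test]
fn normalize_rewrites_undefined_state_mutability() {
    let raw = br#"[{"type":"function","name":"f","inputs":[],"outputs":[],"stateMutability":"undefined"}]"#;
    let fixed = normalize_abi_json(raw).unwrap();
    let v: serde_json::Value = serde_json::from_slice(&fixed).unwrap();
    // "undefined" is not a valid Solidity state mutability; it is rewritten to "nonpayable".
    assert_eq!(v[0]["stateMutability"], "nonpayable");
}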
+fn normalize_abi_json(json_bytes: &[u8]) -> Result, anyhow::Error> { + let mut value: serde_json::Value = serde_json::from_slice(json_bytes)?; + + if let Some(array) = value.as_array_mut() { + let mut found_constructor = false; + let mut found_fallback = false; + let mut indices_to_remove = Vec::new(); + + for (index, item) in array.iter_mut().enumerate() { + if let Some(obj) = item.as_object_mut() { + if let Some(state_mutability) = obj.get_mut("stateMutability") { + if let Some(s) = state_mutability.as_str() { + if s == "undefined" { + *state_mutability = serde_json::Value::String("nonpayable".to_string()); + } + } + } + + let item_type = obj.get("type").and_then(|t| t.as_str()); + + match item_type { + Some("constructor") if found_constructor => indices_to_remove.push(index), + Some("constructor") => found_constructor = true, + Some("fallback") if found_fallback => indices_to_remove.push(index), + Some("fallback") => found_fallback = true, + _ => {} + } + + if item_type != Some("event") { + strip_indexed_from_params(obj.get_mut("inputs")); + strip_indexed_from_params(obj.get_mut("outputs")); + } + } + } + + for index in indices_to_remove.iter().rev() { + array.remove(*index); + } + } + + Ok(serde_json::to_vec(&value)?) +} + +fn strip_indexed_from_params(params: Option<&mut serde_json::Value>) { + if let Some(serde_json::Value::Array(arr)) = params { + for param in arr.iter_mut() { + if let Some(obj) = param.as_object_mut() { + obj.remove("indexed"); + } + } + } +} #[derive(Clone, Debug, PartialEq)] pub struct MappingABI { pub name: String, - pub contract: Contract, + pub contract: abi::JsonAbi, } impl MappingABI { @@ -33,24 +110,27 @@ impl MappingABI { contract_name: &str, name: &str, signature: Option<&str>, - ) -> Result<&Function, Error> { + ) -> Result<&abi::Function, Error> { let contract = &self.contract; let function = match signature { // Behavior for apiVersion < 0.0.4: look up function by name; for overloaded // functions this always picks the same overloaded variant, which is incorrect // and may lead to encoding/decoding errors - None => contract.function(name).with_context(|| { - format!( - "Unknown function \"{}::{}\" called from WASM runtime", - contract_name, name - ) - })?, + None => contract + .function(name) + .and_then(|matches| matches.first()) + .with_context(|| { + format!( + "Unknown function \"{}::{}\" called from WASM runtime", + contract_name, name + ) + })?, // Behavior for apiVersion >= 0.0.04: look up function by signature of // the form `functionName(uint256,string) returns (bytes32,string)`; this // correctly picks the correct variant of an overloaded function Some(ref signature) => contract - .functions_by_name(name) + .function(name) .with_context(|| { format!( "Unknown function \"{}::{}\" called from WASM runtime", @@ -58,7 +138,7 @@ impl MappingABI { ) })? 
.iter() - .find(|f| signature == &f.signature()) + .find(|f| signature == &f.signature_compat()) .with_context(|| { format!( "Unknown function \"{}::{}\" with signature `{}` \ @@ -120,7 +200,7 @@ impl AbiJson { if param_type == "tuple" { if let Some(components) = input.get("components") { // Parse the ParamType from the JSON (simplified for now) - let param_type = ParamType::Tuple(vec![]); + let param_type = abi::DynSolType::Tuple(vec![]); return StructFieldInfo::from_components( param_name.to_string(), param_type, @@ -358,11 +438,16 @@ impl UnresolvedMappingABI { self.name, self.file.link ) })?; - let contract = Contract::load(&*contract_bytes) + // Normalize the ABI to handle compatibility issues between ethabi and alloy parsers. + // See `normalize_abi_json` for details on the specific issues being addressed. + let normalized_bytes = normalize_abi_json(&contract_bytes) + .with_context(|| format!("failed to normalize ABI JSON for {}", self.name))?; + + let contract = serde_json::from_slice(&normalized_bytes) .with_context(|| format!("failed to load ABI {}", self.name))?; // Parse ABI JSON for on-demand struct field extraction - let abi_json = AbiJson::new(&contract_bytes) + let abi_json = AbiJson::new(&normalized_bytes) .with_context(|| format!("Failed to parse ABI JSON for {}", self.name))?; Ok(( @@ -408,17 +493,25 @@ impl CallDecl { self.expr.validate_args() } - pub fn address_for_log(&self, log: &Log, params: &[LogParam]) -> Result { + pub fn address_for_log( + &self, + log: &Log, + params: &[abi::DynSolParam], + ) -> Result { self.address_for_log_with_abi(log, params) } - pub fn address_for_log_with_abi(&self, log: &Log, params: &[LogParam]) -> Result { + pub fn address_for_log_with_abi( + &self, + log: &Log, + params: &[abi::DynSolParam], + ) -> Result { let address = match &self.expr.address { CallArg::HexAddress(address) => *address, CallArg::Ethereum(arg) => match arg { - EthereumArg::Address => log.address, + EthereumArg::Address => log.address(), EthereumArg::Param(name) => { - let value = params + let value = ¶ms .iter() .find(|param| param.name == name.as_str()) .ok_or_else(|| { @@ -428,15 +521,17 @@ impl CallDecl { name ) })? - .value - .clone(); - value.into_address().ok_or_else(|| { + .value; + + let address = value.as_address().ok_or_else(|| { anyhow!( "In declarative call '{}': param {} is not an address", self.label, name ) - })? 
+ })?; + + Address::from(address.into_array()) } EthereumArg::StructField(param_name, field_accesses) => { let param = params @@ -467,22 +562,27 @@ impl CallDecl { Ok(address) } - pub fn args_for_log(&self, log: &Log, params: &[LogParam]) -> Result, Error> { + pub fn args_for_log( + &self, + log: &Log, + params: &[abi::DynSolParam], + ) -> Result, Error> { self.args_for_log_with_abi(log, params) } pub fn args_for_log_with_abi( &self, log: &Log, - params: &[LogParam], - ) -> Result, Error> { + params: &[abi::DynSolParam], + ) -> Result, Error> { + use abi::DynSolValue; self.expr .args .iter() .map(|arg| match arg { - CallArg::HexAddress(address) => Ok(Token::Address(*address)), + CallArg::HexAddress(address) => Ok(DynSolValue::Address(*address)), CallArg::Ethereum(arg) => match arg { - EthereumArg::Address => Ok(Token::Address(log.address)), + EthereumArg::Address => Ok(DynSolValue::Address(log.address())), EthereumArg::Param(name) => { let value = params .iter() @@ -513,7 +613,10 @@ impl CallDecl { .collect() } - pub fn get_function(&self, mapping: &dyn FindMappingABI) -> Result { + pub fn get_function( + &self, + mapping: &dyn FindMappingABI, + ) -> Result { let contract_name = self.expr.abi.to_string(); let function_name = self.expr.func.as_str(); let abi = mapping.find_abi(&contract_name)?; @@ -524,6 +627,7 @@ impl CallDecl { // and may lead to encoding/decoding errors abi.contract .function(function_name) + .and_then(|matches| matches.first()) .cloned() .with_context(|| { format!( @@ -536,7 +640,7 @@ impl CallDecl { pub fn address_for_entity_handler( &self, entity: &EntitySourceOperation, - ) -> Result { + ) -> Result { match &self.expr.address { // Static hex address - just return it directly CallArg::HexAddress(address) => Ok(*address), @@ -557,7 +661,7 @@ impl CallDecl { // Make sure it's a bytes value and convert to address match value { Value::Bytes(bytes) => { - let address = H160::from_slice(bytes.as_slice()); + let address = Address::from_slice(bytes.as_slice()); Ok(address) } _ => Err(anyhow!("param '{name}' must be an address")), @@ -571,8 +675,8 @@ impl CallDecl { pub fn args_for_entity_handler( &self, entity: &EntitySourceOperation, - param_types: Vec, - ) -> Result, Error> { + param_types: Vec, + ) -> Result, Error> { self.validate_entity_handler_args(¶m_types)?; self.expr @@ -586,7 +690,7 @@ impl CallDecl { } /// Validates that the number of provided arguments matches the expected parameter types. - fn validate_entity_handler_args(&self, param_types: &[ParamType]) -> Result<(), Error> { + fn validate_entity_handler_args(&self, param_types: &[abi::DynSolType]) -> Result<(), Error> { if self.expr.args.len() != param_types.len() { return Err(anyhow!( "mismatched number of arguments: expected {}, got {}", @@ -602,9 +706,9 @@ impl CallDecl { fn process_entity_handler_arg( &self, arg: &CallArg, - expected_type: &ParamType, + expected_type: &abi::DynSolType, entity: &EntitySourceOperation, - ) -> Result { + ) -> Result { match arg { CallArg::HexAddress(address) => self.process_hex_address(*address, expected_type), CallArg::Ethereum(_) => Err(anyhow!( @@ -619,11 +723,11 @@ impl CallDecl { /// Converts a hex address to a token, ensuring it matches the expected parameter type. 
fn process_hex_address( &self, - address: H160, - expected_type: &ParamType, - ) -> Result { + address: Address, + expected_type: &abi::DynSolType, + ) -> Result { match expected_type { - ParamType::Address => Ok(Token::Address(address)), + abi::DynSolType::Address => Ok(abi::DynSolValue::Address(address)), _ => Err(anyhow!( "type mismatch: hex address provided for non-address parameter" )), @@ -634,9 +738,9 @@ impl CallDecl { fn process_entity_param( &self, name: &str, - expected_type: &ParamType, + expected_type: &abi::DynSolType, entity: &EntitySourceOperation, - ) -> Result { + ) -> Result { let value = entity .entity .get(name) @@ -650,27 +754,44 @@ impl CallDecl { fn convert_entity_value_to_token( &self, value: &Value, - expected_type: &ParamType, + expected_type: &abi::DynSolType, param_name: &str, - ) -> Result { + ) -> Result { + use abi::DynSolType; + use abi::DynSolValue; + match (expected_type, value) { - (ParamType::Address, Value::Bytes(b)) => { - Ok(Token::Address(H160::from_slice(b.as_slice()))) + (DynSolType::Address, Value::Bytes(b)) => { + Ok(DynSolValue::Address(b.as_slice().try_into()?)) + } + (DynSolType::Bytes, Value::Bytes(b)) => Ok(DynSolValue::Bytes(b.as_ref().to_vec())), + (DynSolType::FixedBytes(size), Value::Bytes(b)) if b.len() == *size => { + DynSolValue::fixed_bytes_from_slice(b.as_ref()) } - (ParamType::Bytes, Value::Bytes(b)) => Ok(Token::Bytes(b.as_ref().to_vec())), - (ParamType::FixedBytes(size), Value::Bytes(b)) if b.len() == *size => { - Ok(Token::FixedBytes(b.as_ref().to_vec())) + (DynSolType::String, Value::String(s)) => Ok(DynSolValue::String(s.to_string())), + (DynSolType::Bool, Value::Bool(b)) => Ok(DynSolValue::Bool(*b)), + (DynSolType::Int(_), Value::Int(i)) => { + let x = abi::I256::try_from(*i)?; + Ok(DynSolValue::Int(x, x.bits() as usize)) } - (ParamType::String, Value::String(s)) => Ok(Token::String(s.to_string())), - (ParamType::Bool, Value::Bool(b)) => Ok(Token::Bool(*b)), - (ParamType::Int(_), Value::Int(i)) => Ok(Token::Int((*i).into())), - (ParamType::Int(_), Value::Int8(i)) => Ok(Token::Int((*i).into())), - (ParamType::Int(_), Value::BigInt(i)) => Ok(Token::Int(i.to_signed_u256())), - (ParamType::Uint(_), Value::Int(i)) if *i >= 0 => Ok(Token::Uint((*i).into())), - (ParamType::Uint(_), Value::BigInt(i)) if i.sign() == Sign::Plus => { - Ok(Token::Uint(i.to_unsigned_u256()?)) + (DynSolType::Int(_), Value::Int8(i)) => { + let x = abi::I256::try_from(*i)?; + Ok(DynSolValue::Int(x, x.bits() as usize)) } - (ParamType::Array(inner_type), Value::List(values)) => { + (DynSolType::Int(_), Value::BigInt(i)) => { + let x = + abi::I256::from_le_bytes(i.to_signed_u256().to_le_bytes::<{ U256::BYTES }>()); + Ok(DynSolValue::Int(x, x.bits() as usize)) + } + (DynSolType::Uint(_), Value::Int(i)) if *i >= 0 => { + let x = U256::try_from(*i)?; + Ok(DynSolValue::Uint(x, x.bit_len())) + } + (DynSolType::Uint(_), Value::BigInt(i)) if i.sign() == Sign::Plus => { + let x = i.to_unsigned_u256()?; + Ok(DynSolValue::Uint(x, x.bit_len())) + } + (DynSolType::Array(inner_type), Value::List(values)) => { self.process_entity_array_values(values, inner_type.as_ref(), param_name) } _ => Err(anyhow!( @@ -684,41 +805,42 @@ impl CallDecl { fn process_entity_array_values( &self, values: &[Value], - inner_type: &ParamType, + inner_type: &abi::DynSolType, param_name: &str, - ) -> Result { - let tokens: Result, Error> = values + ) -> Result { + let tokens: Result, Error> = values .iter() .enumerate() .map(|(idx, v)| { self.convert_entity_value_to_token(v, inner_type, 
&format!("{param_name}[{idx}]")) }) .collect(); - Ok(Token::Array(tokens?)) + Ok(abi::DynSolValue::Array(tokens?)) } /// Extracts a nested field value from a struct parameter with mixed numeric/named access fn extract_nested_struct_field_as_address( - struct_token: &Token, + struct_token: &abi::DynSolValue, field_accesses: &[usize], call_label: &str, - ) -> Result { + ) -> Result { let field_token = Self::extract_nested_struct_field(struct_token, field_accesses, call_label)?; - field_token.into_address().ok_or_else(|| { + let address = field_token.as_address().ok_or_else(|| { anyhow!( "In declarative call '{}': nested struct field is not an address", call_label ) - }) + })?; + Ok(address) } /// Extracts a nested field value from a struct parameter using numeric indices fn extract_nested_struct_field( - struct_token: &Token, + struct_token: &abi::DynSolValue, field_accesses: &[usize], call_label: &str, - ) -> Result { + ) -> Result { assert!( !field_accesses.is_empty(), "Internal error: empty field access path should be caught at parse time" @@ -728,7 +850,7 @@ impl CallDecl { for (index, &field_index) in field_accesses.iter().enumerate() { match current_token { - Token::Tuple(fields) => { + abi::DynSolValue::Tuple(fields) => { let field_token = fields .get(field_index) .ok_or_else(|| { @@ -994,15 +1116,15 @@ pub struct StructFieldInfo { pub param_name: String, /// Mapping from field names to their indices in the tuple pub field_mappings: HashMap, - /// The ethabi ParamType for type validation - pub param_type: ParamType, + /// The alloy DynSolType for type validation + pub param_type: abi::DynSolType, } impl StructFieldInfo { /// Create a new StructFieldInfo from ABI JSON components pub fn from_components( param_name: String, - param_type: ParamType, + param_type: abi::DynSolType, components: &serde_json::Value, ) -> Result { let mut field_mappings = HashMap::new(); @@ -1185,16 +1307,16 @@ pub struct DeclaredCall { label: String, contract_name: String, address: Address, - function: Function, - args: Vec, + function: abi::Function, + args: Vec, } impl DeclaredCall { pub fn from_log_trigger( mapping: &dyn FindMappingABI, call_decls: &CallDecls, - log: &Log, - params: &[LogParam], + log: &alloy::rpc::types::Log, + params: &[abi::DynSolParam], ) -> Result, anyhow::Error> { Self::from_log_trigger_with_event(mapping, call_decls, log, params) } @@ -1203,7 +1325,7 @@ impl DeclaredCall { mapping: &dyn FindMappingABI, call_decls: &CallDecls, log: &Log, - params: &[LogParam], + params: &[abi::DynSolParam], ) -> Result, anyhow::Error> { Self::create_calls(mapping, call_decls, |decl, _| { Ok(( @@ -1217,13 +1339,13 @@ impl DeclaredCall { mapping: &dyn FindMappingABI, call_decls: &CallDecls, entity: &EntitySourceOperation, - ) -> Result, anyhow::Error> { + ) -> Result, Error> { Self::create_calls(mapping, call_decls, |decl, function| { let param_types = function .inputs .iter() - .map(|param| param.kind.clone()) - .collect::>(); + .map(|param| param.selector_type().parse()) + .collect::, _>>()?; Ok(( decl.address_for_entity_handler(entity)?, @@ -1243,7 +1365,7 @@ impl DeclaredCall { get_address_and_args: F, ) -> Result, anyhow::Error> where - F: Fn(&CallDecl, &Function) -> Result<(Address, Vec), anyhow::Error>, + F: Fn(&CallDecl, &abi::Function) -> Result<(Address, Vec), anyhow::Error>, { let mut calls = Vec::new(); for decl in call_decls.decls.iter() { @@ -1281,13 +1403,15 @@ pub struct ContractCall { pub contract_name: String, pub address: Address, pub block_ptr: BlockPtr, - pub function: Function, - 
pub args: Vec, + pub function: abi::Function, + pub args: Vec, pub gas: Option, } #[cfg(test)] mod tests { + use alloy::primitives::B256; + use crate::data::subgraph::SPEC_VERSION_1_3_0; use super::*; @@ -1526,7 +1650,7 @@ mod tests { let parser = ExprParser::new(); let addr = "0xDeaDbeefdEAdbeefdEadbEEFdeadbeEFdEaDbeeF"; - let hex_address = CallArg::HexAddress(web3::types::H160::from_str(addr).unwrap()); + let hex_address = CallArg::HexAddress(Address::from_str(addr).unwrap()); // Test HexAddress in address position let expr: CallExpr = parser.ok(&format!("Pool[{}].growth()", addr)); @@ -1593,18 +1717,19 @@ mod tests { #[test] fn test_struct_field_access_functions() { - use ethabi::Token; + use crate::abi::DynSolValue; + use alloy::primitives::{Address, U256}; let parser = ExprParser::new(); let tuple_fields = vec![ - Token::Uint(ethabi::Uint::from(8u8)), // index 0: uint8 - Token::Address([1u8; 20].into()), // index 1: address - Token::Uint(ethabi::Uint::from(1000u64)), // index 2: uint256 + DynSolValue::Uint(U256::from(8u8), 8), // index 0: uint8 + DynSolValue::Address(Address::from([1u8; 20])), // index 1: address + DynSolValue::Uint(U256::from(1000u64), 256), // index 2: uint256 ]; // Test extract_struct_field with numeric indices - let struct_token = Token::Tuple(tuple_fields.clone()); + let struct_token = DynSolValue::Tuple(tuple_fields.clone()); // Test accessing index 0 (uint8) let result = @@ -1642,8 +1767,9 @@ mod tests { #[test] fn test_declarative_call_error_context() { - use crate::prelude::web3::types::{Log, H160, H256}; - use ethabi::{LogParam, Token}; + use crate::abi::{DynSolParam, DynSolValue}; + use alloy::primitives::U256; + use alloy::rpc::types::Log; let parser = ExprParser::new(); @@ -1654,18 +1780,19 @@ mod tests { }; // Test scenario 1: Unknown parameter + let inner_log = alloy::primitives::Log { + address: Address::ZERO, + data: alloy::primitives::LogData::new_unchecked(vec![].into(), vec![].into()), + }; let log = Log { - address: H160::zero(), - topics: vec![], - data: vec![].into(), - block_hash: Some(H256::zero()), - block_number: Some(1.into()), - transaction_hash: Some(H256::zero()), - transaction_index: Some(0.into()), - log_index: Some(0.into()), - transaction_log_index: Some(0.into()), - log_type: None, - removed: Some(false), + inner: inner_log, + block_hash: Some(B256::ZERO), + block_number: Some(1), + block_timestamp: None, + transaction_hash: Some(B256::ZERO), + transaction_index: Some(0), + log_index: Some(0), + removed: false, }; let params = vec![]; // Empty params - 'asset' param is missing @@ -1676,9 +1803,9 @@ mod tests { assert!(error_msg.contains("unknown param asset")); // Test scenario 2: Struct field access error - let params = vec![LogParam { + let params = vec![DynSolParam { name: "asset".to_string(), - value: Token::Tuple(vec![Token::Uint(ethabi::Uint::from(1u8))]), // Only 1 field, but trying to access index 1 + value: DynSolValue::Tuple(vec![DynSolValue::Uint(U256::from(1u8), 8)]), // Only 1 field, but trying to access index 1 }]; let result = call_decl.address_for_log(&log, ¶ms); @@ -1689,11 +1816,11 @@ mod tests { assert!(error_msg.contains("struct has 1 fields")); // Test scenario 3: Non-address field access - let params = vec![LogParam { + let params = vec![DynSolParam { name: "asset".to_string(), - value: Token::Tuple(vec![ - Token::Uint(ethabi::Uint::from(1u8)), - Token::Uint(ethabi::Uint::from(2u8)), // Index 1 is uint, not address + value: DynSolValue::Tuple(vec![ + DynSolValue::Uint(U256::from(1u8), 8), + 
DynSolValue::Uint(U256::from(2u8), 8), // Index 1 is uint, not address ]), }]; @@ -1718,18 +1845,18 @@ mod tests { // Create a structure where base has only 2 fields instead of 3 // The parser thinks there should be 3 fields based on ABI, but at runtime we provide only 2 - let base_struct = Token::Tuple(vec![ - Token::Address([1u8; 20].into()), // addr at index 0 - Token::Uint(ethabi::Uint::from(100u64)), // amount at index 1 - // Missing the active field at index 2! + let base_struct = DynSolValue::Tuple(vec![ + DynSolValue::Address(Address::from([1u8; 20])), // addr at index 0 + DynSolValue::Uint(U256::from(100u64), 256), // amount at index 1 + // Missing the active field at index 2! ]); - let params = vec![LogParam { + let params = vec![DynSolParam { name: "complexAsset".to_string(), - value: Token::Tuple(vec![ - base_struct, // base with only 2 fields - Token::String("metadata".to_string()), // metadata at index 1 - Token::Array(vec![]), // values at index 2 + value: DynSolValue::Tuple(vec![ + base_struct, // base with only 2 fields + DynSolValue::String("metadata".to_string()), // metadata at index 1 + DynSolValue::Array(vec![]), // values at index 2 ]), }]; @@ -1743,7 +1870,8 @@ mod tests { #[test] fn test_struct_field_extraction_comprehensive() { - use ethabi::Token; + use crate::abi::DynSolValue; + use alloy::primitives::{Address, U256}; // Create a complex nested structure for comprehensive testing: // struct Asset { @@ -1755,37 +1883,37 @@ mod tests { // address addr; // index 0 // string name; // index 1 // } - let inner_struct = Token::Tuple(vec![ - Token::Address([0x42; 20].into()), // token.addr - Token::String("TokenName".to_string()), // token.name + let inner_struct = DynSolValue::Tuple(vec![ + DynSolValue::Address(Address::from([0x42; 20])), // token.addr + DynSolValue::String("TokenName".to_string()), // token.name ]); - let outer_struct = Token::Tuple(vec![ - Token::Uint(ethabi::Uint::from(1u8)), // asset.kind - inner_struct, // asset.token - Token::Uint(ethabi::Uint::from(1000u64)), // asset.amount + let outer_struct = DynSolValue::Tuple(vec![ + DynSolValue::Uint(U256::from(1u8), 8), // asset.kind + inner_struct, // asset.token + DynSolValue::Uint(U256::from(1000u64), 256), // asset.amount ]); // Test cases: (path, expected_value, description) let test_cases = vec![ ( vec![0], - Token::Uint(ethabi::Uint::from(1u8)), + DynSolValue::Uint(U256::from(1u8), 8), "Simple field access", ), ( vec![1, 0], - Token::Address([0x42; 20].into()), + DynSolValue::Address(Address::from([0x42; 20])), "Nested field access", ), ( vec![1, 1], - Token::String("TokenName".to_string()), + DynSolValue::String("TokenName".to_string()), "Nested string field", ), ( vec![2], - Token::Uint(ethabi::Uint::from(1000u64)), + DynSolValue::Uint(U256::from(1000u64), 256), "Last field access", ), ]; @@ -2048,6 +2176,146 @@ mod tests { assert!(error_msg.contains("is not a struct")); } + #[test] + fn test_normalize_abi_json_with_undefined_state_mutability() { + let abi_with_undefined = r#"[ + { + "type": "function", + "name": "testFunction", + "inputs": [], + "outputs": [], + "stateMutability": "undefined" + }, + { + "type": "function", + "name": "normalFunction", + "inputs": [], + "outputs": [], + "stateMutability": "view" + } + ]"#; + + let normalized = normalize_abi_json(abi_with_undefined.as_bytes()).unwrap(); + let result: serde_json::Value = serde_json::from_slice(&normalized).unwrap(); + + if let Some(array) = result.as_array() { + assert_eq!(array[0]["stateMutability"], "nonpayable"); + 
assert_eq!(array[1]["stateMutability"], "view"); + } else { + panic!("Expected JSON array"); + } + + let json_abi: abi::JsonAbi = serde_json::from_slice(&normalized).unwrap(); + assert_eq!(json_abi.len(), 2); + } + + #[test] + fn test_normalize_abi_json_with_duplicate_constructors() { + let abi_with_duplicate_constructors = r#"[ + { + "type": "constructor", + "inputs": [{"name": "param1", "type": "address"}], + "stateMutability": "nonpayable" + }, + { + "type": "function", + "name": "someFunction", + "inputs": [], + "outputs": [], + "stateMutability": "view" + }, + { + "type": "constructor", + "inputs": [{"name": "param2", "type": "uint256"}], + "stateMutability": "nonpayable" + } + ]"#; + + let normalized = normalize_abi_json(abi_with_duplicate_constructors.as_bytes()).unwrap(); + let result: serde_json::Value = serde_json::from_slice(&normalized).unwrap(); + + if let Some(array) = result.as_array() { + assert_eq!(array.len(), 2); + assert_eq!(array[0]["type"], "constructor"); + assert_eq!(array[0]["inputs"][0]["name"], "param1"); + assert_eq!(array[1]["type"], "function"); + } else { + panic!("Expected JSON array"); + } + + let json_abi: abi::JsonAbi = serde_json::from_slice(&normalized).unwrap(); + assert_eq!(json_abi.len(), 2); + } + + #[test] + fn test_normalize_abi_json_with_duplicate_fallbacks() { + let abi_with_duplicate_fallbacks = r#"[ + { + "type": "fallback", + "stateMutability": "payable" + }, + { + "type": "function", + "name": "someFunction", + "inputs": [], + "outputs": [], + "stateMutability": "view" + }, + { + "type": "fallback", + "stateMutability": "nonpayable" + } + ]"#; + + let normalized = normalize_abi_json(abi_with_duplicate_fallbacks.as_bytes()).unwrap(); + let result: serde_json::Value = serde_json::from_slice(&normalized).unwrap(); + + if let Some(array) = result.as_array() { + assert_eq!(array.len(), 2); + assert_eq!(array[0]["type"], "fallback"); + assert_eq!(array[0]["stateMutability"], "payable"); + assert_eq!(array[1]["type"], "function"); + } else { + panic!("Expected JSON array"); + } + + let json_abi: abi::JsonAbi = serde_json::from_slice(&normalized).unwrap(); + assert_eq!(json_abi.len(), 2); + } + + #[test] + fn test_normalize_abi_json_strips_indexed_from_non_events() { + let abi_with_indexed_in_function = r#"[ + { + "type": "function", + "name": "testFunction", + "inputs": [{"name": "x", "type": "uint256", "indexed": true}], + "outputs": [{"name": "y", "type": "address", "indexed": false}], + "stateMutability": "view" + }, + { + "type": "event", + "name": "TestEvent", + "anonymous": false, + "inputs": [{"name": "from", "type": "address", "indexed": true}] + } + ]"#; + + let normalized = normalize_abi_json(abi_with_indexed_in_function.as_bytes()).unwrap(); + let result: serde_json::Value = serde_json::from_slice(&normalized).unwrap(); + + if let Some(array) = result.as_array() { + assert!(array[0]["inputs"][0].get("indexed").is_none()); + assert!(array[0]["outputs"][0].get("indexed").is_none()); + assert_eq!(array[1]["inputs"][0]["indexed"], true); + } else { + panic!("Expected JSON array"); + } + + let json_abi: abi::JsonAbi = serde_json::from_slice(&normalized).unwrap(); + assert_eq!(json_abi.len(), 2); + } + // Helper function to create consistent test ABI fn create_test_mapping_abi() -> AbiJson { const ABI_JSON: &str = r#"[ diff --git a/graph/src/lib.rs b/graph/src/lib.rs index e076d64c736..030ca945709 100644 --- a/graph/src/lib.rs +++ b/graph/src/lib.rs @@ -33,6 +33,8 @@ pub mod env; pub mod ipfs; +pub mod abi; + pub mod amp; /// Wrapper for 
spawning tasks that abort on panic, which is our default. @@ -74,12 +76,12 @@ pub use url; /// ``` pub mod prelude { pub use ::anyhow; + pub use alloy; pub use anyhow::{anyhow, Context as _, Error}; pub use atty; pub use chrono; pub use diesel; pub use envconfig; - pub use ethabi; pub use hex; pub use lazy_static::lazy_static; pub use prost; @@ -103,7 +105,6 @@ pub mod prelude { pub use tokio; pub use toml; pub use tonic; - pub use web3; pub type DynTryFuture<'a, Ok = (), Err = Error> = Pin> + Send + 'a>>; @@ -171,6 +172,7 @@ pub mod prelude { pub use crate::util::cache_weight::CacheWeight; pub use crate::util::futures::{retry, TimeoutError}; pub use crate::util::stats::{AtomicMovingStats, MovingStats}; + pub use crate::util::test_utils::*; macro_rules! static_graphql { ($m:ident, $m2:ident, {$($n:ident,)*}) => { diff --git a/graph/src/runtime/mod.rs b/graph/src/runtime/mod.rs index 7958b991598..ac94d33c32f 100644 --- a/graph/src/runtime/mod.rs +++ b/graph/src/runtime/mod.rs @@ -260,7 +260,7 @@ pub enum IndexForAscTypeId { // Reserved discriminant space for more Ethereum type IDs: [1000, 1499] TransactionReceipt = 1000, Log = 1001, - ArrayH256 = 1002, + ArrayB256 = 1002, ArrayLog = 1003, ArrayTypedMapStringStoreValue = 1004, // Continue to add more Ethereum type IDs here. diff --git a/graph/src/util/mod.rs b/graph/src/util/mod.rs index 4cdf52a82a5..7a67c984a63 100644 --- a/graph/src/util/mod.rs +++ b/graph/src/util/mod.rs @@ -35,3 +35,6 @@ pub mod monitored; pub mod intern; pub mod herd_cache; + +/// Test utilities for creating mock blockchain data structures +pub mod test_utils; diff --git a/graph/src/util/test_utils.rs b/graph/src/util/test_utils.rs new file mode 100644 index 00000000000..38b618560c4 --- /dev/null +++ b/graph/src/util/test_utils.rs @@ -0,0 +1,57 @@ +use alloy::consensus::{TxEnvelope, TxLegacy}; +use alloy::primitives::Address; +use alloy::rpc::types::Transaction; + +use crate::prelude::alloy::consensus::Header as ConsensusHeader; +use crate::prelude::alloy::primitives::B256; +use crate::prelude::alloy::rpc::types::{Block, Header}; + +/// Creates a minimal Alloy Block for testing purposes. 
+pub fn create_minimal_block_for_test(block_number: u64, block_hash: B256) -> Block { + // Create consensus header with defaults, but set the specific number + let mut consensus_header = ConsensusHeader::default(); + consensus_header.number = block_number; + + // Create RPC header with the specific hash + let rpc_header = Header { + hash: block_hash, + inner: consensus_header, + total_difficulty: None, + size: None, + }; + + // Create an empty block with this header + Block::empty(rpc_header) +} + +/// Generic function that creates a mock legacy Transaction from ANY log +pub fn create_dummy_transaction( + block_number: u64, + block_hash: B256, + transaction_index: Option, + transaction_hash: B256, +) -> Transaction { + use alloy::{ + consensus::transaction::Recovered, + consensus::Signed, + primitives::{Signature, U256}, + }; + + let tx = TxLegacy::default(); + + // Create a dummy signature + let signature = Signature::new(U256::from(0x1111), U256::from(0x2222), false); + + let signed_tx = Signed::new_unchecked(tx, signature, transaction_hash); + let envelope = TxEnvelope::Legacy(signed_tx); + + let recovered = Recovered::new_unchecked(envelope, Address::ZERO); + + Transaction { + inner: recovered, + block_hash: Some(block_hash), + block_number: Some(block_number), + transaction_index: transaction_index, + effective_gas_price: None, + } +} diff --git a/graphql/src/store/resolver.rs b/graphql/src/store/resolver.rs index 426e921f2c6..779a9766fe7 100644 --- a/graphql/src/store/resolver.rs +++ b/graphql/src/store/resolver.rs @@ -10,6 +10,7 @@ use graph::data::query::{CacheStatus, QueryResults, Trace}; use graph::data::store::ID; use graph::data::value::{Object, Word}; use graph::derive::CheapClone; +use graph::prelude::alloy::primitives::B256; use graph::prelude::*; use graph::schema::{ ast as sast, INTROSPECTION_SCHEMA_FIELD_NAME, INTROSPECTION_TYPE_FIELD_NAME, META_FIELD_NAME, @@ -202,11 +203,11 @@ impl StoreResolver { // locate_block indicates that we do not have a block hash // by setting the hash to `zero` // See 7a7b9708-adb7-4fc2-acec-88680cb07ec1 - let hash_h256 = ptr.hash_as_h256(); - if hash_h256 == web3::types::H256::zero() { + let hash_b256 = ptr.hash.as_b256(); + if hash_b256 == B256::ZERO { None } else { - Some(r::Value::String(format!("0x{:x}", hash_h256))) + Some(r::Value::String(format!("0x{:x}", hash_b256))) } }) .unwrap_or(r::Value::Null); diff --git a/node/src/manager/commands/chain.rs b/node/src/manager/commands/chain.rs index 12b69d0bf4e..ad6d7bdd543 100644 --- a/node/src/manager/commands/chain.rs +++ b/node/src/manager/commands/chain.rs @@ -12,7 +12,7 @@ use graph::components::store::ChainIdStore; use graph::components::store::StoreError; use graph::prelude::BlockNumber; use graph::prelude::ChainStore as _; -use graph::prelude::LightEthereumBlockExt; +use graph::prelude::LightEthereumBlock; use graph::prelude::{anyhow, anyhow::bail}; use graph::slog::Logger; use graph::{ @@ -300,16 +300,18 @@ pub async fn ingest( else { bail!("block number {number} not found"); }; - let ptr = block.block_ptr(); + let hash = block.header.hash; + let number = block.header.number; // For inserting the block, it doesn't matter whether the block is final or not. 
- let block = Arc::new(BlockFinality::Final(Arc::new(block))); + let block = Arc::new(BlockFinality::Final(Arc::new(LightEthereumBlock::new( + block, + )))); chain_store.upsert_block(block).await?; - let rows = chain_store - .confirm_block_hash(ptr.number, &ptr.hash) - .await?; + let hash = hash.into(); + let rows = chain_store.confirm_block_hash(number as i32, &hash).await?; - println!("Inserted block {}", ptr); + println!("Inserted block {}", hash); if rows > 0 { println!(" (also deleted {rows} duplicate row(s) with that number)"); } diff --git a/node/src/manager/commands/check_blocks.rs b/node/src/manager/commands/check_blocks.rs index 15314067a49..f6a4506a2f8 100644 --- a/node/src/manager/commands/check_blocks.rs +++ b/node/src/manager/commands/check_blocks.rs @@ -4,8 +4,8 @@ use graph::{ cheap_clone::CheapClone, components::store::ChainStore as ChainStoreTrait, prelude::{ + alloy::primitives::B256, anyhow::{self, anyhow, Context}, - web3::types::H256, }, slog::Logger, }; @@ -105,7 +105,7 @@ pub async fn truncate(chain_store: Arc, skip_confirmation: bool) -> } async fn run( - block_hash: &H256, + block_hash: &B256, chain_store: Arc, ethereum_adapter: &EthereumAdapter, logger: &Logger, @@ -124,7 +124,7 @@ async fn run( async fn handle_multiple_block_hashes( block_number: i32, - block_hashes: &[H256], + block_hashes: &[B256], chain_store: &ChainStore, delete_duplicates: bool, ) -> anyhow::Result<()> { @@ -157,7 +157,10 @@ mod steps { use graph::{ anyhow::bail, - prelude::serde_json::{self, Value}, + prelude::{ + alloy::primitives::B256, + serde_json::{self, Value}, + }, }; use json_structural_diff::{colorize as diff_to_string, JsonDiff}; @@ -169,11 +172,11 @@ mod steps { pub(super) async fn resolve_block_hash_from_block_number( number: i32, chain_store: &ChainStore, - ) -> anyhow::Result> { + ) -> anyhow::Result> { let block_hashes = chain_store.block_hashes_by_block_number(number).await?; Ok(block_hashes .into_iter() - .map(|x| H256::from_slice(&x.as_slice()[..32])) + .map(|x| B256::from_slice(&x.as_slice()[..32])) .collect()) } @@ -181,7 +184,7 @@ mod steps { /// /// Errors on a non-unary result. pub(super) async fn fetch_single_cached_block( - block_hash: H256, + block_hash: B256, chain_store: Arc, ) -> anyhow::Result { let blocks = chain_store.blocks(vec![block_hash.into()]).await?; @@ -199,7 +202,7 @@ mod steps { /// Errors on provider failure or if the returned block has a different hash than the one /// requested. pub(super) async fn fetch_single_provider_block( - block_hash: &H256, + block_hash: &B256, ethereum_adapter: &EthereumAdapter, logger: &Logger, ) -> anyhow::Result { @@ -209,7 +212,7 @@ mod steps { .with_context(|| format!("failed to fetch block {block_hash}"))? .ok_or_else(|| anyhow!("JRPC provider found no block with hash {block_hash:?}"))?; ensure!( - provider_block.hash == Some(*block_hash), + provider_block.header.hash == *block_hash, "Provider responded with a different block hash" ); serde_json::to_value(provider_block) @@ -237,7 +240,7 @@ mod steps { } /// Prints the difference between two [`serde_json::Value`] values to the user. - pub(super) fn report_difference(difference: Option<&str>, hash: &H256) { + pub(super) fn report_difference(difference: Option<&str>, hash: &B256) { if let Some(diff) = difference { eprintln!("block {hash} diverges from cache:"); eprintln!("{diff}"); @@ -247,7 +250,7 @@ mod steps { } /// Attempts to delete a block from the block cache. 
- pub(super) async fn delete_block(hash: &H256, chain_store: &ChainStore) -> anyhow::Result<()> { + pub(super) async fn delete_block(hash: &B256, chain_store: &ChainStore) -> anyhow::Result<()> { println!("Deleting block {hash} from cache."); chain_store.delete_blocks(&[hash]).await?; println!("Done."); @@ -263,13 +266,13 @@ mod steps { mod helpers { use super::*; - use graph::prelude::hex; + use graph::prelude::{alloy::primitives::B256, hex}; - /// Tries to parse a [`H256`] from a hex string. - pub(super) fn parse_block_hash(hash: &str) -> anyhow::Result { + /// Tries to parse a [`B256`] from a hex string. + pub(super) fn parse_block_hash(hash: &str) -> anyhow::Result { let hash = hash.trim_start_matches("0x"); let hash = hex::decode(hash)?; - Ok(H256::from_slice(&hash)) + Ok(B256::from_slice(&hash)) } } diff --git a/runtime/test/src/common.rs b/runtime/test/src/common.rs index cb5493fbe01..183a79a0bbe 100644 --- a/runtime/test/src/common.rs +++ b/runtime/test/src/common.rs @@ -1,4 +1,3 @@ -use ethabi::Contract; use graph::blockchain::BlockTime; use graph::components::store::DeploymentLocator; use graph::components::subgraph::SharedProofOfIndexing; @@ -8,6 +7,7 @@ use graph::data_source::common::MappingABI; use graph::env::EnvVars; use graph::ipfs::{IpfsMetrics, IpfsRpcClient, ServerAddress}; use graph::log; +use graph::prelude::alloy::primitives::Address; use graph::prelude::*; use graph_chain_ethereum::{Chain, DataSource, DataSourceTemplate, Mapping, TemplateSource}; use graph_runtime_wasm::host_exports::DataSourceDetails; @@ -15,7 +15,6 @@ use graph_runtime_wasm::{HostExports, MappingContext}; use semver::Version; use std::env; use std::str::FromStr; -use web3::types::Address; lazy_static! { pub static ref LOGGER: Logger = match env::var_os("GRAPH_LOG") { @@ -83,7 +82,7 @@ fn mock_host_exports( fn mock_abi() -> MappingABI { MappingABI { name: "mock_abi".to_string(), - contract: Contract::load( + contract: serde_json::from_str( r#"[ { "inputs": [ @@ -94,8 +93,7 @@ fn mock_abi() -> MappingABI { ], "type": "constructor" } - ]"# - .as_bytes(), + ]"#, ) .unwrap(), } diff --git a/runtime/test/src/test.rs b/runtime/test/src/test.rs index 6af2bf74cb2..2a367a25e69 100644 --- a/runtime/test/src/test.rs +++ b/runtime/test/src/test.rs @@ -6,7 +6,7 @@ use graph::data::store::{scalar, Id, IdType}; use graph::data::subgraph::*; use graph::data::value::Word; use graph::ipfs::test_utils::add_files_to_local_ipfs_node_for_testing; -use graph::prelude::web3::types::U256; +use graph::prelude::alloy::primitives::U256; use graph::runtime::gas::GasCounter; use graph::runtime::{AscIndexId, AscType, HostExportError}; use graph::runtime::{AscPtr, ToAscObj}; @@ -22,7 +22,6 @@ use std::collections::{BTreeMap, HashMap}; use std::str::FromStr; use test_store::{LOGGER, STORE}; use wasmtime::{AsContext, AsContextMut}; -use web3::types::H160; use crate::common::{mock_context, mock_data_source}; @@ -706,19 +705,19 @@ async fn test_big_int_to_hex(api_version: Version, gas_used: u64) { .await; // Convert zero to hex - let zero = BigInt::from_unsigned_u256(&U256::zero()); + let zero = BigInt::from_unsigned_u256(&U256::ZERO); let zero_hex_ptr: AscPtr = instance.invoke_export1("big_int_to_hex", &zero).await; let zero_hex_str: String = instance.asc_get(zero_hex_ptr).unwrap(); assert_eq!(zero_hex_str, "0x0"); // Convert 1 to hex - let one = BigInt::from_unsigned_u256(&U256::one()); + let one = BigInt::from_unsigned_u256(&U256::ONE); let one_hex_ptr: AscPtr = instance.invoke_export1("big_int_to_hex", &one).await; let 
one_hex_str: String = instance.asc_get(one_hex_ptr).unwrap(); assert_eq!(one_hex_str, "0x1"); // Convert U256::max_value() to hex - let u256_max = BigInt::from_unsigned_u256(&U256::max_value()); + let u256_max = BigInt::from_unsigned_u256(&U256::MAX); let u256_max_hex_ptr: AscPtr = instance.invoke_export1("big_int_to_hex", &u256_max).await; let u256_max_hex_str: String = instance.asc_get(u256_max_hex_ptr).unwrap(); diff --git a/runtime/test/src/test/abi.rs b/runtime/test/src/test/abi.rs index ba9048f6040..1d50ccc8f0a 100644 --- a/runtime/test/src/test/abi.rs +++ b/runtime/test/src/test/abi.rs @@ -1,4 +1,4 @@ -use graph::prelude::{ethabi::Token, web3::types::U256}; +use graph::{abi, prelude::alloy::primitives::Address}; use graph_runtime_wasm::asc_abi::class::{ ArrayBuffer, AscAddress, AscEnum, AscEnumArray, EthereumValueKind, StoreValueKind, TypedArray, }; @@ -182,9 +182,9 @@ async fn abi_bytes_and_fixed_bytes_v0_0_5() { test_abi_bytes_and_fixed_bytes(API_VERSION_0_0_5).await; } -async fn test_abi_ethabi_token_identity(api_version: Version) { +async fn test_abi_alloy_token_identity(api_version: Version) { let mut instance = test_module( - "abiEthabiTokenIdentity", + "abiAlloyTokenIdentity", mock_data_source( &wasm_file_path("abi_token.wasm", api_version.clone()), api_version.clone(), @@ -194,8 +194,8 @@ async fn test_abi_ethabi_token_identity(api_version: Version) { .await; // Token::Address - let address = H160([1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1]); - let token_address = Token::Address(address); + let address = [1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1]; + let token_address = abi::DynSolValue::Address(address.into()); let new_address_obj: AscPtr = instance .invoke_export1("token_to_address", &token_address) @@ -209,7 +209,7 @@ async fn test_abi_ethabi_token_identity(api_version: Version) { assert_eq!(token_address, new_token); // Token::Bytes - let token_bytes = Token::Bytes(vec![42, 45, 7, 245, 45]); + let token_bytes = abi::DynSolValue::Bytes(vec![42, 45, 7, 245, 45]); let new_bytes_obj: AscPtr = instance .invoke_export1("token_to_bytes", &token_bytes) .await; @@ -221,7 +221,8 @@ async fn test_abi_ethabi_token_identity(api_version: Version) { assert_eq!(token_bytes, new_token); // Token::Int - let int_token = Token::Int(U256([256, 453452345, 0, 42])); + let int = abi::I256::from_limbs([256, 453452345, 0, 42]); + let int_token = abi::DynSolValue::Int(int, int.bits() as usize); let new_int_obj: AscPtr = instance.invoke_export1("token_to_int", &int_token).await; @@ -233,7 +234,8 @@ async fn test_abi_ethabi_token_identity(api_version: Version) { assert_eq!(int_token, new_token); // Token::Uint - let uint_token = Token::Uint(U256([256, 453452345, 0, 42])); + let uint = U256::from_limbs([256, 453452345, 0, 42]); + let uint_token = abi::DynSolValue::Uint(uint, uint.bit_len()); let new_uint_obj: AscPtr = instance.invoke_export1("token_to_uint", &uint_token).await; @@ -246,7 +248,7 @@ async fn test_abi_ethabi_token_identity(api_version: Version) { assert_ne!(uint_token, int_token); // Token::Bool - let token_bool = Token::Bool(true); + let token_bool = abi::DynSolValue::Bool(true); let token_bool_ptr = instance.asc_new(&token_bool).await.unwrap(); let func = instance @@ -270,7 +272,7 @@ async fn test_abi_ethabi_token_identity(api_version: Version) { assert_eq!(token_bool, new_token); // Token::String - let token_string = Token::String("漢字Go🇧🇷".into()); + let token_string = abi::DynSolValue::String("漢字Go🇧🇷".into()); let new_string_obj: AscPtr = instance 
.invoke_export1("token_to_string", &token_string) .await; @@ -282,8 +284,8 @@ async fn test_abi_ethabi_token_identity(api_version: Version) { assert_eq!(token_string, new_token); // Token::Array - let token_array = Token::Array(vec![token_address, token_bytes, token_bool]); - let token_array_nested = Token::Array(vec![token_string, token_array]); + let token_array = abi::DynSolValue::Array(vec![token_address, token_bytes, token_bool]); + let token_array_nested = abi::DynSolValue::Array(vec![token_string, token_array]); let new_array_obj: AscEnumArray = instance .invoke_export1("token_to_array", &token_array_nested) .await; @@ -291,7 +293,7 @@ async fn test_abi_ethabi_token_identity(api_version: Version) { let new_token_ptr = instance .takes_ptr_returns_ptr("token_from_array", new_array_obj) .await; - let new_token: Token = instance.asc_get(new_token_ptr).unwrap(); + let new_token: abi::DynSolValue = instance.asc_get(new_token_ptr).unwrap(); assert_eq!(new_token, token_array_nested); } @@ -300,14 +302,14 @@ async fn test_abi_ethabi_token_identity(api_version: Version) { /// and assert the final token is the same as the starting one. #[graph::test] async fn abi_ethabi_token_identity_v0_0_4() { - test_abi_ethabi_token_identity(API_VERSION_0_0_4).await; + test_abi_alloy_token_identity(API_VERSION_0_0_4).await; } /// Test a roundtrip Token -> Payload -> Token identity conversion through asc, /// and assert the final token is the same as the starting one. #[graph::test] async fn abi_ethabi_token_identity_v0_0_5() { - test_abi_ethabi_token_identity(API_VERSION_0_0_5).await; + test_abi_alloy_token_identity(API_VERSION_0_0_5).await; } async fn test_abi_store_value(api_version: Version) { @@ -447,17 +449,17 @@ async fn test_abi_h160(api_version: Version) { api_version, ) .await; - let address = H160::zero(); + let address = Address::ZERO; // As an `Uint8Array` let new_address_obj: AscPtr = module.invoke_export1("test_address", &address).await; // This should have 1 added to the first and last byte. - let new_address: H160 = module.asc_get(new_address_obj).unwrap(); + let new_address: Address = module.asc_get(new_address_obj).unwrap(); assert_eq!( new_address, - H160([1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1]) + Address::from([1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1]) ) } @@ -509,14 +511,14 @@ async fn test_abi_big_int(api_version: Version) { .await; // Test passing in 0 and increment it by 1 - let old_uint = U256::zero(); + let old_uint = U256::ZERO; let new_uint_obj: AscPtr = module .invoke_export1("test_uint", &BigInt::from_unsigned_u256(&old_uint)) .await; let new_uint: BigInt = module.asc_get(new_uint_obj).unwrap(); assert_eq!(new_uint, BigInt::from(1_i32)); let new_uint = new_uint.to_unsigned_u256().unwrap(); - assert_eq!(new_uint, U256([1, 0, 0, 0])); + assert_eq!(new_uint, U256::from_limbs([1, 0, 0, 0])); // Test passing in -50 and increment it by 1 let old_uint = BigInt::from(-50); diff --git a/runtime/wasm/Cargo.toml b/runtime/wasm/Cargo.toml index c934cc943be..47ce3c2b288 100644 --- a/runtime/wasm/Cargo.toml +++ b/runtime/wasm/Cargo.toml @@ -5,7 +5,6 @@ edition.workspace = true [dependencies] async-trait = { workspace = true } -ethabi = "17.2" hex = "0.4.3" graph = { path = "../../graph" } bs58 = "0.4.0" diff --git a/runtime/wasm/src/asc_abi/class.rs b/runtime/wasm/src/asc_abi/class.rs index 0fac865ab0e..8d0f54049dc 100644 --- a/runtime/wasm/src/asc_abi/class.rs +++ b/runtime/wasm/src/asc_abi/class.rs @@ -1,8 +1,7 @@ //! 
Rust types that have with a direct correspondence to an Asc class, //! with their `AscType` implementations. use async_trait::async_trait; -use ethabi; - +use graph::abi; use graph::{ data::{ store::{self, scalar::Timestamp}, @@ -540,21 +539,25 @@ pub enum EthereumValueKind { FixedArray, Array, Tuple, + Function, } impl EthereumValueKind { - pub(crate) fn get_kind(token: ðabi::Token) -> Self { - match token { - ethabi::Token::Address(_) => EthereumValueKind::Address, - ethabi::Token::FixedBytes(_) => EthereumValueKind::FixedBytes, - ethabi::Token::Bytes(_) => EthereumValueKind::Bytes, - ethabi::Token::Int(_) => EthereumValueKind::Int, - ethabi::Token::Uint(_) => EthereumValueKind::Uint, - ethabi::Token::Bool(_) => EthereumValueKind::Bool, - ethabi::Token::String(_) => EthereumValueKind::String, - ethabi::Token::FixedArray(_) => EthereumValueKind::FixedArray, - ethabi::Token::Array(_) => EthereumValueKind::Array, - ethabi::Token::Tuple(_) => EthereumValueKind::Tuple, + pub(crate) fn get_kind(value: &abi::DynSolValue) -> Self { + use graph::abi::DynSolValue; + + match value { + DynSolValue::Bool(_) => Self::Bool, + DynSolValue::Int(_, _) => Self::Int, + DynSolValue::Uint(_, _) => Self::Uint, + DynSolValue::FixedBytes(_, _) => Self::FixedBytes, + DynSolValue::Address(_) => Self::Address, + DynSolValue::Function(_) => Self::Function, + DynSolValue::Bytes(_) => Self::Bytes, + DynSolValue::String(_) => Self::String, + DynSolValue::Array(_) => Self::Array, + DynSolValue::FixedArray(_) => Self::FixedArray, + DynSolValue::Tuple(_) => Self::Tuple, } } } diff --git a/runtime/wasm/src/host_exports.rs b/runtime/wasm/src/host_exports.rs index bc2ba76572d..0d9c0d2b3bc 100644 --- a/runtime/wasm/src/host_exports.rs +++ b/runtime/wasm/src/host_exports.rs @@ -5,12 +5,13 @@ use std::time::{Duration, Instant}; use graph::data::subgraph::API_VERSION_0_0_8; use graph::data::value::Word; -use graph::futures03::StreamExt; +use graph::futures03::stream::StreamExt; +use graph::prelude::alloy::primitives::Address; use graph::schema::EntityType; use never::Never; use semver::Version; -use web3::types::H160; +use graph::abi; use graph::blockchain::BlockTime; use graph::blockchain::Blockchain; use graph::components::link_resolver::LinkResolverContext; @@ -21,8 +22,6 @@ use graph::components::subgraph::{ use graph::data::store::{self}; use graph::data_source::{CausalityRegion, DataSource, EntityTypeAccess}; use graph::ensure; -use graph::prelude::ethabi::param_type::Reader; -use graph::prelude::ethabi::{decode, encode, Token}; use graph::prelude::serde_json; use graph::prelude::{slog::b, slog::record_static, *}; use graph::runtime::gas::{self, complexity, Gas, GasCounter}; @@ -1174,19 +1173,19 @@ impl HostExports { .map_err(|e| DeterministicHostError::from(Error::from(e))) } - pub(crate) fn string_to_h160( + pub(crate) fn string_to_address( &self, string: &str, gas: &GasCounter, state: &mut BlockState, - ) -> Result { + ) -> Result { Self::track_gas_and_ops( gas, state, gas::DEFAULT_GAS_OP.with_args(complexity::Size, &string), "string_to_h160", )?; - string_to_h160(string) + string_to_address(string) } pub(crate) fn bytes_to_string( @@ -1208,11 +1207,11 @@ impl HostExports { pub(crate) fn ethereum_encode( &self, - token: Token, + value: abi::DynSolValue, gas: &GasCounter, state: &mut BlockState, ) -> Result, DeterministicHostError> { - let encoded = encode(&[token]); + let encoded = value.abi_encode(); Self::track_gas_and_ops( gas, @@ -1230,7 +1229,7 @@ impl HostExports { data: Vec, gas: &GasCounter, state: &mut 
BlockState, - ) -> Result { + ) -> Result { Self::track_gas_and_ops( gas, state, @@ -1238,15 +1237,9 @@ impl HostExports { "ethereum_decode", )?; - let param_types = - Reader::read(&types).map_err(|e| anyhow::anyhow!("Failed to read types: {}", e))?; + let ty: abi::DynSolType = types.parse().context("Failed to read types")?; - decode(&[param_types], &data) - // The `.pop().unwrap()` here is ok because we're always only passing one - // `param_types` to `decode`, so the returned `Vec` has always size of one. - // We can't do `tokens[0]` because the value can't be moved out of the `Vec`. - .map(|mut tokens| tokens.pop().unwrap()) - .context("Failed to decode") + ty.abi_decode(&data).context("Failed to decode") } pub(crate) fn yaml_from_bytes( @@ -1277,11 +1270,9 @@ impl HostExports { } } -fn string_to_h160(string: &str) -> Result { - // `H160::from_str` takes a hex string with no leading `0x`. - let s = string.trim_start_matches("0x"); - H160::from_str(s) - .with_context(|| format!("Failed to convert string to Address/H160: '{}'", s)) +fn string_to_address(string: &str) -> Result { + Address::from_str(string) + .with_context(|| format!("Failed to convert string to Address: '{}'", string)) .map_err(DeterministicHostError::from) } @@ -1380,8 +1371,8 @@ pub mod test_support { #[test] fn test_string_to_h160_with_0x() { assert_eq!( - H160::from_str("A16081F360e3847006dB660bae1c6d1b2e17eC2A").unwrap(), - string_to_h160("0xA16081F360e3847006dB660bae1c6d1b2e17eC2A").unwrap() + Address::from_str("A16081F360e3847006dB660bae1c6d1b2e17eC2A").unwrap(), + string_to_address("0xA16081F360e3847006dB660bae1c6d1b2e17eC2A").unwrap() ) } diff --git a/runtime/wasm/src/module/context.rs b/runtime/wasm/src/module/context.rs index 490a2414c6b..6230a9a1596 100644 --- a/runtime/wasm/src/module/context.rs +++ b/runtime/wasm/src/module/context.rs @@ -455,7 +455,7 @@ impl WasmInstanceContext<'_> { let s: String = asc_get(self, str_ptr, gas)?; let host_exports = self.as_ref().ctx.host_exports.cheap_clone(); let ctx = &mut self.as_mut().ctx; - let h160 = host_exports.string_to_h160(&s, gas, &mut ctx.state)?; + let h160 = host_exports.string_to_address(&s, gas, &mut ctx.state)?; asc_new(self, &h160, gas).await } diff --git a/runtime/wasm/src/module/instance.rs b/runtime/wasm/src/module/instance.rs index 45a04e9afbc..d35730f9bfe 100644 --- a/runtime/wasm/src/module/instance.rs +++ b/runtime/wasm/src/module/instance.rs @@ -2,8 +2,8 @@ use std::sync::atomic::{AtomicBool, Ordering}; use std::time::Instant; use anyhow::Error; +use graph::futures03::future::BoxFuture; use graph::futures03::FutureExt as _; -use graph::prelude::web3::futures::future::BoxFuture; use graph::slog::SendSyncRefUnwindSafeKV; use semver::Version; diff --git a/runtime/wasm/src/to_from/external.rs b/runtime/wasm/src/to_from/external.rs index 022c07b0bc8..1ae4e6426fe 100644 --- a/runtime/wasm/src/to_from/external.rs +++ b/runtime/wasm/src/to_from/external.rs @@ -1,54 +1,58 @@ use async_trait::async_trait; -use ethabi; - +use graph::abi::DynSolValueExt; +use graph::abi::{self}; use graph::data::store::scalar::Timestamp; use graph::data::value::Word; +use graph::prelude::alloy::primitives::{Address, B256}; use graph::prelude::{BigDecimal, BigInt}; use graph::runtime::gas::GasCounter; +use graph::runtime::AscHeap; use graph::runtime::{ asc_get, asc_new, AscIndexId, AscPtr, AscType, AscValue, HostExportError, ToAscObj, }; use graph::{data::store, runtime::DeterministicHostError}; -use graph::{prelude::serde_json, runtime::FromAscObj}; -use 
graph::{prelude::web3::types as web3, runtime::AscHeap}; +use graph::{ + prelude::{alloy::primitives::U256, serde_json}, + runtime::FromAscObj, +}; use crate::asc_abi::class::*; +impl FromAscObj for Address { + fn from_asc_obj( + typed_array: Uint8Array, + heap: &H, + gas: &GasCounter, + depth: usize, + ) -> Result { + let data = <[u8; 20]>::from_asc_obj(typed_array, heap, gas, depth)?; + Ok(Self::from(data)) + } +} + #[async_trait] -impl ToAscObj for web3::H160 { +impl ToAscObj for Address { async fn to_asc_obj( &self, heap: &mut H, gas: &GasCounter, ) -> Result { - self.0.to_asc_obj(heap, gas).await + self.as_slice().to_asc_obj(heap, gas).await } } #[async_trait] -impl ToAscObj for web3::Bytes { +impl ToAscObj for B256 { async fn to_asc_obj( &self, heap: &mut H, gas: &GasCounter, ) -> Result { - self.0.to_asc_obj(heap, gas).await + self.as_slice().to_asc_obj(heap, gas).await } } -impl FromAscObj for web3::H160 { - fn from_asc_obj( - typed_array: Uint8Array, - heap: &H, - gas: &GasCounter, - depth: usize, - ) -> Result { - let data = <[u8; 20]>::from_asc_obj(typed_array, heap, gas, depth)?; - Ok(Self(data)) - } -} - -impl FromAscObj for web3::H256 { +impl FromAscObj for B256 { fn from_asc_obj( typed_array: Uint8Array, heap: &H, @@ -60,30 +64,6 @@ impl FromAscObj for web3::H256 { } } -#[async_trait] -impl ToAscObj for web3::H256 { - async fn to_asc_obj( - &self, - heap: &mut H, - gas: &GasCounter, - ) -> Result { - self.0.to_asc_obj(heap, gas).await - } -} - -#[async_trait] -impl ToAscObj for web3::U128 { - async fn to_asc_obj( - &self, - heap: &mut H, - gas: &GasCounter, - ) -> Result { - let mut bytes: [u8; 16] = [0; 16]; - self.to_little_endian(&mut bytes); - bytes.to_asc_obj(heap, gas).await - } -} - #[async_trait] impl ToAscObj for BigInt { async fn to_asc_obj( @@ -173,34 +153,47 @@ impl ToAscObj>> for Vec { } #[async_trait] -impl ToAscObj> for ethabi::Token { +impl ToAscObj> for abi::DynSolValue { async fn to_asc_obj( &self, heap: &mut H, gas: &GasCounter, ) -> Result, HostExportError> { - use ethabi::Token::*; - let kind = EthereumValueKind::get_kind(self); + let payload = match self { - Address(address) => asc_new::(heap, address, gas) - .await? - .to_payload(), - FixedBytes(bytes) | Bytes(bytes) => asc_new::(heap, &**bytes, gas) - .await? - .to_payload(), - Int(uint) => { - let n = BigInt::from_signed_u256(uint); + Self::Bool(val) => *val as u64, + Self::Int(val, _) => { + let bytes = val.to_le_bytes::<32>(); + let n = BigInt::from_signed_bytes_le(&bytes)?; + asc_new(heap, &n, gas).await?.to_payload() } - Uint(uint) => { - let n = BigInt::from_unsigned_u256(uint); + Self::Uint(val, _) => { + let bytes = val.to_le_bytes::<32>(); + let n = BigInt::from_unsigned_bytes_le(&bytes)?; + asc_new(heap, &n, gas).await?.to_payload() } - Bool(b) => *b as u64, - String(string) => asc_new(heap, &**string, gas).await?.to_payload(), - FixedArray(tokens) | Array(tokens) => asc_new(heap, &**tokens, gas).await?.to_payload(), - Tuple(tokens) => asc_new(heap, &**tokens, gas).await?.to_payload(), + Self::FixedBytes(val, size) => { + // FixedBytes stores the value in a 32-byte word, but we only want the first `size` bytes + asc_new::(heap, &val.as_slice()[..*size], gas) + .await? + .to_payload() + } + Self::Address(val) => asc_new::(heap, val.as_slice(), gas) + .await? + .to_payload(), + Self::Function(val) => asc_new::(heap, val.as_slice(), gas) + .await? + .to_payload(), + Self::Bytes(val) => asc_new::(heap, &**val, gas) + .await? 
+ .to_payload(), + Self::String(val) => asc_new(heap, &**val, gas).await?.to_payload(), + Self::Array(values) => asc_new(heap, &**values, gas).await?.to_payload(), + Self::FixedArray(values) => asc_new(heap, &**values, gas).await?.to_payload(), + Self::Tuple(values) => asc_new(heap, &**values, gas).await?.to_payload(), }; Ok(AscEnum { @@ -211,34 +204,41 @@ impl ToAscObj> for ethabi::Token { } } -impl FromAscObj> for ethabi::Token { +impl FromAscObj> for abi::DynSolValue { fn from_asc_obj( asc_enum: AscEnum, heap: &H, gas: &GasCounter, depth: usize, ) -> Result { - use ethabi::Token; - let payload = asc_enum.payload; - Ok(match asc_enum.kind { - EthereumValueKind::Bool => Token::Bool(bool::from(payload)), + + let value = match asc_enum.kind { EthereumValueKind::Address => { let ptr: AscPtr = AscPtr::from(payload); - Token::Address(asc_get(heap, ptr, gas, depth)?) + let bytes: [u8; 20] = asc_get(heap, ptr, gas, depth)?; + + Self::Address(bytes.into()) } EthereumValueKind::FixedBytes => { let ptr: AscPtr = AscPtr::from(payload); - Token::FixedBytes(asc_get(heap, ptr, gas, depth)?) + let bytes: Vec = asc_get(heap, ptr, gas, depth)?; + + Self::fixed_bytes_from_slice(&bytes)? } EthereumValueKind::Bytes => { let ptr: AscPtr = AscPtr::from(payload); - Token::Bytes(asc_get(heap, ptr, gas, depth)?) + let bytes: Vec = asc_get(heap, ptr, gas, depth)?; + + Self::Bytes(bytes) } EthereumValueKind::Int => { let ptr: AscPtr = AscPtr::from(payload); let n: BigInt = asc_get(heap, ptr, gas, depth)?; - Token::Int(n.to_signed_u256()) + let x = + abi::I256::from_le_bytes(n.to_signed_u256().to_le_bytes::<{ U256::BYTES }>()); + + Self::Int(x, x.bits() as usize) } EthereumValueKind::Uint => { let ptr: AscPtr = AscPtr::from(payload); @@ -246,25 +246,38 @@ impl FromAscObj> for ethabi::Token { let uint = n .to_unsigned_u256() .map_err(DeterministicHostError::Other)?; - Token::Uint(uint) + Self::Uint(uint, uint.bit_len()) } + EthereumValueKind::Bool => Self::Bool(bool::from(payload)), EthereumValueKind::String => { let ptr: AscPtr = AscPtr::from(payload); - Token::String(asc_get(heap, ptr, gas, depth)?) + + Self::String(asc_get(heap, ptr, gas, depth)?) } EthereumValueKind::FixedArray => { let ptr: AscEnumArray = AscPtr::from(payload); - Token::FixedArray(asc_get(heap, ptr, gas, depth)?) + + Self::FixedArray(asc_get(heap, ptr, gas, depth)?) } EthereumValueKind::Array => { let ptr: AscEnumArray = AscPtr::from(payload); - Token::Array(asc_get(heap, ptr, gas, depth)?) + + Self::Array(asc_get(heap, ptr, gas, depth)?) } EthereumValueKind::Tuple => { let ptr: AscEnumArray = AscPtr::from(payload); - Token::Tuple(asc_get(heap, ptr, gas, depth)?) + + Self::Tuple(asc_get(heap, ptr, gas, depth)?) 
} - }) + EthereumValueKind::Function => { + let ptr: AscPtr = AscPtr::from(payload); + let bytes: [u8; 24] = asc_get(heap, ptr, gas, depth)?; + + Self::Function(bytes.into()) + } + }; + + Ok(value) } } diff --git a/server/index-node/src/resolver.rs b/server/index-node/src/resolver.rs index 2a479193bc2..b8385866d33 100644 --- a/server/index-node/src/resolver.rs +++ b/server/index-node/src/resolver.rs @@ -3,8 +3,8 @@ use std::collections::BTreeMap; use async_trait::async_trait; use graph::data::query::Trace; use graph::data::store::Id; +use graph::prelude::alloy::primitives::Address; use graph::schema::EntityType; -use web3::types::Address; use git_testament::{git_testament, CommitKind}; use graph::amp; @@ -375,7 +375,7 @@ where if !poi_protection.validate_access_token(self.bearer_token.as_deref()) { // Let's sign the POI with a zero'd address when the access token is // invalid. - indexer = Some(Address::zero()); + indexer = Some(Address::ZERO); } let poi_fut = self diff --git a/store/postgres/src/chain_store.rs b/store/postgres/src/chain_store.rs index 9eb5660552b..bdf63f52c31 100644 --- a/store/postgres/src/chain_store.rs +++ b/store/postgres/src/chain_store.rs @@ -9,6 +9,7 @@ use graph::components::store::ChainHeadStore; use graph::data::store::ethereum::call; use graph::env::ENV_VARS; use graph::parking_lot::RwLock; +use graph::prelude::alloy::primitives::B256; use graph::prelude::MetricsRegistry; use graph::prometheus::{CounterVec, GaugeVec}; use graph::slog::Logger; @@ -28,7 +29,6 @@ use std::{ use graph::blockchain::{Block, BlockHash, ChainIdentifier, ExtendedBlockPtr}; use graph::cheap_clone::CheapClone; -use graph::prelude::web3::types::{H256, U256}; use graph::prelude::{ serde_json as json, transaction_receipt::LightTransactionReceipt, BlockNumber, BlockPtr, CachedEthereumCall, ChainStore as ChainStoreTrait, Error, EthereumCallCache, StoreError, @@ -58,12 +58,12 @@ impl JsonBlock { } } - fn timestamp(&self) -> Option { + fn timestamp(&self) -> Option { self.data .as_ref() .and_then(|data| data.get("timestamp")) .and_then(|ts| ts.as_str()) - .and_then(|ts| U256::from_dec_str(ts).ok()) + .and_then(|ts| ts.parse::().ok()) } } @@ -105,9 +105,8 @@ mod data { use graph::blockchain::{Block, BlockHash}; use graph::data::store::scalar::Bytes; use graph::internal_error; - use graph::prelude::ethabi::ethereum_types::H160; + use graph::prelude::alloy::primitives::{Address, B256}; use graph::prelude::transaction_receipt::LightTransactionReceipt; - use graph::prelude::web3::types::H256; use graph::prelude::{ info, serde_json as json, BlockNumber, BlockPtr, CachedEthereumCall, Error, Logger, StoreError, @@ -177,17 +176,17 @@ mod data { hash: Vec, } - // Like H256::from_slice, but returns an error instead of panicking + // Like B256::from_slice, but returns an error instead of panicking // when `bytes` does not have the right length - fn h256_from_bytes(bytes: &[u8]) -> Result { - if bytes.len() == H256::len_bytes() { - Ok(H256::from_slice(bytes)) + fn b256_from_bytes(bytes: &[u8]) -> Result { + if bytes.len() == B256::len_bytes() { + Ok(B256::from_slice(bytes)) } else { Err(internal_error!( "invalid H256 value `{}` has {} bytes instead of {}", graph::prelude::hex::encode(bytes), bytes.len(), - H256::len_bytes() + B256::len_bytes() )) } } @@ -887,9 +886,9 @@ mod data { conn: &mut AsyncPgConnection, chain: &str, first_block: i64, - hash: H256, - genesis: H256, - ) -> Result, Error> { + hash: B256, + genesis: B256, + ) -> Result, Error> { match self { Storage::Shared => { // We recursively build a 
temp table 'chain' containing the hash and @@ -973,15 +972,15 @@ mod data { ); let missing = sql_query(query) - .bind::(hash.as_bytes()) - .bind::(genesis.as_bytes()) + .bind::(hash.as_slice()) + .bind::(genesis.as_slice()) .bind::(first_block) .load::(conn) .await?; let missing = match missing.len() { 0 => None, - 1 => Some(h256_from_bytes(&missing[0].hash)?), + 1 => Some(b256_from_bytes(&missing[0].hash)?), _ => { unreachable!("the query can only return no or one row") } @@ -1221,7 +1220,7 @@ mod data { &self, conn: &mut AsyncPgConnection, chain: &str, - block_hashes: &[&H256], + block_hashes: &[&B256], ) -> Result { match self { Storage::Shared => { @@ -1247,7 +1246,7 @@ mod data { ); let hashes: Vec<&[u8]> = - block_hashes.iter().map(|hash| hash.as_bytes()).collect(); + block_hashes.iter().map(|hash| hash.as_slice()).collect(); sql_query(query) .bind::, _>(hashes) @@ -1399,7 +1398,7 @@ mod data { .map(|row| CachedEthereumCall { blake3_id: row.0, block_ptr: block_ptr.clone(), - contract_address: H160::from_slice(&row.2[..]), + contract_address: Address::from_slice(&row.2[..]), return_value: row.1, }) .collect()) @@ -1814,7 +1813,7 @@ mod data { pub(crate) async fn find_transaction_receipts_in_block( &self, conn: &mut AsyncPgConnection, - block_hash: H256, + block_hash: B256, ) -> anyhow::Result> { let query = sql_query(format!( " @@ -1848,7 +1847,7 @@ from ( } Storage::Private(_) => { query - .bind::(block_hash.as_bytes()) + .bind::(block_hash.as_slice()) .get_results(conn) .await } @@ -2392,7 +2391,7 @@ impl ChainStore { self.recent_blocks_cache.blocks() } - pub async fn delete_blocks(&self, block_hashes: &[&H256]) -> Result { + pub async fn delete_blocks(&self, block_hashes: &[&B256]) -> Result { let mut conn = self.pool.get_permitted().await?; self.storage .delete_blocks_by_hash(&mut conn, &self.chain, block_hashes) @@ -2454,10 +2453,10 @@ impl ChainStore { async fn attempt_chain_head_update_inner( &self, ancestor_count: BlockNumber, - ) -> Result<(Option, Option<(String, i64)>), StoreError> { + ) -> Result<(Option, Option<(String, i64)>), StoreError> { use public::ethereum_networks as n; - let genesis_block_ptr = self.genesis_block_ptr().await?.hash_as_h256(); + let genesis_block_ptr = self.genesis_block_ptr().await?.hash.as_b256(); let mut conn = self.pool.get_permitted().await?; let candidate = self @@ -2475,7 +2474,7 @@ impl ChainStore { &mut conn, &self.chain, first_block as i64, - ptr.hash_as_h256(), + ptr.hash.as_b256(), genesis_block_ptr, ) .await? 
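For reference, the web3-to-alloy conversions applied across the store code above reduce to a handful of recurring idioms. The following is a minimal standalone sketch of those idioms; it assumes only the `alloy` crate's `primitives` module (re-exported in this codebase as `graph::prelude::alloy`), and the `example` function itself is illustrative, not part of the patch:

// Illustrative sketch of the H256/H160/U256 -> B256/Address/U256 idioms used above.
use alloy::primitives::{Address, B256, U256};

fn example() {
    // web3: H256::zero()            -> alloy: B256::ZERO
    let zero_hash = B256::ZERO;

    // web3: hash.as_bytes()         -> alloy: hash.as_slice()
    let bytes: &[u8] = zero_hash.as_slice();

    // web3: H256::from_slice(bytes) -> alloy: B256::from_slice(bytes)
    // (both panic on a length mismatch, hence the fallible b256_from_bytes helper above)
    let roundtrip = B256::from_slice(bytes);
    assert_eq!(roundtrip, zero_hash);

    // web3: H160([...; 20])         -> alloy: Address::new([...; 20]) / Address::ZERO
    let addr = Address::new([0u8; 20]);
    assert_eq!(addr, Address::ZERO);

    // web3: U256::from_dec_str("42") -> alloy: "42".parse::<U256>()
    let ts: U256 = "42".parse().unwrap();
    assert_eq!(ts, U256::from(42u64));
}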
@@ -2488,7 +2487,7 @@ impl ChainStore { let hash = ptr.hash_hex(); let number = ptr.number as i64; - conn.transaction::<(Option, Option<(String, i64)>), StoreError, _>(|conn| { + conn.transaction::<(Option, Option<(String, i64)>), StoreError, _>(|conn| { async move { update(n::table.filter(n::name.eq(&self.chain))) .set(( @@ -2523,7 +2522,7 @@ fn json_block_to_block_ptr_ext(json_block: &JsonBlock) -> Result, ancestor_count: BlockNumber, - ) -> Result, Error> { + ) -> Result, Error> { let (missing, ptr) = self.attempt_chain_head_update_inner(ancestor_count).await?; if let Some((hash, number)) = ptr { @@ -3032,7 +3031,7 @@ impl ChainStoreTrait for ChainStore { async fn transaction_receipts_in_block( &self, - block_hash: &H256, + block_hash: &B256, ) -> Result, StoreError> { let mut conn = self.pool.get_permitted().await?; self.storage diff --git a/store/postgres/src/deployment.rs b/store/postgres/src/deployment.rs index 239ccdf61b3..f3d4a9f1362 100644 --- a/store/postgres/src/deployment.rs +++ b/store/postgres/src/deployment.rs @@ -12,6 +12,7 @@ use diesel::{ sql_types::{Nullable, Text}, }; use diesel_async::{RunQueryDsl, SimpleAsyncConnection}; +use graph::prelude::alloy::primitives::B256; use graph::{ blockchain::block_stream::FirehoseCursor, data::subgraph::schema::SubgraphError, @@ -20,18 +21,14 @@ use graph::{ slog::{debug, Logger}, }; use graph::{components::store::StoreResult, semver::Version}; -use graph::{ - data::store::scalar::ToPrimitive, - prelude::{ - anyhow, hex, web3::types::H256, BlockNumber, BlockPtr, DeploymentHash, DeploymentState, - StoreError, - }, - schema::InputSchema, -}; use graph::{ data::subgraph::schema::{DeploymentCreate, SubgraphManifestEntity}, util::backoff::ExponentialBackoff, }; +use graph::{ + prelude::{anyhow, hex, BlockNumber, BlockPtr, DeploymentHash, DeploymentState, StoreError}, + schema::InputSchema, +}; use stable_hash_legacy::crypto::SetHasher; use std::sync::Arc; use std::{convert::TryFrom, ops::Bound, time::Duration}; @@ -263,8 +260,7 @@ async fn graft( // FIXME: // // workaround for arweave - let hash = H256::from_slice(&hash.as_slice()[..32]); - let block = block.to_u64().expect("block numbers fit into a u64"); + let hash = B256::from_slice(&hash.as_slice()[..32]); let subgraph = DeploymentHash::new(subgraph.clone()).map_err(|_| { StoreError::Unknown(anyhow!( "the base subgraph for a graft must be a valid subgraph id but is `{}`", diff --git a/store/postgres/src/deployment_store.rs b/store/postgres/src/deployment_store.rs index b8c34b64e81..3703534979c 100644 --- a/store/postgres/src/deployment_store.rs +++ b/store/postgres/src/deployment_store.rs @@ -20,6 +20,7 @@ use graph::data::subgraph::{status, SPEC_VERSION_0_0_6}; use graph::data_source::CausalityRegion; use graph::derive::CheapClone; use graph::futures03::FutureExt; +use graph::prelude::alloy::primitives::Address; use graph::prelude::{ApiVersion, EntityOperation, PoolWaitStats, SubgraphDeploymentEntity}; use graph::semver::Version; use itertools::Itertools; @@ -38,12 +39,11 @@ use graph::components::subgraph::{ProofOfIndexingFinisher, ProofOfIndexingVersio use graph::data::subgraph::schema::{DeploymentCreate, SubgraphError}; use graph::internal_error; use graph::prelude::{ - anyhow, debug, info, o, warn, web3, AttributeNames, BlockNumber, BlockPtr, CheapClone, + anyhow, debug, info, o, warn, AttributeNames, BlockNumber, BlockPtr, CheapClone, DeploymentHash, DeploymentState, Entity, EntityQuery, Error, Logger, QueryExecutionError, StopwatchMetrics, StoreError, UnfailOutcome, Value, 
ENV_VARS, }; use graph::schema::{ApiSchema, EntityKey, EntityType, InputSchema}; -use web3::types::Address; use crate::block_range::{BLOCK_COLUMN, BLOCK_RANGE_COLUMN}; use crate::deployment::{self, OnSync}; diff --git a/store/postgres/src/detail.rs b/store/postgres/src/detail.rs index 74a6d546a4a..3be35320d8d 100644 --- a/store/postgres/src/detail.rs +++ b/store/postgres/src/detail.rs @@ -14,13 +14,14 @@ use git_testament::{git_testament, git_testament_macros}; use graph::blockchain::BlockHash; use graph::data::store::scalar::ToPrimitive; use graph::data::subgraph::schema::{SubgraphError, SubgraphManifestEntity}; +use graph::prelude::alloy::primitives::B256; use graph::prelude::BlockNumber; use graph::prelude::{ chrono::{DateTime, Utc}, BlockPtr, DeploymentHash, StoreError, SubgraphDeploymentEntity, }; use graph::schema::InputSchema; -use graph::{data::subgraph::status, internal_error, prelude::web3::types::H256}; +use graph::{data::subgraph::status, internal_error}; use itertools::Itertools; use std::collections::HashMap; use std::convert::TryFrom; @@ -191,7 +192,7 @@ impl TryFrom for SubgraphError { // FIXME: // // workaround for arweave - let block_hash = block_hash.map(|hash| H256::from_slice(&hash.as_slice()[..32])); + let block_hash = block_hash.map(|hash| B256::from_slice(&hash.as_slice()[..32])); // In existing databases, we have errors that have a `block_range` of // `UNVERSIONED_RANGE`, which leads to `None` as the block number, but // has a hash. Conversely, it is also possible for an error to not have a diff --git a/store/postgres/src/store.rs b/store/postgres/src/store.rs index 5acac2691b2..4adec80ab5b 100644 --- a/store/postgres/src/store.rs +++ b/store/postgres/src/store.rs @@ -12,8 +12,8 @@ use graph::{ data::subgraph::status, internal_error, prelude::{ - web3::types::Address, BlockNumber, BlockPtr, CheapClone, DeploymentHash, PartialBlockPtr, - QueryExecutionError, StoreError, + alloy::primitives::Address, BlockNumber, BlockPtr, CheapClone, DeploymentHash, + PartialBlockPtr, QueryExecutionError, StoreError, }, }; diff --git a/store/postgres/src/subgraph_store.rs b/store/postgres/src/subgraph_store.rs index 8f2faa9ecda..478d21eba02 100644 --- a/store/postgres/src/subgraph_store.rs +++ b/store/postgres/src/subgraph_store.rs @@ -27,16 +27,16 @@ use graph::{ internal_error, prelude::StoreEvent, prelude::{ - anyhow, lazy_static, o, web3::types::Address, ApiVersion, BlockNumber, BlockPtr, - ChainStore, DeploymentHash, EntityOperation, Logger, MetricsRegistry, NodeId, - PartialBlockPtr, StoreError, SubgraphDeploymentEntity, SubgraphName, - SubgraphStore as SubgraphStoreTrait, SubgraphVersionSwitchingMode, + anyhow, lazy_static, o, ApiVersion, BlockNumber, BlockPtr, ChainStore, DeploymentHash, + EntityOperation, Logger, MetricsRegistry, NodeId, PartialBlockPtr, StoreError, + SubgraphDeploymentEntity, SubgraphName, SubgraphStore as SubgraphStoreTrait, + SubgraphVersionSwitchingMode, }, schema::{ApiSchema, InputSchema}, url::Url, util::timed_cache::TimedCache, }; -use graph::{derive::CheapClone, futures03::future::join_all}; +use graph::{derive::CheapClone, futures03::future::join_all, prelude::alloy::primitives::Address}; use crate::{ deployment::{OnSync, SubgraphHealth}, @@ -1155,7 +1155,7 @@ impl Inner { }; let block_for_poi_query = BlockPtr::new(block_hash.clone(), block_number); - let indexer = Some(Address::zero()); + let indexer = Some(Address::ZERO); let poi = store .get_proof_of_indexing(site, &indexer, block_for_poi_query) .await?; diff --git 
a/store/postgres/src/transaction_receipt.rs b/store/postgres/src/transaction_receipt.rs index 115a32f1cc2..1177422f42b 100644 --- a/store/postgres/src/transaction_receipt.rs +++ b/store/postgres/src/transaction_receipt.rs @@ -39,15 +39,25 @@ impl TryFrom for LightTransactionReceipt { let block_hash = block_hash.map(drain_vector).transpose()?; let block_number = block_number.map(drain_vector).transpose()?; let gas_used = gas_used.map(drain_vector).transpose()?; - let status = status.map(drain_vector).transpose()?; + + // Convert big-endian bytes to numbers + let transaction_index = u64::from_be_bytes(transaction_index); + let block_number = block_number.map(u64::from_be_bytes); + let gas_used = gas_used.map(u64::from_be_bytes).unwrap_or(0); + + // Status is non-zero for success, zero for failure. Works for any byte length. + // Defaults to true for pre-Byzantium receipts (no status field), consistent with alloy. + let status = status + .map(|bytes| bytes.iter().any(|&b| b != 0)) + .unwrap_or(true); Ok(LightTransactionReceipt { transaction_hash: transaction_hash.into(), - transaction_index: transaction_index.into(), + transaction_index, block_hash: block_hash.map(Into::into), - block_number: block_number.map(Into::into), - gas_used: gas_used.map(Into::into), - status: status.map(Into::into), + block_number, + gas_used, + status, }) } } diff --git a/store/test-store/Cargo.toml b/store/test-store/Cargo.toml index 2f0d24a9489..aebee01399f 100644 --- a/store/test-store/Cargo.toml +++ b/store/test-store/Cargo.toml @@ -22,6 +22,7 @@ tokio = { workspace = true } [dev-dependencies] hex = "0.4.3" pretty_assertions = "1.4.1" +serde_json = { workspace = true } [lints] workspace = true diff --git a/store/test-store/src/block_store.rs b/store/test-store/src/block_store.rs index 76ae6d52937..2385574e527 100644 --- a/store/test-store/src/block_store.rs +++ b/store/test-store/src/block_store.rs @@ -1,17 +1,18 @@ use std::{convert::TryFrom, str::FromStr, sync::Arc}; use graph::blockchain::{BlockTime, ChainIdentifier}; +use graph::prelude::alloy::consensus::Header as ConsensusHeader; +use graph::prelude::alloy::primitives::{Bloom, B256, U256}; +use graph::prelude::alloy::rpc::types::{Block, Header}; +use graph::prelude::LightEthereumBlock; use lazy_static::lazy_static; use graph::components::store::BlockStore; use graph::{ blockchain::Block as BlockchainBlock, - prelude::{ - serde_json, web3::types::H256, web3::types::U256, BlockHash, BlockNumber, BlockPtr, - EthereumBlock, LightEthereumBlock, - }, + prelude::{serde_json, BlockHash, BlockNumber, BlockPtr, EthereumBlock}, }; -use graph_chain_ethereum::codec::{Block, BlockHeader}; +use graph_chain_ethereum::codec::{Block as FirehoseBlock, BlockHeader}; use prost_types::Timestamp; use crate::{GENESIS_PTR, NETWORK_VERSION}; @@ -103,23 +104,33 @@ impl FakeBlock { } pub fn as_ethereum_block(&self) -> EthereumBlock { - let parent_hash = H256::from_str(self.parent_hash.as_str()).expect("invalid parent hash"); + let parent_hash = B256::from_str(self.parent_hash.as_str()).expect("invalid parent hash"); + let block_hash = B256::from_str(self.hash.as_str()).expect("invalid block hash"); + + let mut consensus_header = ConsensusHeader::default(); + consensus_header.number = self.number as u64; + consensus_header.parent_hash = parent_hash; + consensus_header.logs_bloom = Bloom::default(); // Empty bloom filter for test blocks + if let Some(ts) = self.timestamp { + consensus_header.timestamp = ts.to::(); + } - let block = LightEthereumBlock { - number: 
Some(self.number.into()), - parent_hash, - hash: Some(H256(self.block_hash().as_slice().try_into().unwrap())), - timestamp: self.timestamp.unwrap_or_default(), - ..Default::default() + let rpc_header = Header { + hash: block_hash, + inner: consensus_header, + total_difficulty: None, + size: None, }; + let block = Block::empty(rpc_header); + EthereumBlock { - block: Arc::new(block), + block: Arc::new(LightEthereumBlock::new(block.into())), transaction_receipts: Vec::new(), } } - pub fn as_firehose_block(&self) -> Block { + pub fn as_firehose_block(&self) -> FirehoseBlock { let header = BlockHeader { parent_hash: self.parent_hash.clone().into_bytes(), timestamp: self.timestamp.map(|ts| Timestamp { @@ -129,7 +140,7 @@ impl FakeBlock { ..Default::default() }; - Block { + FirehoseBlock { hash: self.hash.clone().into_bytes(), number: self.number as u64, header: Some(header), diff --git a/store/test-store/src/store.rs b/store/test-store/src/store.rs index af973c32993..5f2cc52949b 100644 --- a/store/test-store/src/store.rs +++ b/store/test-store/src/store.rs @@ -10,6 +10,7 @@ use graph::data::subgraph::schema::{DeploymentCreate, SubgraphError}; use graph::data::subgraph::SubgraphFeature; use graph::data_source::DataSource; use graph::log; +use graph::prelude::alloy::primitives::B256; use graph::prelude::{QueryStoreManager as _, SubgraphStore as _, *}; use graph::schema::EntityType; use graph::schema::InputSchema; @@ -37,7 +38,6 @@ use std::collections::BTreeSet; use std::collections::HashMap; use std::time::Instant; use std::{marker::PhantomData, sync::Mutex}; -use web3::types::H256; pub const NETWORK_NAME: &str = "fake_network"; pub const DATA_SOURCE_KIND: &str = "mock/kind"; @@ -69,14 +69,14 @@ lazy_static! { pub static ref SUBGRAPH_STORE: Arc = STORE.subgraph_store(); static ref BLOCK_STORE: DieselBlockStore = STORE.block_store(); pub static ref GENESIS_PTR: BlockPtr = ( - H256::from(hex!( + B256::from(hex!( "bd34884280958002c51d3f7b5f853e6febeba33de0f40d15b0363006533c924f" )), 0u64 ) .into(); pub static ref BLOCK_ONE: BlockPtr = ( - H256::from(hex!( + B256::from(hex!( "8511fa04b64657581e3f00e14543c1d522d5d7e771b54aa3060b662ade47da13" )), 1u64 @@ -84,14 +84,15 @@ lazy_static! 
{ .into(); pub static ref BLOCKS: [BlockPtr; 4] = { let two: BlockPtr = ( - H256::from(hex!( + B256::from(hex!( "b98fb783b49de5652097a989414c767824dff7e7fd765a63b493772511db81c1" )), 2u64, ) .into(); + let three: BlockPtr = ( - H256::from(hex!( + B256::from(hex!( "977c084229c72a0fa377cae304eda9099b6a2cb5d83b25cdf0f0969b69874255" )), 3u64, diff --git a/store/test-store/tests/chain/ethereum/manifest.rs b/store/test-store/tests/chain/ethereum/manifest.rs index e4dee44c9ab..a201a9c233e 100644 --- a/store/test-store/tests/chain/ethereum/manifest.rs +++ b/store/test-store/tests/chain/ethereum/manifest.rs @@ -19,7 +19,7 @@ use graph::data_source::offchain::OffchainDataSourceKind; use graph::data_source::{DataSourceEnum, DataSourceTemplate}; use graph::entity; use graph::env::ENV_VARS; -use graph::prelude::web3::types::H256; +use graph::prelude::alloy::primitives::B256; use graph::prelude::{ anyhow, serde_yaml, BigDecimal, BigInt, DeploymentHash, Link, SubgraphManifest, SubgraphManifestResolveError, SubgraphManifestValidationError, SubgraphStore, @@ -812,18 +812,18 @@ specVersion: 1.2.0 assert_eq!( Some(vec![ - H256::from_str("0000000000000000000000000000000000000000000000000000000000000000") + B256::from_str("0000000000000000000000000000000000000000000000000000000000000000") .unwrap(), - H256::from_str("0000000000000000000000000000000000000000000000000000000000000001") + B256::from_str("0000000000000000000000000000000000000000000000000000000000000001") .unwrap(), - H256::from_str("0000000000000000000000000000000000000000000000000000000000000002") + B256::from_str("0000000000000000000000000000000000000000000000000000000000000002") .unwrap() ]), topic1.clone() ); assert_eq!( - Some(vec![H256::from_str( + Some(vec![B256::from_str( "0000000000000000000000000000000000000000000000000000000000000001" ) .unwrap()]), @@ -831,7 +831,7 @@ specVersion: 1.2.0 ); assert_eq!( - Some(vec![H256::from_str( + Some(vec![B256::from_str( "0000000000000000000000000000000000000000000000000000000000000002" ) .unwrap()]), diff --git a/store/test-store/tests/graph/entity_cache.rs b/store/test-store/tests/graph/entity_cache.rs index 0923b038254..02b02579440 100644 --- a/store/test-store/tests/graph/entity_cache.rs +++ b/store/test-store/tests/graph/entity_cache.rs @@ -8,6 +8,7 @@ use graph::components::store::{ use graph::data::store::Id; use graph::data::subgraph::schema::{DeploymentCreate, SubgraphError, SubgraphHealth}; use graph::data_source::CausalityRegion; +use graph::prelude::alloy::primitives::B256; use graph::schema::{EntityKey, EntityType, InputSchema}; use graph::{ components::store::{DeploymentId, DeploymentLocator}, @@ -22,7 +23,6 @@ use slog::Logger; use std::collections::{BTreeMap, BTreeSet}; use std::marker::PhantomData; use std::sync::Arc; -use web3::types::H256; use graph_store_postgres::SubgraphStore as DieselSubgraphStore; use test_store::*; @@ -399,7 +399,7 @@ lazy_static! 
{ InputSchema::parse_latest(ACCOUNT_GQL, LOAD_RELATED_ID.clone()) .expect("Failed to parse user schema"); static ref TEST_BLOCK_1_PTR: BlockPtr = ( - H256::from(hex!( + B256::from(hex!( "8511fa04b64657581e3f00e14543c1d522d5d7e771b54aa3060b662ade47da13" )), 1u64 diff --git a/store/test-store/tests/postgres/chain_head.rs b/store/test-store/tests/postgres/chain_head.rs index 06f4fb9e937..6ee612de191 100644 --- a/store/test-store/tests/postgres/chain_head.rs +++ b/store/test-store/tests/postgres/chain_head.rs @@ -7,14 +7,14 @@ use graph::data::store::ethereum::call; use graph::data::store::scalar::Bytes; use graph::env::ENV_VARS; use graph::futures03::executor; +use graph::prelude::alloy::primitives::{Address, B256}; use std::future::Future; use std::sync::Arc; -use graph::prelude::web3::types::H256; +use graph::cheap_clone::CheapClone; +use graph::prelude::{alloy, serde_json as json, EthereumBlock}; use graph::prelude::{anyhow::anyhow, anyhow::Error}; -use graph::prelude::{serde_json as json, EthereumBlock}; use graph::prelude::{BlockNumber, QueryStoreManager, QueryTarget}; -use graph::{cheap_clone::CheapClone, prelude::web3::types::H160}; use graph::{components::store::BlockStore as _, prelude::DeploymentHash}; use graph::{ components::store::ChainHeadStore as _, components::store::ChainStore as _, @@ -329,7 +329,7 @@ fn check_ancestor( } let act_block = json::from_value::(act.0)?; - let act_hash = format!("{:x}", act_block.block.hash.unwrap()); + let act_hash = format!("{:x}", act_block.block.hash()); let exp_hash = &exp.hash; if &act_hash != exp_hash { @@ -441,7 +441,9 @@ fn eth_call_cache() { call::Retval::Value(Bytes::from(value)) } - let address = H160([1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1]); + let address = alloy::primitives::Address::from_slice(&[ + 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, + ]); let call: [u8; 6] = [1, 2, 3, 4, 5, 6]; let return_value: [u8; 3] = [7, 8, 9]; @@ -537,7 +539,7 @@ fn test_disable_call_cache() { ENV_VARS.set_store_call_cache_disabled_for_tests(true); let logger = LOGGER.cheap_clone(); - let address = H160([1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5]); + let address = Address::new([1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5]); let call: [u8; 6] = [1, 2, 3, 4, 5, 6]; let return_value: [u8; 3] = [7, 8, 9]; @@ -577,7 +579,9 @@ fn test_clear_stale_call_cache() { run_test_async(chain, |chain_store, _, _| async move { let logger = LOGGER.cheap_clone(); - let address = H160([1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 3]); + let address = alloy::primitives::Address::from([ + 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 3, + ]); let call: [u8; 6] = [1, 2, 3, 4, 5, 6]; let return_value: [u8; 3] = [7, 8, 9]; @@ -628,7 +632,7 @@ fn test_clear_stale_call_cache() { diesel::sql_query(format!( "UPDATE {meta_table} SET accessed_at = NOW() - INTERVAL '8 days' WHERE contract_address = $1" )) - .bind::(address.as_bytes()) + .bind::(address.as_slice()) .execute(&mut conn) .await .unwrap(); @@ -651,7 +655,7 @@ fn test_transaction_receipts_in_block_function() { let chain = vec![]; run_test_async(chain, move |store, _, _| async move { let receipts = store - .transaction_receipts_in_block(&H256::zero()) + .transaction_receipts_in_block(&B256::ZERO) .await .unwrap(); assert!(receipts.is_empty()) diff --git a/store/test-store/tests/postgres/relational.rs b/store/test-store/tests/postgres/relational.rs index dcd4e770014..4a029709408 100644 --- 
a/store/test-store/tests/postgres/relational.rs +++ b/store/test-store/tests/postgres/relational.rs @@ -3,9 +3,10 @@ use diesel_async::SimpleAsyncConnection; use graph::components::store::write::{EntityModification, RowGroup}; use graph::data::store::scalar; use graph::entity; +use graph::prelude::alloy::primitives::B256; use graph::prelude::{ - o, slog, web3::types::H256, DeploymentHash, Entity, EntityCollection, EntityFilter, - EntityOrder, EntityQuery, Logger, StopwatchMetrics, Value, ValueType, BLOCK_NUMBER_MAX, + o, slog, DeploymentHash, Entity, EntityCollection, EntityFilter, EntityOrder, EntityQuery, + Logger, StopwatchMetrics, Value, ValueType, BLOCK_NUMBER_MAX, }; use graph::prelude::{BlockNumber, MetricsRegistry}; use graph::schema::{EntityKey, EntityType, InputSchema}; @@ -185,13 +186,13 @@ lazy_static! { static ref LARGE_INT: BigInt = BigInt::from(i64::MAX).pow(17).unwrap(); static ref LARGE_DECIMAL: BigDecimal = BigDecimal::from(1) / BigDecimal::new(LARGE_INT.clone(), 1); - static ref BYTES_VALUE: H256 = H256::from(hex!( + static ref BYTES_VALUE: B256 = B256::from(hex!( "e8b3b02b936c4a4a331ac691ac9a86e197fb7731f14e3108602c87d4dac55160" )); - static ref BYTES_VALUE2: H256 = H256::from(hex!( + static ref BYTES_VALUE2: B256 = B256::from(hex!( "b98fb783b49de5652097a989414c767824dff7e7fd765a63b493772511db81c1" )); - static ref BYTES_VALUE3: H256 = H256::from(hex!( + static ref BYTES_VALUE3: B256 = B256::from(hex!( "977c084229c72a0fa377cae304eda9099b6a2cb5d83b25cdf0f0969b69874255" )); static ref SCALAR_ENTITY: Entity = { diff --git a/store/test-store/tests/postgres/relational_bytes.rs b/store/test-store/tests/postgres/relational_bytes.rs index 7eab03c5df5..470d4e17412 100644 --- a/store/test-store/tests/postgres/relational_bytes.rs +++ b/store/test-store/tests/postgres/relational_bytes.rs @@ -5,6 +5,7 @@ use graph::components::store::write::RowGroup; use graph::data::store::scalar; use graph::data_source::CausalityRegion; use graph::entity; +use graph::prelude::alloy::primitives::B256; use graph::prelude::{BlockNumber, EntityModification, EntityQuery, MetricsRegistry, StoreError}; use graph::schema::{EntityKey, EntityType, InputSchema}; use graph_store_postgres::AsyncPgConnection; @@ -17,9 +18,9 @@ use std::{collections::BTreeMap, sync::Arc}; use graph::data::store::scalar::{BigDecimal, BigInt}; use graph::data::store::IdList; use graph::prelude::{ - o, slog, web3::types::H256, AttributeNames, ChildMultiplicity, DeploymentHash, Entity, - EntityCollection, EntityLink, EntityWindow, Logger, ParentLink, StopwatchMetrics, - WindowAttribute, BLOCK_NUMBER_MAX, + o, slog, AttributeNames, ChildMultiplicity, DeploymentHash, Entity, EntityCollection, + EntityLink, EntityWindow, Logger, ParentLink, StopwatchMetrics, WindowAttribute, + BLOCK_NUMBER_MAX, }; use graph_store_postgres::{ layout_for_tests::make_dummy_site, @@ -46,13 +47,13 @@ lazy_static! 
{ static ref LARGE_INT: BigInt = BigInt::from(i64::MAX).pow(17).unwrap(); static ref LARGE_DECIMAL: BigDecimal = BigDecimal::from(1) / BigDecimal::new(LARGE_INT.clone(), 1); - static ref BYTES_VALUE: H256 = H256::from(hex!( + static ref BYTES_VALUE: B256 = B256::from(hex!( "e8b3b02b936c4a4a331ac691ac9a86e197fb7731f14e3108602c87d4dac55160" )); - static ref BYTES_VALUE2: H256 = H256::from(hex!( + static ref BYTES_VALUE2: B256 = B256::from(hex!( "b98fb783b49de5652097a989414c767824dff7e7fd765a63b493772511db81c1" )); - static ref BYTES_VALUE3: H256 = H256::from(hex!( + static ref BYTES_VALUE3: B256 = B256::from(hex!( "977c084229c72a0fa377cae304eda9099b6a2cb5d83b25cdf0f0969b69874255" )); static ref BEEF_ENTITY: Entity = entity! { THINGS_SCHEMA => diff --git a/store/test-store/tests/postgres/store.rs b/store/test-store/tests/postgres/store.rs index 0059032e3ba..f85a9e89e2d 100644 --- a/store/test-store/tests/postgres/store.rs +++ b/store/test-store/tests/postgres/store.rs @@ -3,6 +3,7 @@ use graph::blockchain::BlockTime; use graph::data::graphql::ext::TypeDefinitionExt; use graph::data::subgraph::schema::DeploymentCreate; use graph::data_source::common::MappingABI; +use graph::prelude::alloy::primitives::{Address, B256}; use graph::schema::{EntityType, InputSchema}; use graph_chain_ethereum::Mapping; use hex_literal::hex; @@ -15,13 +16,11 @@ use graph::data::subgraph::*; use graph::{ blockchain::DataSource, components::store::{BlockStore as _, EntityFilter, EntityOrder, EntityQuery, StatusStore}, - prelude::ethabi::Contract, }; use graph::{data::store::scalar, semver::Version}; use graph::{entity, prelude::*}; use graph_store_postgres::layout_for_tests::STRING_PREFIX_SIZE; use graph_store_postgres::{Store as DieselStore, SubgraphStore as DieselSubgraphStore}; -use web3::types::{Address, H256}; const USER_GQL: &str = " interface ColorAndAge { @@ -65,56 +64,56 @@ lazy_static! 
{ InputSchema::parse_latest(USER_GQL, TEST_SUBGRAPH_ID.clone()) .expect("Failed to parse user schema"); static ref TEST_BLOCK_0_PTR: BlockPtr = ( - H256::from(hex!( + B256::from(hex!( "bd34884280958002c51d3f7b5f853e6febeba33de0f40d15b0363006533c924f" )), 0u64 ) .into(); static ref TEST_BLOCK_1_PTR: BlockPtr = ( - H256::from(hex!( + B256::from(hex!( "8511fa04b64657581e3f00e14543c1d522d5d7e771b54aa3060b662ade47da13" )), 1u64 ) .into(); static ref TEST_BLOCK_2_PTR: BlockPtr = ( - H256::from(hex!( + B256::from(hex!( "b98fb783b49de5652097a989414c767824dff7e7fd765a63b493772511db81c1" )), 2u64 ) .into(); static ref TEST_BLOCK_3_PTR: BlockPtr = ( - H256::from(hex!( + B256::from(hex!( "977c084229c72a0fa377cae304eda9099b6a2cb5d83b25cdf0f0969b69874255" )), 3u64 ) .into(); static ref TEST_BLOCK_3A_PTR: BlockPtr = ( - H256::from(hex!( + B256::from(hex!( "d163aec0592c7cb00c2700ab65dcaac93289f5d250b3b889b39198b07e1fbe4a" )), 3u64 ) .into(); static ref TEST_BLOCK_4_PTR: BlockPtr = ( - H256::from(hex!( + B256::from(hex!( "007a03cdf635ebb66f5e79ae66cc90ca23d98031665649db056ff9c6aac2d74d" )), 4u64 ) .into(); static ref TEST_BLOCK_4A_PTR: BlockPtr = ( - H256::from(hex!( + B256::from(hex!( "8fab27e9e9285b0a39110f4d9877f05d0f43d2effa157e55f4dcc49c3cf8cbd7" )), 4u64 ) .into(); static ref TEST_BLOCK_5_PTR: BlockPtr = ( - H256::from(hex!( + B256::from(hex!( "e8b3b02b936c4a4a331ac691ac9a86e197fb7731f14e3108602c87d4dac55160" )), 5u64 @@ -1133,19 +1132,18 @@ fn mock_data_source() -> graph_chain_ethereum::DataSource { fn mock_abi() -> MappingABI { MappingABI { name: "mock_abi".to_string(), - contract: Contract::load( + contract: serde_json::from_str( r#"[ - { - "inputs": [ - { - "name": "a", - "type": "address" - } - ], - "type": "constructor" - } - ]"# - .as_bytes(), + { + "inputs": [ + { + "name": "a", + "type": "address" + } + ], + "type": "constructor" + } + ]"#, ) .unwrap(), } diff --git a/store/test-store/tests/postgres/writable.rs b/store/test-store/tests/postgres/writable.rs index 93fd28c6d8f..2ac4eddd5ce 100644 --- a/store/test-store/tests/postgres/writable.rs +++ b/store/test-store/tests/postgres/writable.rs @@ -2,6 +2,7 @@ use graph::blockchain::block_stream::{EntitySourceOperation, FirehoseCursor}; use graph::data::subgraph::schema::DeploymentCreate; use graph::data::value::Word; use graph::data_source::CausalityRegion; +use graph::prelude::alloy::primitives::B256; use graph::schema::{EntityKey, EntityType, InputSchema}; use lazy_static::lazy_static; use std::collections::{BTreeMap, BTreeSet}; @@ -18,7 +19,6 @@ use graph::semver::Version; use graph::{entity, prelude::*}; use graph_store_postgres::layout_for_tests::writable; use graph_store_postgres::{Store as DieselStore, SubgraphStore as DieselSubgraphStore}; -use web3::types::H256; const SCHEMA_GQL: &str = " type Counter @entity { @@ -145,7 +145,7 @@ where } fn block_pointer(number: u8) -> BlockPtr { - let hash = H256::from([number; 32]); + let hash = B256::from([number; 32]); BlockPtr::from((hash, number as BlockNumber)) } diff --git a/tests/Cargo.toml b/tests/Cargo.toml index 0ce0757b049..015fbd43243 100644 --- a/tests/Cargo.toml +++ b/tests/Cargo.toml @@ -25,6 +25,10 @@ tokio = { version = "1.45.1", features = ["rt", "macros", "process"] } # uses until then secp256k1 = { version = "0.21", features = ["recovery"] } tokio-util.workspace = true +web3 = { git = "https://github.com/graphprotocol/rust-web3", branch = "graph-patches-onto-0.18", features = [ + "arbitrary_precision", + "test", +] } [dev-dependencies] anyhow = "1.0.100" diff --git 
a/tests/src/contract.rs b/tests/src/contract.rs index 2d3d72216f3..80a9ba57031 100644 --- a/tests/src/contract.rs +++ b/tests/src/contract.rs @@ -3,12 +3,13 @@ use std::str::FromStr; use graph::prelude::{ lazy_static, serde_json::{self, Value}, - web3::{ - api::{Eth, Namespace}, - contract::{tokens::Tokenize, Contract as Web3Contract, Options}, - transports::Http, - types::{Address, Block, BlockId, BlockNumber, Bytes, TransactionReceipt, H256}, - }, +}; + +use web3::{ + api::{Eth, Namespace}, + contract::{tokens::Tokenize, Contract as Web3Contract, Options}, + transports::Http, + types::{Address, Block, BlockId, BlockNumber, Bytes, TransactionReceipt, H256}, }; // web3 version 0.18 does not expose this; once the graph crate updates to // version 0.19, we can use web3::signing::SecretKey from the graph crate @@ -161,16 +162,16 @@ impl Contract { if contract.name == "DeclaredCallsContract" { status!("contracts", "Emitting transfers from DeclaredCallsContract"); let addr1 = "0x1111111111111111111111111111111111111111" - .parse::() + .parse::() .unwrap(); let addr2 = "0x2222222222222222222222222222222222222222" - .parse::() + .parse::() .unwrap(); let addr3 = "0x3333333333333333333333333333333333333333" - .parse::() + .parse::() .unwrap(); let addr4 = "0x4444444444444444444444444444444444444444" - .parse::() + .parse::() .unwrap(); contract diff --git a/tests/src/fixture/ethereum.rs b/tests/src/fixture/ethereum.rs index 96473c6ad8c..713df0af40f 100644 --- a/tests/src/fixture/ethereum.rs +++ b/tests/src/fixture/ethereum.rs @@ -6,16 +6,20 @@ use super::{ test_ptr, CommonChainConfig, MutexBlockStreamBuilder, NoopAdapterSelector, NoopRuntimeAdapterBuilder, StaticBlockRefetcher, StaticStreamBuilder, Stores, TestChain, }; +use graph::abi; +use graph::blockchain::block_stream::BlockWithTriggers; use graph::blockchain::block_stream::{EntityOperationKind, EntitySourceOperation}; use graph::blockchain::client::ChainClient; use graph::blockchain::{BlockPtr, Trigger, TriggersAdapterSelector}; use graph::cheap_clone::CheapClone; use graph::data_source::subgraph; -use graph::prelude::ethabi::ethereum_types::H256; -use graph::prelude::web3::types::{Address, Log, Transaction, H160}; -use graph::prelude::{ethabi, tiny_keccak, DeploymentHash, Entity, LightEthereumBlock, ENV_VARS}; +use graph::prelude::alloy::primitives::{Address, B256, U256}; +use graph::prelude::alloy::rpc::types::BlockTransactions; +use graph::prelude::{ + create_dummy_transaction, create_minimal_block_for_test, tiny_keccak, DeploymentHash, Entity, + LightEthereumBlock, ENV_VARS, +}; use graph::schema::EntityType; -use graph::{blockchain::block_stream::BlockWithTriggers, prelude::ethabi::ethereum_types::U64}; use graph_chain_ethereum::network::EthereumNetworkAdapters; use graph_chain_ethereum::trigger::LogRef; use graph_chain_ethereum::Chain; @@ -76,12 +80,11 @@ pub async fn chain( pub fn genesis() -> BlockWithTriggers { let ptr = test_ptr(0); + + let block = create_minimal_block_for_test(ptr.number as u64, ptr.hash.as_b256()); + BlockWithTriggers:: { - block: BlockFinality::Final(Arc::new(LightEthereumBlock { - hash: Some(H256::from_slice(ptr.hash.as_slice())), - number: Some(U64::from(ptr.number)), - ..Default::default() - })), + block: BlockFinality::Final(Arc::new(LightEthereumBlock::new(block.into()))), trigger_data: vec![Trigger::Chain(EthereumTrigger::Block( ptr, EthereumBlockTriggerType::End, @@ -101,7 +104,7 @@ pub fn generate_empty_blocks_for_range( let parent_ptr = blocks.last().map(|b| b.ptr()).unwrap_or(parent_ptr.clone()); let 
ptr = BlockPtr { number: i, - hash: H256::from_low_u64_be(i as u64 + add_to_hash).into(), + hash: B256::from(U256::from(i as u64 + add_to_hash)).into(), }; blocks.push(empty_block(parent_ptr, ptr)); } @@ -113,25 +116,19 @@ pub fn empty_block(parent_ptr: BlockPtr, ptr: BlockPtr) -> BlockWithTriggers parent_ptr.number); - // A 0x000.. transaction is used so `push_test_log` can use it - let transactions = vec![Transaction { - hash: H256::zero(), - block_hash: Some(H256::from_slice(ptr.hash.as_slice())), - block_number: Some(ptr.number.into()), - transaction_index: Some(0.into()), - from: Some(H160::zero()), - to: Some(H160::zero()), - ..Default::default() - }]; + let dummy_txn = + create_dummy_transaction(ptr.number as u64, ptr.hash.as_b256(), Some(0), B256::ZERO); + let transactions = BlockTransactions::Full(vec![dummy_txn]); + let alloy_block = create_minimal_block_for_test(ptr.number as u64, ptr.hash.as_b256()) + .map_header(|mut header| { + // Ensure the parent hash matches the given parent_ptr so that parent_ptr() lookups succeed + header.inner.parent_hash = parent_ptr.hash.as_b256(); + header + }) + .with_transactions(transactions); BlockWithTriggers:: { - block: BlockFinality::Final(Arc::new(LightEthereumBlock { - hash: Some(H256::from_slice(ptr.hash.as_slice())), - number: Some(U64::from(ptr.number)), - parent_hash: H256::from_slice(parent_ptr.hash.as_slice()), - transactions, - ..Default::default() - })), + block: BlockFinality::Final(Arc::new(LightEthereumBlock::new(alloy_block.into()))), trigger_data: vec![Trigger::Chain(EthereumTrigger::Block( ptr, EthereumBlockTriggerType::End, @@ -140,19 +137,25 @@ pub fn empty_block(parent_ptr: BlockPtr, ptr: BlockPtr) -> BlockWithTriggers, payload: impl Into) { + use graph::prelude::alloy::{self, primitives::LogData, rpc::types::Log}; + let log = Arc::new(Log { - address: Address::zero(), - topics: vec![tiny_keccak::keccak256(b"TestEvent(string)").into()], - data: ethabi::encode(&[ethabi::Token::String(payload.into())]).into(), - block_hash: Some(H256::from_slice(block.ptr().hash.as_slice())), - block_number: Some(block.ptr().number.into()), - transaction_hash: Some(H256::from_low_u64_be(0)), - transaction_index: Some(0.into()), - log_index: Some(0.into()), - transaction_log_index: Some(0.into()), - log_type: None, - removed: None, + inner: alloy::primitives::Log { + address: Address::ZERO, + data: LogData::new_unchecked( + vec![tiny_keccak::keccak256(b"TestEvent(string)").into()], + abi::DynSolValue::String(payload.into()).abi_encode().into(), + ), + }, + block_hash: Some(B256::from_slice(block.ptr().hash.as_slice())), + block_number: Some(block.ptr().number as u64), + transaction_hash: Some(B256::from(U256::from(0))), + transaction_index: Some(0), + log_index: Some(0), + block_timestamp: None, + removed: false, }); + block .trigger_data .push(Trigger::Chain(EthereumTrigger::Log(LogRef::FullLog( @@ -190,23 +193,30 @@ pub fn push_test_command( test_command: impl Into, data: impl Into, ) { + use graph::prelude::alloy::{self, primitives::LogData, rpc::types::Log}; + let log = Arc::new(Log { - address: Address::zero(), - topics: vec![tiny_keccak::keccak256(b"TestEvent(string,string)").into()], - data: ethabi::encode(&[ - ethabi::Token::String(test_command.into()), - ethabi::Token::String(data.into()), - ]) - .into(), - block_hash: Some(H256::from_slice(block.ptr().hash.as_slice())), - block_number: Some(block.ptr().number.into()), - transaction_hash: Some(H256::from_low_u64_be(0)), - transaction_index: Some(0.into()), - log_index: Some(0.into()), 
- transaction_log_index: Some(0.into()), - log_type: None, - removed: None, + inner: alloy::primitives::Log { + address: Address::ZERO, + data: LogData::new_unchecked( + vec![tiny_keccak::keccak256(b"TestEvent(string,string)").into()], + abi::DynSolValue::Tuple(vec![ + abi::DynSolValue::String(test_command.into()), + abi::DynSolValue::String(data.into()), + ]) + .abi_encode_params() + .into(), + ), + }, + block_hash: Some(block.ptr().hash.as_b256()), + block_number: Some(block.ptr().number as u64), + transaction_hash: Some(B256::from(U256::from(0))), + transaction_index: Some(0), + log_index: Some(0), + block_timestamp: None, + removed: false, }); + block .trigger_data .push(Trigger::Chain(EthereumTrigger::Log(LogRef::FullLog( diff --git a/tests/src/fixture/mod.rs b/tests/src/fixture/mod.rs index 62000dc5e8e..f3caaa636a6 100644 --- a/tests/src/fixture/mod.rs +++ b/tests/src/fixture/mod.rs @@ -38,7 +38,8 @@ use graph::http_body_util::Full; use graph::hyper::body::Bytes; use graph::hyper::Request; use graph::ipfs::{IpfsClient, IpfsMetrics}; -use graph::prelude::ethabi::ethereum_types::H256; +use graph::prelude::alloy::primitives::B256; +use graph::prelude::alloy::primitives::U256; use graph::prelude::serde_json::{self, json}; use graph::prelude::{ lazy_static, q, r, ApiVersion, BigInt, BlockNumber, DeploymentHash, GraphQlRunner as _, @@ -71,7 +72,7 @@ pub fn test_ptr(n: BlockNumber) -> BlockPtr { // Set n as the low bits and `reorg_n` as the high bits of the hash. pub fn test_ptr_reorged(n: BlockNumber, reorg_n: u32) -> BlockPtr { - let mut hash = H256::from_low_u64_be(n as u64); + let mut hash = B256::from(U256::from(n as u64)); hash[0..4].copy_from_slice(&reorg_n.to_be_bytes()); BlockPtr { hash: hash.into(), diff --git a/tests/tests/integration_tests.rs b/tests/tests/integration_tests.rs index 322eb643533..b5c7d3405ca 100644 --- a/tests/tests/integration_tests.rs +++ b/tests/tests/integration_tests.rs @@ -17,13 +17,13 @@ use anyhow::{anyhow, bail, Context, Result}; use graph::futures03::StreamExt; use graph::itertools::Itertools; use graph::prelude::serde_json::{json, Value}; -use graph::prelude::web3::types::U256; use graph_tests::contract::Contract; use graph_tests::subgraph::Subgraph; use graph_tests::{error, status, CONFIG}; use tokio::process::Child; use tokio::task::JoinError; use tokio::time::sleep; +use web3::types::U256; const SUBGRAPH_LAST_GRAFTING_BLOCK: i32 = 3; diff --git a/tests/tests/runner_tests.rs b/tests/tests/runner_tests.rs index 046a1476491..0b846b7c49a 100644 --- a/tests/tests/runner_tests.rs +++ b/tests/tests/runner_tests.rs @@ -15,8 +15,7 @@ use graph::data_source::CausalityRegion; use graph::env::{EnvVars, TEST_WITH_NO_REORG}; use graph::ipfs::test_utils::add_files_to_local_ipfs_node_for_testing; use graph::object; -use graph::prelude::ethabi::ethereum_types::H256; -use graph::prelude::web3::types::Address; +use graph::prelude::alloy::primitives::{Address, B256, U256}; use graph::prelude::{hex, CheapClone, SubgraphName, SubgraphStore}; use graph_tests::fixture::ethereum::{ chain, empty_block, generate_empty_blocks_for_range, genesis, push_test_command, push_test_log, @@ -67,7 +66,7 @@ async fn data_source_revert() -> anyhow::Result<()> { let block1 = empty_block(block0.ptr(), test_ptr(1)); let block1_reorged_ptr = BlockPtr { number: 1, - hash: H256::from_low_u64_be(12).into(), + hash: B256::from(U256::from(12)).into(), }; let block1_reorged = empty_block(block0.ptr(), block1_reorged_ptr.clone()); let block2 = empty_block(block1_reorged_ptr, test_ptr(2)); @@ -187,7 
+186,7 @@ async fn typename() -> anyhow::Result<()> { let block_1 = empty_block(block_0.ptr(), test_ptr(1)); let block_1_reorged_ptr = BlockPtr { number: 1, - hash: H256::from_low_u64_be(12).into(), + hash: B256::from(U256::from(12)).into(), }; let block_1_reorged = empty_block(block_0.ptr(), block_1_reorged_ptr); let block_2 = empty_block(block_1_reorged.ptr(), test_ptr(2)); @@ -1015,7 +1014,7 @@ async fn retry_create_ds() { let block1 = empty_block(block0.ptr(), test_ptr(1)); let block1_reorged_ptr = BlockPtr { number: 1, - hash: H256::from_low_u64_be(12).into(), + hash: B256::from(U256::from(12)).into(), }; let block1_reorged = empty_block(block0.ptr(), block1_reorged_ptr); let block2 = empty_block(block1_reorged.ptr(), test_ptr(2)); From f7242f9fcfb5ae5ade0943831f2c03165bafb099 Mon Sep 17 00:00:00 2001 From: incrypto32 Date: Thu, 22 Jan 2026 12:34:00 +0400 Subject: [PATCH 2/2] fix clippy warnings --- chain/ethereum/src/codec.rs | 144 ++++++++++++---------- chain/ethereum/src/ethereum_adapter.rs | 2 +- chain/ethereum/src/tests.rs | 16 ++- graph/src/abi/event_ext.rs | 2 +- graph/src/abi/function_ext.rs | 12 +- graph/src/amp/sql/query_builder/parser.rs | 8 +- graph/src/components/ethereum/types.rs | 10 +- graph/src/data_source/common.rs | 19 +-- graph/src/util/test_utils.rs | 8 +- store/postgres/src/deployment.rs | 2 +- store/test-store/src/block_store.rs | 14 +-- 11 files changed, 130 insertions(+), 107 deletions(-) diff --git a/chain/ethereum/src/codec.rs b/chain/ethereum/src/codec.rs index 0041335f659..a3bfb75ad49 100644 --- a/chain/ethereum/src/codec.rs +++ b/chain/ethereum/src/codec.rs @@ -1,4 +1,5 @@ #[rustfmt::skip] +#[allow(clippy::doc_lazy_continuation, clippy::doc_overindented_list_items)] #[path = "protobuf/sf.ethereum.r#type.v2.rs"] mod pbcodec; @@ -844,6 +845,33 @@ impl BlockchainBlock for HeaderOnlyBlock { } } +fn extract_signature_from_trace( + _trace: &TransactionTrace, + _tx_type: TxType, +) -> Result { + use alloy::primitives::{Signature as PrimitiveSignature, U256}; + + // Create a dummy signature with r = 0, s = 0 and even y-parity (false) + let dummy = PrimitiveSignature::new(U256::ZERO, U256::ZERO, false); + + Ok(dummy) +} + +fn get_to_address(trace: &TransactionTrace) -> Result, Error> { + // Try to detect contract creation transactions, which have no 'to' address + let is_contract_creation = trace.to.is_empty() + || trace + .calls + .first() + .is_some_and(|call| CallType::try_from(call.call_type) == Ok(CallType::Create)); + + if is_contract_creation { + Ok(None) + } else { + Ok(Some(trace.to.try_decode_proto("transaction to address")?)) + } +} + #[cfg(test)] mod test { use graph::{blockchain::Block as _, prelude::chrono::Utc}; @@ -886,25 +914,31 @@ mod test { use graph::prelude::alloy::network::AnyTxEnvelope; use graph::prelude::alloy::primitives::B256; - let mut block = Block::default(); - let mut header = BlockHeader::default(); - header.number = 123456; - header.timestamp = Some(Timestamp { - seconds: 1234567890, - nanos: 0, - }); - block.header = Some(header); - block.number = 123456; - block.hash = vec![0u8; 32]; - - let mut trace = TransactionTrace::default(); - trace.r#type = 126; // 0x7e Optimism deposit transaction - trace.hash = vec![1u8; 32]; - trace.from = vec![2u8; 20]; - trace.to = vec![3u8; 20]; - trace.nonce = 42; - trace.gas_limit = 21000; - trace.index = 0; + let header = BlockHeader { + number: 123456, + timestamp: Some(Timestamp { + seconds: 1234567890, + nanos: 0, + }), + ..Default::default() + }; + let block = Block { + header: Some(header), 
+            number: 123456,
+            hash: vec![0u8; 32],
+            ..Default::default()
+        };
+
+        let trace = TransactionTrace {
+            r#type: 126, // 0x7e Optimism deposit transaction
+            hash: vec![1u8; 32],
+            from: vec![2u8; 20],
+            to: vec![3u8; 20],
+            nonce: 42,
+            gas_limit: 21000,
+            index: 0,
+            ..Default::default()
+        };
 
         let trace_at = TransactionTraceAt::new(&trace, &block);
         let result: Result<
@@ -941,25 +975,32 @@ mod test {
         use super::transaction_trace_to_alloy_txn_reciept;
         use crate::codec::TransactionTrace;
 
-        let mut block = Block::default();
-        let mut header = BlockHeader::default();
-        header.number = 123456;
-        block.header = Some(header);
-        block.hash = vec![0u8; 32];
-
-        let mut trace = TransactionTrace::default();
-        trace.r#type = 126; // 0x7e Optimism deposit transaction
-        trace.hash = vec![1u8; 32];
-        trace.from = vec![2u8; 20];
-        trace.to = vec![3u8; 20];
-        trace.index = 0;
-        trace.gas_used = 21000;
-        trace.status = 1;
-
-        let mut receipt = super::TransactionReceipt::default();
-        receipt.cumulative_gas_used = 21000;
-        receipt.logs_bloom = vec![0u8; 256];
-        trace.receipt = Some(receipt);
+        let header = BlockHeader {
+            number: 123456,
+            ..Default::default()
+        };
+        let block = Block {
+            header: Some(header),
+            hash: vec![0u8; 32],
+            ..Default::default()
+        };
+
+        let receipt = super::TransactionReceipt {
+            cumulative_gas_used: 21000,
+            logs_bloom: vec![0u8; 256],
+            ..Default::default()
+        };
+        let trace = TransactionTrace {
+            r#type: 126, // 0x7e Optimism deposit transaction
+            hash: vec![1u8; 32],
+            from: vec![2u8; 20],
+            to: vec![3u8; 20],
+            index: 0,
+            gas_used: 21000,
+            status: 1,
+            receipt: Some(receipt),
+            ..Default::default()
+        };
 
         let result = transaction_trace_to_alloy_txn_reciept(&trace, &block);
 
@@ -978,30 +1019,3 @@ mod test {
         assert_eq!(receipt.transaction_index, Some(0));
     }
 }
-
-fn extract_signature_from_trace(
-    _trace: &TransactionTrace,
-    _tx_type: TxType,
-) -> Result {
-    use alloy::primitives::{Signature as PrimitiveSignature, U256};
-
-    // Create a dummy signature with r = 0, s = 0 and even y-parity (false)
-    let dummy = PrimitiveSignature::new(U256::ZERO, U256::ZERO, false);
-
-    Ok(dummy)
-}
-
-fn get_to_address(trace: &TransactionTrace) -> Result, Error> {
-    // Try to detect contract creation transactions, which have no 'to' address
-    let is_contract_creation = trace.to.is_empty()
-        || trace
-            .calls
-            .first()
-            .is_some_and(|call| CallType::try_from(call.call_type) == Ok(CallType::Create));
-
-    if is_contract_creation {
-        Ok(None)
-    } else {
-        Ok(Some(trace.to.try_decode_proto("transaction to address")?))
-    }
-}
diff --git a/chain/ethereum/src/ethereum_adapter.rs b/chain/ethereum/src/ethereum_adapter.rs
index 49d5fc0d01e..500251121e8 100644
--- a/chain/ethereum/src/ethereum_adapter.rs
+++ b/chain/ethereum/src/ethereum_adapter.rs
@@ -1483,7 +1483,7 @@ impl EthereumAdapterTrait for EthereumAdapter {
             let encoded_call = call
                 .function
                 .abi_encode_input(&call.args)
-                .map_err(|err| ContractCallError::EncodingError(err))?;
+                .map_err(ContractCallError::EncodingError)?;
 
             call::Request::new(call.address, encoded_call, index)
         };
diff --git a/chain/ethereum/src/tests.rs b/chain/ethereum/src/tests.rs
index ab5907a7c40..5fa236323a4 100644
--- a/chain/ethereum/src/tests.rs
+++ b/chain/ethereum/src/tests.rs
@@ -77,9 +77,11 @@ fn test_trigger_ordering() {
     };
     let call1 = EthereumTrigger::Call(Arc::new(call1));
 
-    let mut call2 = EthereumCall::default();
-    call2.transaction_index = 2;
-    call2.input = Bytes::from(vec![0]);
+    let call2 = EthereumCall {
+        transaction_index: 2,
+        input: Bytes::from(vec![0]),
+        ..Default::default()
+    };
     let call2 = EthereumTrigger::Call(Arc::new(call2));
 
     let call3 = EthereumCall {
@@ -89,10 +91,12 @@ fn test_trigger_ordering() {
     let call3 = EthereumTrigger::Call(Arc::new(call3));
 
     // Call with the same tx index as call2
-    let mut call4 = EthereumCall::default();
-    call4.transaction_index = 2; // different than call2 so they don't get mistaken as the same
-    call4.input = Bytes::from(vec![1]);
+    let call4 = EthereumCall {
+        transaction_index: 2,
+        input: Bytes::from(vec![1]),
+        ..Default::default()
+    };
     let call4 = EthereumTrigger::Call(Arc::new(call4));
 
     // Event with transaction_index 1 and log_index 0;
diff --git a/graph/src/abi/event_ext.rs b/graph/src/abi/event_ext.rs
index 94088dfcaae..383252a379f 100644
--- a/graph/src/abi/event_ext.rs
+++ b/graph/src/abi/event_ext.rs
@@ -14,7 +14,7 @@ pub trait EventExt {
 impl EventExt for Event {
     fn decode_log(&self, log: &Log) -> Result> {
         let log_data = log.data();
-        let decoded_event = alloy::dyn_abi::EventExt::decode_log(self, &log_data)?;
+        let decoded_event = alloy::dyn_abi::EventExt::decode_log(self, log_data)?;
 
         let mut indexed: VecDeque<DynSolValue> = decoded_event.indexed.into();
         let mut body: VecDeque<DynSolValue> = decoded_event.body.into();
diff --git a/graph/src/abi/function_ext.rs b/graph/src/abi/function_ext.rs
index 3264dd10a35..52696beac23 100644
--- a/graph/src/abi/function_ext.rs
+++ b/graph/src/abi/function_ext.rs
@@ -52,12 +52,12 @@ impl FunctionExt for Function {
 
         let mut sig = String::with_capacity(sig_cap);
 
-        sig.push_str(&name);
-        signature_part(&inputs, &mut sig);
+        sig.push_str(name);
+        signature_part(inputs, &mut sig);
 
         if !outputs.is_empty() {
             sig.push(':');
-            signature_part(&outputs, &mut sig);
+            signature_part(outputs, &mut sig);
         }
 
         sig
@@ -122,12 +122,12 @@ fn signature_part(params: &[Param], out: &mut String) {
         1 => {
            params[0].selector_type_raw(out);
         }
-        n => {
+        _ => {
            params[0].selector_type_raw(out);
 
-            for i in 1..n {
+            for param in params.iter().skip(1) {
                 out.push(',');
-                params[i].selector_type_raw(out);
+                param.selector_type_raw(out);
             }
         }
     }
diff --git a/graph/src/amp/sql/query_builder/parser.rs b/graph/src/amp/sql/query_builder/parser.rs
index 1f965b955b6..2e40b0e53a3 100644
--- a/graph/src/amp/sql/query_builder/parser.rs
+++ b/graph/src/amp/sql/query_builder/parser.rs
@@ -42,15 +42,15 @@ struct AllowOnlySelectQueries;
 
 impl AllowOnlySelectQueries {
     /// Returns an error if the `set_expr` is not a `SELECT` expression.
-    fn visit_set_expr(&self, set_expr: &ast::SetExpr) -> Result<()> {
+    fn visit_set_expr(set_expr: &ast::SetExpr) -> Result<()> {
         match set_expr {
             ast::SetExpr::Select(_)
             | ast::SetExpr::Query(_)
             | ast::SetExpr::Values(_)
             | ast::SetExpr::Table(_) => Ok(()),
             ast::SetExpr::SetOperation { left, right, .. } => {
-                self.visit_set_expr(left)?;
-                self.visit_set_expr(right)?;
+                Self::visit_set_expr(left)?;
+                Self::visit_set_expr(right)?;
                 Ok(())
             }
             ast::SetExpr::Insert(_) | ast::SetExpr::Update(_) | ast::SetExpr::Delete(_) => {
@@ -64,7 +64,7 @@ impl Visitor for AllowOnlySelectQueries {
     type Break = anyhow::Error;
 
     fn pre_visit_query(&mut self, query: &ast::Query) -> ControlFlow<Self::Break> {
-        match self.visit_set_expr(&query.body) {
+        match Self::visit_set_expr(&query.body) {
             Ok(()) => ControlFlow::Continue(()),
             Err(e) => ControlFlow::Break(e),
         }
diff --git a/graph/src/components/ethereum/types.rs b/graph/src/components/ethereum/types.rs
index c9a4174bf0a..a6c21aacb8a 100644
--- a/graph/src/components/ethereum/types.rs
+++ b/graph/src/components/ethereum/types.rs
@@ -92,7 +92,7 @@ impl LightEthereumBlockExt for AnyBlock {
         log.transaction_hash.and_then(|hash| {
             self.transactions
                 .txns()
-                .find(|tx| &tx.tx_hash() == &hash)
+                .find(|tx| tx.tx_hash() == hash)
                 .cloned()
         })
     }
@@ -101,7 +101,7 @@ impl LightEthereumBlockExt for AnyBlock {
         call.transaction_hash.and_then(|hash| {
             self.transactions
                 .txns()
-                .find(|tx| &tx.tx_hash() == &hash)
+                .find(|tx| tx.tx_hash() == hash)
                 .cloned()
         })
     }
@@ -227,15 +227,15 @@ impl EthereumCall {
 
         // The only traces without transactions are those from Parity block reward contracts, we
        // don't support triggering on that.
-        let transaction_index = trace.transaction_position? as u64;
+        let transaction_index = trace.transaction_position?;
 
         Some(EthereumCall {
             from: call.from,
             to: call.to,
             value: call.value,
-            gas_used: gas_used,
+            gas_used,
             input: call.input.clone(),
-            output: output,
+            output,
             block_number: BlockNumber::try_from(
                 trace
                     .block_number
diff --git a/graph/src/data_source/common.rs b/graph/src/data_source/common.rs
index 8f38fa2e94d..bc9b27b5bd6 100644
--- a/graph/src/data_source/common.rs
+++ b/graph/src/data_source/common.rs
@@ -270,9 +270,11 @@ impl AbiJson {
                         return Ok(Some(vec![]));
                     }
                     // Recursively resolve the nested path
-                    return self
-                        .resolve_field_path(components, nested_path)
-                        .map(Some);
+                    return Self::resolve_field_path(
+                        components,
+                        nested_path,
+                    )
+                    .map(Some);
                 }
             }
         }
@@ -297,7 +299,6 @@ impl AbiJson {
     /// Supports both numeric indices and field names
     /// Returns the index path to access the final field
     fn resolve_field_path(
-        &self,
         components: &serde_json::Value,
         field_path: &[&str],
     ) -> Result, Error> {
@@ -334,7 +335,7 @@ impl AbiJson {
                         // Recursively resolve the remaining path
                         let mut result = vec![index];
                         let nested_result =
-                            self.resolve_field_path(nested_components, remaining_path)?;
+                            Self::resolve_field_path(nested_components, remaining_path)?;
                         result.extend(nested_result);
                         return Ok(result);
                     } else {
@@ -374,8 +375,10 @@ impl AbiJson {
                    if let Some(nested_components) = component.get("components") {
                         // Recursively resolve the remaining path
                         let mut result = vec![index];
-                        let nested_result =
-                            self.resolve_field_path(nested_components, remaining_path)?;
+                        let nested_result = Self::resolve_field_path(
+                            nested_components,
+                            remaining_path,
+                        )?;
                         result.extend(nested_result);
                         return Ok(result);
                     } else {
@@ -1782,7 +1785,7 @@ mod tests {
         // Test scenario 1: Unknown parameter
         let inner_log = alloy::primitives::Log {
             address: Address::ZERO,
-            data: alloy::primitives::LogData::new_unchecked(vec![].into(), vec![].into()),
+            data: alloy::primitives::LogData::new_unchecked(vec![], vec![].into()),
         };
         let log = Log {
             inner: inner_log,
diff --git a/graph/src/util/test_utils.rs b/graph/src/util/test_utils.rs
index 38b618560c4..d5240e21faa 100644
--- a/graph/src/util/test_utils.rs
+++ b/graph/src/util/test_utils.rs
@@ -9,8 +9,10 @@ use crate::prelude::alloy::rpc::types::{Block, Header};
 /// Creates a minimal Alloy Block for testing purposes.
 pub fn create_minimal_block_for_test(block_number: u64, block_hash: B256) -> Block {
     // Create consensus header with defaults, but set the specific number
-    let mut consensus_header = ConsensusHeader::default();
-    consensus_header.number = block_number;
+    let consensus_header = ConsensusHeader {
+        number: block_number,
+        ..Default::default()
+    };
 
     // Create RPC header with the specific hash
     let rpc_header = Header {
@@ -51,7 +53,7 @@ pub fn create_dummy_transaction(
         inner: recovered,
         block_hash: Some(block_hash),
         block_number: Some(block_number),
-        transaction_index: transaction_index,
+        transaction_index,
         effective_gas_price: None,
     }
 }
diff --git a/store/postgres/src/deployment.rs b/store/postgres/src/deployment.rs
index f3d4a9f1362..15e64321a99 100644
--- a/store/postgres/src/deployment.rs
+++ b/store/postgres/src/deployment.rs
@@ -966,7 +966,7 @@ pub async fn update_deployment_status(
            d::failed.eq(health.is_failed()),
            d::health.eq(health),
            d::fatal_error.eq::>(fatal_error),
-            d::non_fatal_errors.eq::>(non_fatal_errors.unwrap_or(vec![])),
+            d::non_fatal_errors.eq::>(non_fatal_errors.unwrap_or_default()),
         ))
         .execute(conn)
         .await
diff --git a/store/test-store/src/block_store.rs b/store/test-store/src/block_store.rs
index 2385574e527..77bacf0ae53 100644
--- a/store/test-store/src/block_store.rs
+++ b/store/test-store/src/block_store.rs
@@ -107,13 +107,13 @@ impl FakeBlock {
         let parent_hash = B256::from_str(self.parent_hash.as_str()).expect("invalid parent hash");
         let block_hash = B256::from_str(self.hash.as_str()).expect("invalid block hash");
 
-        let mut consensus_header = ConsensusHeader::default();
-        consensus_header.number = self.number as u64;
-        consensus_header.parent_hash = parent_hash;
-        consensus_header.logs_bloom = Bloom::default(); // Empty bloom filter for test blocks
-        if let Some(ts) = self.timestamp {
-            consensus_header.timestamp = ts.to::<u64>();
-        }
+        let consensus_header = ConsensusHeader {
+            number: self.number as u64,
+            parent_hash,
+            logs_bloom: Bloom::default(), // Empty bloom filter for test blocks
+            timestamp: self.timestamp.map(|ts| ts.to::<u64>()).unwrap_or_default(),
+            ..Default::default()
+        };
 
         let rpc_header = Header {
             hash: block_hash,