From c3c1002903f9f083f62e91cdb60d70efe962fdcd Mon Sep 17 00:00:00 2001 From: T Date: Wed, 27 Nov 2024 18:07:52 +0800 Subject: [PATCH 01/47] chroe: update dependencies (#411) - Move the dependencies (including version) to the workspace `Cargo.toml`. - Update the available dependencies. As update `alloy` to `0.4` (`0.5` and `0.6` of alloy could not work with integration test). - Keep the current version of `ethereum-types` and `rlp`, since both is related with our forked `eth-trie.rs`. Integration test could work. --- Cargo.lock | 830 +++++++++++++++++++++++++++++------------------------ 1 file changed, 450 insertions(+), 380 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 969cad600..59b6f18fd 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -74,9 +74,9 @@ dependencies = [ [[package]] name = "allocator-api2" -version = "0.2.20" +version = "0.2.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "45862d1c77f2228b9e10bc609d5bc203d86ebc9b87ad8d5d5167a6c9abf739d9" +checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923" [[package]] name = "alloy" @@ -101,11 +101,11 @@ dependencies = [ [[package]] name = "alloy-chains" -version = "0.1.47" +version = "0.1.57" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "18c5c520273946ecf715c0010b4e3503d7eba9893cd9ce6b7fff5654c4a3c470" +checksum = "4ab9d1367c6ffb90c93fb4a9a4989530aa85112438c6f73a734067255d348469" dependencies = [ - "alloy-primitives 0.8.14", + "alloy-primitives 0.8.19", "num_enum", "strum", ] @@ -117,7 +117,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ae09ffd7c29062431dd86061deefe4e3c6f07fa0d674930095f8dcedb0baf02c" dependencies = [ "alloy-eips", - "alloy-primitives 0.8.14", + "alloy-primitives 0.8.19", "alloy-rlp", "alloy-serde", "auto_impl", @@ -136,7 +136,7 @@ dependencies = [ "alloy-json-abi", "alloy-network", "alloy-network-primitives", - "alloy-primitives 0.8.14", + "alloy-primitives 0.8.19", 
"alloy-provider", "alloy-rpc-types-eth", "alloy-sol-types", @@ -148,32 +148,32 @@ dependencies = [ [[package]] name = "alloy-core" -version = "0.8.14" +version = "0.8.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3d14d531c99995de71558e8e2206c27d709559ee8e5a0452b965ea82405a013" +checksum = "648275bb59110f88cc5fa9a176845e52a554ebfebac2d21220bcda8c9220f797" dependencies = [ "alloy-dyn-abi", "alloy-json-abi", - "alloy-primitives 0.8.14", + "alloy-primitives 0.8.19", "alloy-rlp", "alloy-sol-types", ] [[package]] name = "alloy-dyn-abi" -version = "0.8.14" +version = "0.8.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "80759b3f57b3b20fa7cd8fef6479930fc95461b58ff8adea6e87e618449c8a1d" +checksum = "bc9138f4f0912793642d453523c3116bd5d9e11de73b70177aa7cb3e94b98ad2" dependencies = [ "alloy-json-abi", - "alloy-primitives 0.8.14", + "alloy-primitives 0.8.19", "alloy-sol-type-parser", "alloy-sol-types", "const-hex", "itoa", "serde", "serde_json", - "winnow", + "winnow 0.6.26", ] [[package]] @@ -182,7 +182,7 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0069cf0642457f87a01a014f6dc29d5d893cd4fd8fddf0c3cdfad1bb3ebafc41" dependencies = [ - "alloy-primitives 0.8.14", + "alloy-primitives 0.8.19", "alloy-rlp", "serde", ] @@ -193,7 +193,7 @@ version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4c986539255fb839d1533c128e190e557e52ff652c9ef62939e233a81dd93f7e" dependencies = [ - "alloy-primitives 0.8.14", + "alloy-primitives 0.8.19", "alloy-rlp", "derive_more 1.0.0", "serde", @@ -207,7 +207,7 @@ checksum = "5b6aa3961694b30ba53d41006131a2fca3bdab22e4c344e46db2c639e7c2dfdd" dependencies = [ "alloy-eip2930", "alloy-eip7702", - "alloy-primitives 0.8.14", + "alloy-primitives 0.8.19", "alloy-rlp", "alloy-serde", "c-kzg", @@ -223,18 +223,18 @@ version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum 
= "e53f7877ded3921d18a0a9556d55bedf84535567198c9edab2aa23106da91855" dependencies = [ - "alloy-primitives 0.8.14", + "alloy-primitives 0.8.19", "alloy-serde", "serde", ] [[package]] name = "alloy-json-abi" -version = "0.8.14" +version = "0.8.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ac4b22b3e51cac09fd2adfcc73b55f447b4df669f983c13f7894ec82b607c63f" +checksum = "24acd2f5ba97c7a320e67217274bc81fe3c3174b8e6144ec875d9d54e760e278" dependencies = [ - "alloy-primitives 0.8.14", + "alloy-primitives 0.8.19", "alloy-sol-type-parser", "serde", "serde_json", @@ -246,7 +246,7 @@ version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3694b7e480728c0b3e228384f223937f14c10caef5a4c766021190fc8f283d35" dependencies = [ - "alloy-primitives 0.8.14", + "alloy-primitives 0.8.19", "alloy-sol-types", "serde", "serde_json", @@ -264,7 +264,7 @@ dependencies = [ "alloy-eips", "alloy-json-rpc", "alloy-network-primitives", - "alloy-primitives 0.8.14", + "alloy-primitives 0.8.19", "alloy-rpc-types-eth", "alloy-serde", "alloy-signer", @@ -285,7 +285,7 @@ checksum = "df9f3e281005943944d15ee8491534a1c7b3cbf7a7de26f8c433b842b93eb5f9" dependencies = [ "alloy-consensus", "alloy-eips", - "alloy-primitives 0.8.14", + "alloy-primitives 0.8.19", "alloy-serde", "serde", ] @@ -297,7 +297,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c9805d126f24be459b958973c0569c73e1aadd27d4535eee82b2b6764aa03616" dependencies = [ "alloy-genesis", - "alloy-primitives 0.8.14", + "alloy-primitives 0.8.19", "k256", "rand", "serde_json", @@ -326,9 +326,9 @@ dependencies = [ [[package]] name = "alloy-primitives" -version = "0.8.14" +version = "0.8.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9db948902dfbae96a73c2fbf1f7abec62af034ab883e4c777c3fd29702bd6e2c" +checksum = "ec878088ec6283ce1e90d280316aadd3d6ce3de06ff63d68953c855e7e447e92" dependencies = [ "alloy-rlp", "bytes", @@ 
-338,8 +338,7 @@ dependencies = [ "foldhash", "getrandom 0.2.15", "hashbrown 0.15.2", - "hex-literal", - "indexmap 2.6.0", + "indexmap 2.7.1", "itoa", "k256", "keccak-asm", @@ -367,7 +366,7 @@ dependencies = [ "alloy-network", "alloy-network-primitives", "alloy-node-bindings", - "alloy-primitives 0.8.14", + "alloy-primitives 0.8.19", "alloy-rpc-client", "alloy-rpc-types-anvil", "alloy-rpc-types-eth", @@ -384,7 +383,7 @@ dependencies = [ "lru", "parking_lot", "pin-project", - "reqwest 0.12.9", + "reqwest 0.12.12", "schnellru", "serde", "serde_json", @@ -397,9 +396,9 @@ dependencies = [ [[package]] name = "alloy-rlp" -version = "0.3.9" +version = "0.3.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da0822426598f95e45dd1ea32a738dac057529a709ee645fcc516ffa4cbde08f" +checksum = "3d6c1d995bff8d011f7cd6c81820d51825e6e06d6db73914c1630ecf544d83d6" dependencies = [ "alloy-rlp-derive", "arrayvec 0.7.6", @@ -408,13 +407,13 @@ dependencies = [ [[package]] name = "alloy-rlp-derive" -version = "0.3.9" +version = "0.3.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b09cae092c27b6f1bde952653a22708691802e57bfef4a2973b80bea21efd3f" +checksum = "a40e1ef334153322fd878d07e86af7a529bcb86b2439525920a88eba87bcf943" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.96", ] [[package]] @@ -424,12 +423,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "374dbe0dc3abdc2c964f36b3d3edf9cdb3db29d16bda34aa123f03d810bec1dd" dependencies = [ "alloy-json-rpc", - "alloy-primitives 0.8.14", + "alloy-primitives 0.8.19", "alloy-transport", "alloy-transport-http", "futures", "pin-project", - "reqwest 0.12.9", + "reqwest 0.12.12", "serde", "serde_json", "tokio", @@ -446,7 +445,7 @@ version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c74832aa474b670309c20fffc2a869fa141edab7c79ff7963fad0a08de60bae1" dependencies = [ - "alloy-primitives 0.8.14", + 
"alloy-primitives 0.8.19", "alloy-rpc-types-eth", "alloy-serde", "serde", @@ -458,7 +457,7 @@ version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5ca97963132f78ddfc60e43a017348e6d52eea983925c23652f5b330e8e02291" dependencies = [ - "alloy-primitives 0.8.14", + "alloy-primitives 0.8.19", "alloy-rpc-types-eth", "alloy-serde", "serde", @@ -473,7 +472,7 @@ dependencies = [ "alloy-consensus", "alloy-eips", "alloy-network-primitives", - "alloy-primitives 0.8.14", + "alloy-primitives 0.8.19", "alloy-rlp", "alloy-serde", "alloy-sol-types", @@ -489,7 +488,7 @@ version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4dfa4a7ccf15b2492bb68088692481fd6b2604ccbee1d0d6c44c21427ae4df83" dependencies = [ - "alloy-primitives 0.8.14", + "alloy-primitives 0.8.19", "serde", "serde_json", ] @@ -500,7 +499,7 @@ version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2e10aec39d60dc27edcac447302c7803d2371946fb737245320a05b78eb2fafd" dependencies = [ - "alloy-primitives 0.8.14", + "alloy-primitives 0.8.19", "async-trait", "auto_impl", "elliptic-curve", @@ -516,7 +515,7 @@ checksum = "d8396f6dff60700bc1d215ee03d86ff56de268af96e2bf833a14d0bafcab9882" dependencies = [ "alloy-consensus", "alloy-network", - "alloy-primitives 0.8.14", + "alloy-primitives 0.8.19", "alloy-signer", "async-trait", "k256", @@ -526,42 +525,42 @@ dependencies = [ [[package]] name = "alloy-sol-macro" -version = "0.8.14" +version = "0.8.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3bfd7853b65a2b4f49629ec975fee274faf6dff15ab8894c620943398ef283c0" +checksum = "8d039d267aa5cbb7732fa6ce1fd9b5e9e29368f580f80ba9d7a8450c794de4b2" dependencies = [ "alloy-sol-macro-expander", "alloy-sol-macro-input", "proc-macro-error2", "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.96", ] [[package]] name = "alloy-sol-macro-expander" -version = "0.8.14" +version = "0.8.19" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "82ec42f342d9a9261699f8078e57a7a4fda8aaa73c1a212ed3987080e6a9cd13" +checksum = "620ae5eee30ee7216a38027dec34e0585c55099f827f92f50d11e3d2d3a4a954" dependencies = [ "alloy-json-abi", "alloy-sol-macro-input", "const-hex", "heck 0.5.0", - "indexmap 2.6.0", + "indexmap 2.7.1", "proc-macro-error2", "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.96", "syn-solidity", "tiny-keccak", ] [[package]] name = "alloy-sol-macro-input" -version = "0.8.14" +version = "0.8.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed2c50e6a62ee2b4f7ab3c6d0366e5770a21cad426e109c2f40335a1b3aff3df" +checksum = "ad9f7d057e00f8c5994e4ff4492b76532c51ead39353aa2ed63f8c50c0f4d52e" dependencies = [ "alloy-json-abi", "const-hex", @@ -570,28 +569,28 @@ dependencies = [ "proc-macro2", "quote", "serde_json", - "syn 2.0.89", + "syn 2.0.96", "syn-solidity", ] [[package]] name = "alloy-sol-type-parser" -version = "0.8.14" +version = "0.8.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ac17c6e89a50fb4a758012e4b409d9a0ba575228e69b539fe37d7a1bd507ca4a" +checksum = "74e60b084fe1aef8acecda2743ff2d93c18ff3eb67a2d3b12f62582a1e66ef5e" dependencies = [ "serde", - "winnow", + "winnow 0.6.26", ] [[package]] name = "alloy-sol-types" -version = "0.8.14" +version = "0.8.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c9dc0fffe397aa17628160e16b89f704098bf3c9d74d5d369ebc239575936de5" +checksum = "c1382302752cd751efd275f4d6ef65877ddf61e0e6f5ac84ef4302b79a33a31a" dependencies = [ "alloy-json-abi", - "alloy-primitives 0.8.14", + "alloy-primitives 0.8.19", "alloy-sol-macro", "const-hex", "serde", @@ -625,7 +624,7 @@ checksum = "5dc013132e34eeadaa0add7e74164c1503988bfba8bae885b32e0918ba85a8a6" dependencies = [ "alloy-json-rpc", "alloy-transport", - "reqwest 0.12.9", + "reqwest 0.12.12", "serde_json", "tower", "tracing", @@ -707,19 +706,20 @@ dependencies = [ 
[[package]] name = "anstyle-wincon" -version = "3.0.6" +version = "3.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2109dbce0e72be3ec00bed26e6a7479ca384ad226efdd66db8fa2e3a38c83125" +checksum = "ca3534e77181a9cc07539ad51f2141fe32f6c3ffd4df76db8ad92346b003ae4e" dependencies = [ "anstyle", + "once_cell", "windows-sys 0.59.0", ] [[package]] name = "anyhow" -version = "1.0.93" +version = "1.0.95" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c95c10ba0b00a02636238b814946408b1322d5ac4760326e6fb8ec956d85775" +checksum = "34ac096ce696dc2fcabef30516bb13c0a68a11d30131d3df6f04711467681b04" [[package]] name = "ark-ff" @@ -891,18 +891,18 @@ checksum = "c7c24de15d275a1ecfd47a380fb4d5ec9bfe0933f309ed5e705b775596a3574d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.96", ] [[package]] name = "async-trait" -version = "0.1.83" +version = "0.1.85" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "721cae7de5c34fbb2acd27e21e6d2cf7b886dce0c27388d46c4e6c47ea4318dd" +checksum = "3f934833b4b7233644e5848f235df3f57ed8c80f1528a26c3dfa13d2147fa056" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.96", ] [[package]] @@ -929,13 +929,13 @@ dependencies = [ [[package]] name = "auto_impl" -version = "1.2.0" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c87f3f15e7794432337fc718554eaa4dc8f04c9677a950ffe366f20a162ae42" +checksum = "e12882f59de5360c748c4cbf569a042d5fb0eb515f7bea9c1f470b47f6ffbd73" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.96", ] [[package]] @@ -1051,9 +1051,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "bitflags" -version = "2.6.0" +version = "2.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b048fb63fd8b5923fc5aa7b340d8e156aec7ec02f0c78fa8a6ddc2613f6f71de" +checksum = 
"8f68f53c83ab957f72c32642f3868eec03eb974d1fb82e453128456482613d36" [[package]] name = "bitvec" @@ -1100,9 +1100,9 @@ dependencies = [ [[package]] name = "bumpalo" -version = "3.16.0" +version = "3.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c" +checksum = "1628fb46dfa0b37568d12e5edd512553eccf6a22a78e8bde00bb4aed84d5bdbf" [[package]] name = "byte-slice-cast" @@ -1199,7 +1199,7 @@ checksum = "2d886547e41f740c616ae73108f6eb70afe6d940c7bc697cb30f13daec073037" dependencies = [ "camino", "cargo-platform", - "semver 1.0.23", + "semver 1.0.25", "serde", "serde_json", "thiserror 1.0.69", @@ -1207,9 +1207,9 @@ dependencies = [ [[package]] name = "cc" -version = "1.2.2" +version = "1.2.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f34d93e62b03caf570cccc334cbc6c2fceca82f39211051345108adcba3eebdc" +checksum = "13208fcbb66eaeffe09b99fffbe1af420f00a7b35aa99ad683dfc1aa76145229" dependencies = [ "jobserver", "libc", @@ -1224,9 +1224,9 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" [[package]] name = "chrono" -version = "0.4.38" +version = "0.4.39" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a21f936df1771bf62b77f047b726c4625ff2e8aa607c01ec06e5a05bd8463401" +checksum = "7e36cc9d416881d2e24f9a963be5fb1cd90966419ac844274161d10488b3e825" dependencies = [ "android-tzdata", "iana-time-zone", @@ -1247,9 +1247,9 @@ dependencies = [ [[package]] name = "clap" -version = "4.5.21" +version = "4.5.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb3b4b9e5a7c7514dfa52869339ee98b3156b0bfb4e8a77c4ff4babb64b1604f" +checksum = "769b0145982b4b48713e01ec42d61614425f27b7058bda7180a3a41f30104796" dependencies = [ "clap_builder", "clap_derive", @@ -1257,9 +1257,9 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.5.21" +version = "4.5.27" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "b17a95aa67cc7b5ebd32aa5370189aa0d79069ef1c64ce893bd30fb24bff20ec" +checksum = "1b26884eb4b57140e4d2d93652abfa49498b938b3c9179f9fc487b0acc3edad7" dependencies = [ "anstream", "anstyle", @@ -1269,21 +1269,21 @@ dependencies = [ [[package]] name = "clap_derive" -version = "4.5.18" +version = "4.5.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ac6a0c7b1a9e9a5186361f67dfa1b88213572f427fb9ab038efb2bd8c582dab" +checksum = "54b755194d6389280185988721fffba69495eed5ee9feeee9a599b53db80318c" dependencies = [ "heck 0.5.0", "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.96", ] [[package]] name = "clap_lex" -version = "0.7.3" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "afb84c814227b90d6895e01398aee0d8033c00e7466aca416fb6a8e0eb19d8a7" +checksum = "f46ad14479a25103f283c0f10005961cf086d8dc42205bb44c46ac563475dca6" [[package]] name = "coins-bip32" @@ -1345,25 +1345,25 @@ checksum = "5b63caa9aa9397e2d9480a9b13673856c78d8ac123288526c37d7839f2a86990" [[package]] name = "colored" -version = "2.1.0" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cbf2150cce219b664a8a70df7a1f933836724b503f8a413af9365b4dcc4d90b8" +checksum = "117725a109d387c937a1533ce01b450cbde6b88abceea8473c4d7a85853cda3c" dependencies = [ "lazy_static", - "windows-sys 0.48.0", + "windows-sys 0.59.0", ] [[package]] name = "console" -version = "0.15.8" +version = "0.15.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e1f83fc076bd6dd27517eacdf25fef6c4dfe5f1d7448bafaaf3a26f13b5e4eb" +checksum = "ea3c6ecd8059b57859df5c69830340ed3c41d30e3da0c1cbed90a96ac853041b" dependencies = [ "encode_unicode", - "lazy_static", "libc", - "unicode-width", - "windows-sys 0.52.0", + "once_cell", + "unicode-width 0.2.0", + "windows-sys 0.59.0", ] [[package]] @@ -1435,9 +1435,9 @@ checksum = 
"773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" [[package]] name = "cpufeatures" -version = "0.2.16" +version = "0.2.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16b80225097f2e5ae4e7179dd2266824648f3e2f49d9134d584b76389d31c4c3" +checksum = "59ed5838eebb26a2bb2e58f6d5b5316989ae9d08bab10e0e6d103e656d1b0280" dependencies = [ "libc", ] @@ -1453,9 +1453,9 @@ dependencies = [ [[package]] name = "crossbeam-deque" -version = "0.8.5" +version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "613f8cc01fe9cf1a3eb3d7f488fd2fa8388403e97039e2f73692932e291a770d" +checksum = "9dd111b7b7f7d55b72c0a6ae361660ee5853c9af73f70c3c2ef6858b950e2e51" dependencies = [ "crossbeam-epoch", "crossbeam-utils", @@ -1472,15 +1472,15 @@ dependencies = [ [[package]] name = "crossbeam-utils" -version = "0.8.20" +version = "0.8.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22ec99545bb0ed0ea7bb9b8e1e9122ea386ff8a48c0922e43f36d45ab09e0e80" +checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28" [[package]] name = "crunchy" -version = "0.2.2" +version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a81dae078cea95a014a339291cec439d2f232ebe854a9d672b796c6afafa9b7" +checksum = "43da5946c66ffcc7745f48db692ffbb10a83bfe0afd96235c5c2a4fb23994929" [[package]] name = "crypto-bigint" @@ -1571,7 +1571,7 @@ checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.96", ] [[package]] @@ -1595,7 +1595,7 @@ dependencies = [ "proc-macro2", "quote", "strsim", - "syn 2.0.89", + "syn 2.0.96", ] [[package]] @@ -1606,7 +1606,7 @@ checksum = "d336a2a514f6ccccaa3e09b02d41d35330c07ddf03a62165fcec10bb561c7806" dependencies = [ "darling_core", "quote", - "syn 2.0.89", + "syn 2.0.96", ] [[package]] @@ -1638,19 +1638,19 @@ dependencies = [ [[package]] name 
= "data-encoding" -version = "2.6.0" +version = "2.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e8566979429cf69b49a5c740c60791108e86440e8be149bbea4fe54d2c32d6e2" +checksum = "0e60eed09d8c01d3cee5b7d30acb059b76614c918fa0f992e0dd6eeb10daad6f" [[package]] name = "delegate" -version = "0.13.1" +version = "0.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bc2323e10c92e1cf4d86e11538512e6dc03ceb586842970b6332af3d4046a046" +checksum = "297806318ef30ad066b15792a8372858020ae3ca2e414ee6c2133b1eb9e9e945" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.96", ] [[package]] @@ -1694,7 +1694,7 @@ dependencies = [ "proc-macro2", "quote", "rustc_version 0.4.1", - "syn 2.0.89", + "syn 2.0.96", ] [[package]] @@ -1714,7 +1714,7 @@ checksum = "cb7330aeadfbe296029522e6c40f315320aba36fc43a5b3632f3795348f3bd22" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.96", "unicode-xid", ] @@ -1803,7 +1803,7 @@ checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.96", ] [[package]] @@ -1893,9 +1893,9 @@ dependencies = [ [[package]] name = "encode_unicode" -version = "0.3.6" +version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a357d28ed41a50f9c765dbfe56cbc04a64e53e5fc58ba79fbc34c10ef3df831f" +checksum = "34aa73646ffb006b8f5147f3dc182bd4bcb190227ce861fc4a4844bf8e3cb2c0" [[package]] name = "encoding_rs" @@ -1932,14 +1932,14 @@ checksum = "2f9ed6b3789237c8a0c1c505af1c7eb2c560df6186f01b098c3a1064ea532f38" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.96", ] [[package]] name = "env_filter" -version = "0.1.2" +version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4f2c92ceda6ceec50f43169f9ee8424fe2db276791afde7b2cd8bc084cb376ab" +checksum = 
"186e05a59d4c50738528153b83b0b0194d3a29507dfec16eccd4b342903397d0" dependencies = [ "log", "regex", @@ -1973,9 +1973,9 @@ dependencies = [ [[package]] name = "env_logger" -version = "0.11.5" +version = "0.11.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e13fa619b91fb2381732789fc5de83b45675e882f66623b7d8cb4f643017018d" +checksum = "dcaee3d8e3cfc3fd92428d477bc97fc29ec8716d180c0d74c643bb26166660e0" dependencies = [ "anstream", "anstyle", @@ -2001,7 +2001,7 @@ checksum = "d4291f0c7220b67ad15e9d5300ba2f215cee504f0924d60e77c9d1c77e7a69b1" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.96", ] [[package]] @@ -2212,7 +2212,7 @@ dependencies = [ "reqwest 0.11.27", "serde", "serde_json", - "syn 2.0.89", + "syn 2.0.96", "toml", "walkdir", ] @@ -2236,7 +2236,7 @@ dependencies = [ "reqwest 0.11.27", "serde", "serde_json", - "syn 2.0.89", + "syn 2.0.96", "toml", "walkdir", ] @@ -2253,7 +2253,7 @@ dependencies = [ "proc-macro2", "quote", "serde_json", - "syn 2.0.89", + "syn 2.0.96", ] [[package]] @@ -2269,7 +2269,7 @@ dependencies = [ "proc-macro2", "quote", "serde_json", - "syn 2.0.89", + "syn 2.0.96", ] [[package]] @@ -2294,7 +2294,7 @@ dependencies = [ "serde", "serde_json", "strum", - "syn 2.0.89", + "syn 2.0.96", "tempfile", "thiserror 1.0.69", "tiny-keccak", @@ -2324,7 +2324,7 @@ dependencies = [ "serde", "serde_json", "strum", - "syn 2.0.89", + "syn 2.0.96", "tempfile", "thiserror 1.0.69", "tiny-keccak", @@ -2339,7 +2339,7 @@ dependencies = [ "chrono", "ethers-core 2.0.13", "reqwest 0.11.27", - "semver 1.0.23", + "semver 1.0.25", "serde", "serde_json", "thiserror 1.0.69", @@ -2355,7 +2355,7 @@ dependencies = [ "chrono", "ethers-core 2.0.14", "reqwest 0.11.27", - "semver 1.0.23", + "semver 1.0.25", "serde", "serde_json", "thiserror 1.0.69", @@ -2542,7 +2542,7 @@ dependencies = [ "path-slash", "rayon", "regex", - "semver 1.0.23", + "semver 1.0.25", "serde", "serde_json", "solang-parser", @@ -2574,7 +2574,7 @@ 
dependencies = [ "path-slash", "rayon", "regex", - "semver 1.0.23", + "semver 1.0.25", "serde", "serde_json", "solang-parser", @@ -2605,9 +2605,9 @@ checksum = "4443176a9f2c162692bd3d352d745ef9413eec5782a80d8fd6f8a1ac692a07f7" [[package]] name = "fastrand" -version = "2.2.0" +version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "486f806e73c5707928240ddc295403b1b93c96a02038563881c4a2fd84b81ac4" +checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" [[package]] name = "fastrlp" @@ -2620,6 +2620,17 @@ dependencies = [ "bytes", ] +[[package]] +name = "fastrlp" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce8dba4714ef14b8274c371879b175aa55b16b30f269663f19d576f380018dc4" +dependencies = [ + "arrayvec 0.7.6", + "auto_impl", + "bytes", +] + [[package]] name = "ff" version = "0.13.0" @@ -2700,9 +2711,9 @@ checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" [[package]] name = "foldhash" -version = "0.1.3" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f81ec6369c545a7d40e4589b5597581fa1c441fe1cce96dd1de43159910a36a2" +checksum = "a0d2fde1f7b3d48b8395d5f2de76c18a528bd6a9cdde438df747bfcba3e05d6f" [[package]] name = "foreign-types" @@ -2820,7 +2831,7 @@ checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.96", ] [[package]] @@ -2922,6 +2933,18 @@ dependencies = [ "wasm-bindgen", ] +[[package]] +name = "getrandom" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "43a49c392881ce6d5c3b8cb70f98717b7c07aabbdff06687b9030dbfbe2725f8" +dependencies = [ + "cfg-if", + "libc", + "wasi 0.13.3+wasi-0.2.2", + "windows-targets 0.52.6", +] + [[package]] name = "gimli" version = "0.31.1" @@ -2945,14 +2968,14 @@ checksum = 
"53010ccb100b96a67bc32c0175f0ed1426b31b655d562898e57325f81c023ac0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.96", ] [[package]] name = "glob" -version = "0.3.1" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b" +checksum = "a8d1add55171497b4705a648c6b583acafb01d58050a51727785f0b2c8e0a2b2" [[package]] name = "gloo-timers" @@ -2991,7 +3014,7 @@ version = "0.1.0" dependencies = [ "alloy", "anyhow", - "env_logger 0.11.5", + "env_logger 0.11.6", "gnark-utils", "hex", "itertools 0.13.0", @@ -3033,7 +3056,7 @@ dependencies = [ "futures-sink", "futures-util", "http 0.2.12", - "indexmap 2.6.0", + "indexmap 2.7.1", "slab", "tokio", "tokio-util", @@ -3144,11 +3167,11 @@ dependencies = [ [[package]] name = "home" -version = "0.5.9" +version = "0.5.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3d1354bf6b7235cb4a0576c2619fd4ed18183f689b12b006a0ee7329eeff9a5" +checksum = "589533453244b0995c858700322199b2becb13b627df2851f64a2775d024abcf" dependencies = [ - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] @@ -3164,9 +3187,9 @@ dependencies = [ [[package]] name = "http" -version = "1.1.0" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "21b9ddb458710bc376481b842f5da65cdf31522de232c1ca8146abce2a358258" +checksum = "f16ca2af56261c99fba8bac40a10251ce8188205a4c448fbb745a2e4daa76fea" dependencies = [ "bytes", "fnv", @@ -3191,7 +3214,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184" dependencies = [ "bytes", - "http 1.1.0", + "http 1.2.0", ] [[package]] @@ -3202,16 +3225,16 @@ checksum = "793429d76616a256bcb62c2a2ec2bed781c8307e797e2598c50010f2bee2544f" dependencies = [ "bytes", "futures-util", - "http 1.1.0", + "http 1.2.0", "http-body 1.0.1", 
"pin-project-lite", ] [[package]] name = "httparse" -version = "1.9.5" +version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7d71d3574edd2771538b901e6549113b4006ece66150fb69c0fb6d9a2adae946" +checksum = "f2d708df4e7140240a16cd6ab0ab65c972d7433ab77819ea693fde9c43811e2a" [[package]] name = "httpdate" @@ -3227,9 +3250,9 @@ checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4" [[package]] name = "hyper" -version = "0.14.31" +version = "0.14.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c08302e8fa335b151b788c775ff56e7a03ae64ff85c548ee820fecb70356e85" +checksum = "41dfc780fdec9373c01bae43289ea34c972e40ee3c9f6b3c8801a35f35586ce7" dependencies = [ "bytes", "futures-channel", @@ -3251,14 +3274,14 @@ dependencies = [ [[package]] name = "hyper" -version = "1.5.1" +version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97818827ef4f364230e16705d4706e2897df2bb60617d6ca15d598025a3c481f" +checksum = "cc2b571658e38e0c01b1fdca3bbbe93c00d3d71693ff2770043f8c29bc7d6f80" dependencies = [ "bytes", "futures-channel", "futures-util", - "http 1.1.0", + "http 1.2.0", "http-body 1.0.1", "httparse", "itoa", @@ -3276,7 +3299,7 @@ checksum = "ec3efd23720e2049821a693cbc7e65ea87c72f1c58ff2f9522ff332b1491e590" dependencies = [ "futures-util", "http 0.2.12", - "hyper 0.14.31", + "hyper 0.14.32", "rustls", "tokio", "tokio-rustls", @@ -3289,7 +3312,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905" dependencies = [ "bytes", - "hyper 0.14.31", + "hyper 0.14.32", "native-tls", "tokio", "tokio-native-tls", @@ -3303,7 +3326,7 @@ checksum = "70206fc6890eaca9fde8a0bf71caa2ddfc9fe045ac9e5c70df101a7dbde866e0" dependencies = [ "bytes", "http-body-util", - "hyper 1.5.1", + "hyper 1.6.0", "hyper-util", "native-tls", "tokio", @@ -3320,9 +3343,9 @@ dependencies = [ "bytes", 
"futures-channel", "futures-util", - "http 1.1.0", + "http 1.2.0", "http-body 1.0.1", - "hyper 1.5.1", + "hyper 1.6.0", "pin-project-lite", "socket2", "tokio", @@ -3468,7 +3491,7 @@ checksum = "1ec89e9337638ecdc08744df490b221a7399bf8d164eb52a665454e60e075ad6" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.96", ] [[package]] @@ -3533,7 +3556,7 @@ checksum = "a0eb5a3343abf848c0984fe4604b2b105da9539376e24fc0a3b0007411ae4fd9" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.96", ] [[package]] @@ -3555,9 +3578,9 @@ dependencies = [ [[package]] name = "indexmap" -version = "2.6.0" +version = "2.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "707907fe3c25f5424cce2cb7e1cbcafee6bdbe735ca90ef77c29e84591e5b9da" +checksum = "8c9c992b02b5b4c94ea26e32fe5bccb7aa7d9f390ab5c1221ff895bc7ea8b652" dependencies = [ "equivalent", "hashbrown 0.15.2", @@ -3601,19 +3624,19 @@ dependencies = [ [[package]] name = "ipnet" -version = "2.10.1" +version = "2.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ddc24109865250148c2e0f3d25d4f0f479571723792d3802153c60922a4fb708" +checksum = "469fb0b9cefa57e3ef31275ee7cacb78f2fdca44e4765491884a2b119d4eb130" [[package]] name = "is-terminal" -version = "0.4.13" +version = "0.4.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "261f68e344040fbd0edea105bef17c66edf46f984ddb1115b775ce31be948f4b" +checksum = "e19b23d53f35ce9f56aebc7d1bb4e6ac1e9c0db7ac85c8d1760c04379edced37" dependencies = [ "hermit-abi 0.4.0", "libc", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] @@ -3682,10 +3705,11 @@ dependencies = [ [[package]] name = "js-sys" -version = "0.3.73" +version = "0.3.77" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb15147158e79fd8b8afd0252522769c4f48725460b37338544d8379d94fc8f9" +checksum = "1cfaf33c695fc6e08064efbc1f72ec937429614f25eef83af942d0e227c3a28f" dependencies = [ + 
"once_cell", "wasm-bindgen", ] @@ -3797,9 +3821,9 @@ dependencies = [ [[package]] name = "libc" -version = "0.2.167" +version = "0.2.169" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09d6582e104315a817dff97f75133544b2e094ee22447d2acf4a74e189ba06fc" +checksum = "b5aba8db14291edd000dfcc4d620c7ebfb122c613afb886ca8803fa4e128a20a" [[package]] name = "libm" @@ -3813,15 +3837,15 @@ version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d" dependencies = [ - "bitflags 2.6.0", + "bitflags 2.8.0", "libc", ] [[package]] name = "linux-raw-sys" -version = "0.4.14" +version = "0.4.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78b3ae25bc7c8c38cec158d1f2757ee79e9b3740fbc7ccf0e59e4b08d793fa89" +checksum = "d26c52dbd32dccf2d10cac7725f8eae5296885fb5703b261f7d0a0739ec807ab" [[package]] name = "litemap" @@ -3841,9 +3865,9 @@ dependencies = [ [[package]] name = "log" -version = "0.4.22" +version = "0.4.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24" +checksum = "04cbf5b083de1c7e0222a7a51dbfdba1cbe1c6ab0b15e29fff3f6c077fd9cd9f" [[package]] name = "lru" @@ -3902,9 +3926,9 @@ checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" [[package]] name = "miniz_oxide" -version = "0.8.0" +version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2d80299ef12ff69b16a84bb182e3b9df68b5a91574d3d4fa6e41b65deec4df1" +checksum = "b8402cab7aefae129c6977bb0ff1b8fd9a04eb5b51efc50a70bea51cda0c7924" dependencies = [ "adler2", ] @@ -3957,7 +3981,7 @@ version = "0.1.0" dependencies = [ "alloy", "anyhow", - "env_logger 0.11.5", + "env_logger 0.11.6", "eth_trie", "log", "mp2_common", @@ -3980,7 +4004,7 @@ dependencies = [ "bincode", "csv", "derive_more 1.0.0", - "env_logger 0.11.5", + 
"env_logger 0.11.6", "envconfig", "eth_trie", "futures", @@ -4017,9 +4041,9 @@ dependencies = [ [[package]] name = "native-tls" -version = "0.2.12" +version = "0.2.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a8614eb2c83d59d1c8cc974dd3f920198647674a0a035e1af1fa58707e317466" +checksum = "0dab59f8e050d5df8e4dd87d9206fb6f65a483e20ac9fda365ade4fab353196c" dependencies = [ "libc", "log", @@ -4179,14 +4203,14 @@ dependencies = [ "proc-macro-crate", "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.96", ] [[package]] name = "object" -version = "0.36.5" +version = "0.36.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aedf0a2d09c573ed1d8d85b30c119153926a2b36dce0ab28322c09a117a4683e" +checksum = "62948e14d923ea95ea2c7c86c71013138b66525b86bdc08d2dcc262bdb497b87" dependencies = [ "memchr", ] @@ -4224,11 +4248,11 @@ dependencies = [ [[package]] name = "openssl" -version = "0.10.68" +version = "0.10.69" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6174bc48f102d208783c2c84bf931bb75927a617866870de8a4ea85597f871f5" +checksum = "f5e534d133a060a3c19daec1eb3e98ec6f4685978834f2dbadfe2ec215bab64e" dependencies = [ - "bitflags 2.6.0", + "bitflags 2.8.0", "cfg-if", "foreign-types", "libc", @@ -4245,14 +4269,14 @@ checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.96", ] [[package]] name = "openssl-probe" -version = "0.1.5" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" +checksum = "d05e27ee213611ffe7d6348b942e8f942b37114c00cc03cec254295a4a17852e" [[package]] name = "openssl-sys" @@ -4298,34 +4322,33 @@ dependencies = [ "ansitok", "bytecount", "fnv", - "unicode-width", + "unicode-width 0.1.11", ] [[package]] name = "parity-scale-codec" -version = "3.7.0" +version = "3.6.12" source 
= "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8be4817d39f3272f69c59fe05d0535ae6456c2dc2fa1ba02910296c7e0a5c590" +checksum = "306800abfa29c7f16596b5970a588435e3d5b3149683d00c12b699cc19f895ee" dependencies = [ "arrayvec 0.7.6", "bitvec", "byte-slice-cast", "impl-trait-for-tuples", "parity-scale-codec-derive", - "rustversion", "serde", ] [[package]] name = "parity-scale-codec-derive" -version = "3.7.0" +version = "3.6.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8781a75c6205af67215f382092b6e0a4ff3734798523e69073d4bcd294ec767b" +checksum = "d830939c76d294956402033aee57a6da7b438f2294eb94864c37b0569053a42c" dependencies = [ "proc-macro-crate", "proc-macro2", "quote", - "syn 2.0.89", + "syn 1.0.109", ] [[package]] @@ -4365,7 +4388,7 @@ dependencies = [ "serde_json", "sqlparser", "stderrlog", - "thiserror 2.0.3", + "thiserror 2.0.11", "verifiable-db", ] @@ -4431,12 +4454,12 @@ checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" [[package]] name = "pest" -version = "2.7.14" +version = "2.7.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "879952a81a83930934cbf1786752d6dedc3b1f29e8f8fb2ad1d0a36f377cf442" +checksum = "8b7cafe60d6cf8e62e1b9b2ea516a089c008945bb5a275416789e7db0bc199dc" dependencies = [ "memchr", - "thiserror 1.0.69", + "thiserror 2.0.11", "ucd-trie", ] @@ -4447,7 +4470,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b4c5cc86750666a3ed20bdaf5ca2a0344f9c67674cae0515bec2da16fbaa47db" dependencies = [ "fixedbitset", - "indexmap 2.6.0", + "indexmap 2.7.1", ] [[package]] @@ -4462,35 +4485,35 @@ dependencies = [ [[package]] name = "phf" -version = "0.11.2" +version = "0.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ade2d8b8f33c7333b51bcf0428d37e217e9f32192ae4772156f65063b8ce03dc" +checksum = "1fd6780a80ae0c52cc120a26a1a42c1ae51b247a253e4e06113d23d2c2edd078" dependencies = [ 
"phf_macros", - "phf_shared 0.11.2", + "phf_shared 0.11.3", ] [[package]] name = "phf_generator" -version = "0.11.2" +version = "0.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "48e4cc64c2ad9ebe670cb8fd69dd50ae301650392e81c05f9bfcb2d5bdbc24b0" +checksum = "3c80231409c20246a13fddb31776fb942c38553c51e871f8cbd687a4cfb5843d" dependencies = [ - "phf_shared 0.11.2", + "phf_shared 0.11.3", "rand", ] [[package]] name = "phf_macros" -version = "0.11.2" +version = "0.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3444646e286606587e49f3bcf1679b8cef1dc2c5ecc29ddacaffc305180d464b" +checksum = "f84ac04429c13a7ff43785d75ad27569f2951ce0ffd30a3321230db2fc727216" dependencies = [ "phf_generator", - "phf_shared 0.11.2", + "phf_shared 0.11.3", "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.96", ] [[package]] @@ -4499,43 +4522,43 @@ version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b6796ad771acdc0123d2a88dc428b5e38ef24456743ddb1744ed628f9815c096" dependencies = [ - "siphasher", + "siphasher 0.3.11", ] [[package]] name = "phf_shared" -version = "0.11.2" +version = "0.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90fcb95eef784c2ac79119d1dd819e162b5da872ce6f3c3abe1e8ca1c082f72b" +checksum = "67eabc2ef2a60eb7faa00097bd1ffdb5bd28e62bf39990626a582201b7a754e5" dependencies = [ - "siphasher", + "siphasher 1.0.1", ] [[package]] name = "pin-project" -version = "1.1.7" +version = "1.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "be57f64e946e500c8ee36ef6331845d40a93055567ec57e8fae13efd33759b95" +checksum = "1e2ec53ad785f4d35dac0adea7f7dc6f1bb277ad84a680c7afefeae05d1f5916" dependencies = [ "pin-project-internal", ] [[package]] name = "pin-project-internal" -version = "1.1.7" +version = "1.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"3c0f5fad0874fc7abcd4d750e76917eaebbecaa2c20bde22e1dbeeba8beb758c" +checksum = "d56a66c0c55993aa927429d0f8a0abfd74f084e4d9c192cffed01e418d83eefb" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.96", ] [[package]] name = "pin-project-lite" -version = "0.2.15" +version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "915a1e146535de9163f3987b8944ed8cf49a18bb0056bcebcdcece385cece4ff" +checksum = "3b3cff922bd51709b605d9ead9aa71031d81447142d828eb4a6eba76fe619f9b" [[package]] name = "pin-utils" @@ -4719,13 +4742,13 @@ dependencies = [ "serde", "serde_json", "serde_plain", - "serde_with 3.11.0", + "serde_with 3.12.0", "sha2", "sha256", "starkyx", "tokio", "tracing", - "uuid 1.11.0", + "uuid 1.12.1", ] [[package]] @@ -4735,7 +4758,7 @@ source = "git+https://github.com/Lagrange-Labs/succinctx?branch=fix-build#8580a6 dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.96", ] [[package]] @@ -4805,12 +4828,12 @@ checksum = "925383efa346730478fb4838dbe9137d2a47675ad789c546d150a6e1dd4ab31c" [[package]] name = "prettyplease" -version = "0.2.25" +version = "0.2.29" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "64d1ec885c64d0457d564db4ec299b2dae3f9c02808b8ad9c3a089c591b18033" +checksum = "6924ced06e1f7dfe3fa48d57b9f74f55d8915f5036121bef647ef4b204895fac" dependencies = [ "proc-macro2", - "syn 2.0.89", + "syn 2.0.96", ] [[package]] @@ -4889,14 +4912,14 @@ dependencies = [ "proc-macro-error-attr2", "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.96", ] [[package]] name = "proc-macro2" -version = "1.0.92" +version = "1.0.93" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37d3544b3f2748c54e147655edb5025752e2303145b5aefb3c3ea2c78b973bb0" +checksum = "60946a68e5f9d28b0dc1c21bb8a97ee7d018a8b322fa57838ba31cc878e22d99" dependencies = [ "unicode-ident", ] @@ -4909,7 +4932,7 @@ checksum = "b4c2511913b88df1637da85cc8d96ec8e43a3f8bb8ccb71ee1ac240d6f3df58d" 
dependencies = [ "bit-set", "bit-vec", - "bitflags 2.6.0", + "bitflags 2.8.0", "lazy_static", "num-traits", "rand", @@ -4929,9 +4952,9 @@ checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0" [[package]] name = "quote" -version = "1.0.37" +version = "1.0.38" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5b9d34b8991d19d98081b46eacdd8eb58c6f2b201139f7c5f643cc155a633af" +checksum = "0e4dccaaaf89514f546c693ddc140f729f958c247918a13380cccc6078391acc" dependencies = [ "proc-macro2", ] @@ -5017,7 +5040,7 @@ version = "0.1.0" dependencies = [ "anyhow", "bincode", - "env_logger 0.11.5", + "env_logger 0.11.6", "log", "mp2_common", "plonky2", @@ -5029,11 +5052,11 @@ dependencies = [ [[package]] name = "redox_syscall" -version = "0.5.7" +version = "0.5.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b6dfecf2c74bce2466cabf93f6664d6998a69eb21e39f4207930065b27b771f" +checksum = "03a862b389f93e68874fbf580b9de08dd02facb9a788ebadaf4a3fd33cf58834" dependencies = [ - "bitflags 2.6.0", + "bitflags 2.8.0", ] [[package]] @@ -5111,7 +5134,7 @@ dependencies = [ "h2", "http 0.2.12", "http-body 0.4.6", - "hyper 0.14.31", + "hyper 0.14.32", "hyper-rustls", "hyper-tls 0.5.0", "ipnet", @@ -5143,18 +5166,18 @@ dependencies = [ [[package]] name = "reqwest" -version = "0.12.9" +version = "0.12.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a77c62af46e79de0a562e1a9849205ffcb7fc1238876e9bd743357570e04046f" +checksum = "43e734407157c3c2034e0258f5e4473ddb361b1e85f95a66690d67264d7cd1da" dependencies = [ "base64 0.22.1", "bytes", "futures-core", "futures-util", - "http 1.1.0", + "http 1.2.0", "http-body 1.0.1", "http-body-util", - "hyper 1.5.1", + "hyper 1.6.0", "hyper-tls 0.6.0", "hyper-util", "ipnet", @@ -5172,6 +5195,7 @@ dependencies = [ "sync_wrapper 1.0.2", "tokio", "tokio-native-tls", + "tower", "tower-service", "url", "wasm-bindgen", @@ -5224,7 +5248,7 @@ dependencies = [ 
"alloy-primitives 0.4.2", "alloy-rlp", "auto_impl", - "bitflags 2.6.0", + "bitflags 2.8.0", "bitvec", "enumn", "hashbrown 0.14.5", @@ -5354,22 +5378,24 @@ dependencies = [ "regex", "relative-path", "rustc_version 0.4.1", - "syn 2.0.89", + "syn 2.0.96", "unicode-ident", ] [[package]] name = "ruint" -version = "1.12.3" +version = "1.12.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c3cc4c2511671f327125da14133d0c5c5d137f006a1017a16f557bc85b16286" +checksum = "f5ef8fb1dd8de3870cb8400d51b4c2023854bbafd5431a3ac7e7317243e22d2f" dependencies = [ "alloy-rlp", "ark-ff 0.3.0", "ark-ff 0.4.2", "bytes", - "fastrlp", + "fastrlp 0.3.1", + "fastrlp 0.4.0", "num-bigint 0.4.6", + "num-integer", "num-traits", "parity-scale-codec", "postgres-types", @@ -5379,7 +5405,7 @@ dependencies = [ "rlp", "ruint-macro", "serde", - "thiserror 1.0.69", + "thiserror 2.0.11", "valuable", "zeroize", ] @@ -5398,9 +5424,9 @@ checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f" [[package]] name = "rustc-hash" -version = "2.0.0" +version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "583034fd73374156e66797ed8e5b0d5690409c9226b22d87cb7f19821c05d152" +checksum = "c7fb8039b3032c191086b10f11f319a6e99e1e82889c5cc6046f515c9db1d497" [[package]] name = "rustc-hex" @@ -5423,20 +5449,20 @@ version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cfcb3a22ef46e85b45de6ee7e79d063319ebb6594faafcf1c225ea92ab6e9b92" dependencies = [ - "semver 1.0.23", + "semver 1.0.25", ] [[package]] name = "rustix" -version = "0.38.41" +version = "0.38.44" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d7f649912bc1495e167a6edee79151c84b1bad49748cb4f1f1167f459f6224f6" +checksum = "fdb5bc1ae2baa591800df16c9ca78619bf65c0488b41b96ccec5d11220d8c154" dependencies = [ - "bitflags 2.6.0", + "bitflags 2.8.0", "errno", "libc", "linux-raw-sys", - "windows-sys 0.52.0", + 
"windows-sys 0.59.0", ] [[package]] @@ -5471,9 +5497,9 @@ dependencies = [ [[package]] name = "rustls-pki-types" -version = "1.10.0" +version = "1.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16f1201b3c9a7ee8039bcadc17b7e605e2945b27eee7631788c1bd2b0643674b" +checksum = "917ce264624a4b4db1c364dcc35bfca9ded014d0a958cd47ad3e960e988ea51c" [[package]] name = "rustls-webpki" @@ -5487,9 +5513,9 @@ dependencies = [ [[package]] name = "rustversion" -version = "1.0.18" +version = "1.0.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e819f2bc632f285be6d7cd36e25940d45b2391dd6d9b939e79de557f7014248" +checksum = "f7c45b9784283f1b2e7fb61b42047c2fd678ef0960d4f6f1eba131594cc369d4" [[package]] name = "rusty-fork" @@ -5521,7 +5547,7 @@ dependencies = [ "serde_json", "sha256", "simple_logger", - "thiserror 2.0.3", + "thiserror 2.0.11", "tokio", "tokio-postgres", "tracing", @@ -5529,9 +5555,9 @@ dependencies = [ [[package]] name = "ryu" -version = "1.0.18" +version = "1.0.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f3cb5ba0dc43242ce17de99c180e96db90b235b8a9fdc9543c96d2209116bd9f" +checksum = "6ea1a2d0a644769cc99faa24c3ad26b379b786fe7c36fd3c546254801650e6dd" [[package]] name = "salsa20" @@ -5572,14 +5598,14 @@ dependencies = [ "proc-macro-crate", "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.96", ] [[package]] name = "scc" -version = "2.2.5" +version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "66b202022bb57c049555430e11fc22fea12909276a80a4c3d368da36ac1d88ed" +checksum = "28e1c91382686d21b5ac7959341fcb9780fa7c03773646995a87c950fa7be640" dependencies = [ "sdd", ] @@ -5595,9 +5621,9 @@ dependencies = [ [[package]] name = "schnellru" -version = "0.2.3" +version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c9a8ef13a93c54d20580de1e5c413e624e53121d42fc7e2c11d10ef7f8b02367" +checksum = 
"356285bbf17bea63d9e52e96bd18f039672ac92b55b8cb997d6162a2a37d1649" dependencies = [ "ahash", "cfg-if", @@ -5634,9 +5660,9 @@ dependencies = [ [[package]] name = "sdd" -version = "3.0.4" +version = "3.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49c1eeaf4b6a87c7479688c6d52b9f1153cedd3c489300564f932b065c6eab95" +checksum = "478f121bb72bbf63c52c93011ea1791dca40140dfe13f8336c4c5ac952c33aa9" [[package]] name = "sec1" @@ -5658,7 +5684,7 @@ version = "2.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "897b2245f0b511c87893af39b033e5ca9cce68824c4d7e7630b5a1d339658d02" dependencies = [ - "bitflags 2.6.0", + "bitflags 2.8.0", "core-foundation", "core-foundation-sys", "libc", @@ -5667,9 +5693,9 @@ dependencies = [ [[package]] name = "security-framework-sys" -version = "2.12.1" +version = "2.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa39c7303dc58b5543c94d22c1766b0d31f2ee58306363ea622b10bbc075eaa2" +checksum = "49db231d56a190491cb4aeda9527f1ad45345af50b0851622a7adb8c03b01c32" dependencies = [ "core-foundation-sys", "libc", @@ -5686,9 +5712,9 @@ dependencies = [ [[package]] name = "semver" -version = "1.0.23" +version = "1.0.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "61697e0a1c7e512e84a621326239844a24d8207b4669b41bc18b32ea5cbf988b" +checksum = "f79dfe2d285b0488816f30e700a7438c5a73d816b5b7d3ac72fbc48b0d185e03" dependencies = [ "serde", ] @@ -5716,29 +5742,29 @@ checksum = "cd0b0ec5f1c1ca621c432a25813d8d60c88abe6d3e08a3eb9cf37d97a0fe3d73" [[package]] name = "serde" -version = "1.0.215" +version = "1.0.217" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6513c1ad0b11a9376da888e3e0baa0077f1aed55c17f50e7b2397136129fb88f" +checksum = "02fc4265df13d6fa1d00ecff087228cc0a2b5f3c0e87e258d8b94a156e984c70" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.215" +version = 
"1.0.217" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ad1e866f866923f252f05c889987993144fb74e722403468a4ebd70c3cd756c0" +checksum = "5a9bf7cf98d04a2b28aead066b7496853d4779c9cc183c440dbac457641e19a0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.96", ] [[package]] name = "serde_json" -version = "1.0.133" +version = "1.0.138" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7fceb2473b9166b2294ef05efcb65a3db80803f0b03ef86a5fc88a2b85ee377" +checksum = "d434192e7da787e94a6ea7e9670b26a036d0ca41e0b7efb2676dd32bae872949" dependencies = [ "itoa", "memchr", @@ -5794,19 +5820,19 @@ dependencies = [ [[package]] name = "serde_with" -version = "3.11.0" +version = "3.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e28bdad6db2b8340e449f7108f020b3b092e8583a9e3fb82713e1d4e71fe817" +checksum = "d6b6f7f2fcb69f747921f79f3926bd1e203fce4fef62c268dd3abfb6d86029aa" dependencies = [ "base64 0.22.1", "chrono", "hex", "indexmap 1.9.3", - "indexmap 2.6.0", + "indexmap 2.7.1", "serde", "serde_derive", "serde_json", - "serde_with_macros 3.11.0", + "serde_with_macros 3.12.0", "time", ] @@ -5819,19 +5845,19 @@ dependencies = [ "darling", "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.96", ] [[package]] name = "serde_with_macros" -version = "3.11.0" +version = "3.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d846214a9854ef724f3da161b426242d8de7c1fc7de2f89bb1efcb154dca79d" +checksum = "8d00caa5193a3c8362ac2b73be6b9e768aa5a4b2f721d8f4b339600c3cb51f8e" dependencies = [ "darling", "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.96", ] [[package]] @@ -5881,7 +5907,7 @@ checksum = "5d69265a08751de7844521fd15003ae0a888e035773ba05695c5c759a6f89eef" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.96", ] [[package]] @@ -5981,13 +6007,13 @@ dependencies = [ [[package]] name = "simple_asn1" -version = "0.6.2" +version = 
"0.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "adc4e5204eb1910f40f9cfa375f6f05b68c3abac4b6fd879c8ff5e7ae8a0a085" +checksum = "297f631f50729c8c99b84667867963997ec0b50f32b2a7dbcab828ef0541e8bb" dependencies = [ "num-bigint 0.4.6", "num-traits", - "thiserror 1.0.69", + "thiserror 2.0.11", "time", ] @@ -6008,6 +6034,12 @@ version = "0.3.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "38b58827f4464d87d377d175e90bf58eb00fd8716ff0a62f80356b5e61555d0d" +[[package]] +name = "siphasher" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56199f7ddabf13fe5074ce809e7d3f42b42ae711800501b5b16ea82ad029c39d" + [[package]] name = "slab" version = "0.4.9" @@ -6171,7 +6203,7 @@ dependencies = [ "proc-macro2", "quote", "rustversion", - "syn 2.0.89", + "syn 2.0.96", ] [[package]] @@ -6213,7 +6245,7 @@ dependencies = [ "hex", "once_cell", "reqwest 0.11.27", - "semver 1.0.23", + "semver 1.0.25", "serde", "serde_json", "sha2", @@ -6235,9 +6267,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.89" +version = "2.0.96" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "44d46482f1c1c87acd84dea20c1bf5ebff4c757009ed6bf19cfd36fb10e92c4e" +checksum = "d5d0adab1ae378d7f53bdebc67a39f1f151407ef230f0ce2883572f5d8985c80" dependencies = [ "proc-macro2", "quote", @@ -6246,14 +6278,14 @@ dependencies = [ [[package]] name = "syn-solidity" -version = "0.8.14" +version = "0.8.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da0523f59468a2696391f2a772edc089342aacd53c3caa2ac3264e598edf119b" +checksum = "b84e4d83a0a6704561302b917a932484e1cae2d8c6354c64be8b7bac1c1fe057" dependencies = [ "paste", "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.96", ] [[package]] @@ -6279,7 +6311,7 @@ checksum = "c8af7666ab7b6390ab78131fb5b0fce11d6b7a6951602017c35fa82800708971" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + 
"syn 2.0.96", ] [[package]] @@ -6336,12 +6368,13 @@ checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" [[package]] name = "tempfile" -version = "3.14.0" +version = "3.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "28cce251fcbc87fac86a866eeb0d6c2d536fc16d06f184bb61aeae11aa4cee0c" +checksum = "38c246215d7d24f48ae091a2902398798e05d978b24315d6efbc00ede9a8bb91" dependencies = [ "cfg-if", "fastrand", + "getrandom 0.3.1", "once_cell", "rustix", "windows-sys 0.59.0", @@ -6369,24 +6402,24 @@ dependencies = [ [[package]] name = "test-log" -version = "0.2.16" +version = "0.2.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3dffced63c2b5c7be278154d76b479f9f9920ed34e7574201407f0b14e2bbb93" +checksum = "e7f46083d221181166e5b6f6b1e5f1d499f3a76888826e6cb1d057554157cd0f" dependencies = [ - "env_logger 0.11.5", + "env_logger 0.11.6", "test-log-macros", "tracing-subscriber", ] [[package]] name = "test-log-macros" -version = "0.2.16" +version = "0.2.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5999e24eaa32083191ba4e425deb75cdf25efefabe5aaccb7446dd0d4122a3f5" +checksum = "888d0c3c6db53c0fdab160d2ed5e12ba745383d3e85813f2ea0f2b1475ab553f" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.96", ] [[package]] @@ -6409,11 +6442,11 @@ dependencies = [ [[package]] name = "thiserror" -version = "2.0.3" +version = "2.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c006c85c7651b3cf2ada4584faa36773bd07bac24acfb39f3c431b36d7e667aa" +checksum = "d452f284b73e6d76dd36758a0c8684b1d5be31f92b89d07fd5822175732206fc" dependencies = [ - "thiserror-impl 2.0.3", + "thiserror-impl 2.0.11", ] [[package]] @@ -6424,18 +6457,18 @@ checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.96", ] [[package]] name = "thiserror-impl" -version = 
"2.0.3" +version = "2.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f077553d607adc1caf65430528a576c757a71ed73944b66ebb58ef2bbd243568" +checksum = "26afc1baea8a989337eeb52b6e72a039780ce45c3edfcc9c5b9d112feeb173c2" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.96", ] [[package]] @@ -6459,9 +6492,9 @@ dependencies = [ [[package]] name = "time" -version = "0.3.36" +version = "0.3.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5dfd88e563464686c916c7e46e623e520ddc6d79fa6641390f2e3fa86e83e885" +checksum = "35e7868883861bd0e56d9ac6efcaaca0d6d5d82a2a7ec8209ff492c07cf37b21" dependencies = [ "deranged", "itoa", @@ -6480,9 +6513,9 @@ checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3" [[package]] name = "time-macros" -version = "0.2.18" +version = "0.2.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f252a68540fde3a3877aeea552b832b40ab9a69e318efd078774a01ddee1ccf" +checksum = "2834e6017e3e5e4b9834939793b282bc03b37a3336245fa820e35e233e2a85de" dependencies = [ "num-conv", "time-core", @@ -6509,9 +6542,9 @@ dependencies = [ [[package]] name = "tinyvec" -version = "1.8.0" +version = "1.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "445e881f4f6d382d5f27c034e25eb92edd7c784ceab92a0937db7f2e9471b938" +checksum = "022db8904dfa342efe721985167e9fcd16c29b226db4397ed752a761cfce81e8" dependencies = [ "tinyvec_macros", ] @@ -6524,9 +6557,9 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" -version = "1.41.1" +version = "1.43.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22cfb5bee7a6a52939ca9224d6ac897bb669134078daa8735560897f69de4d33" +checksum = "3d61fa4ffa3de412bfea335c6ecff681de2b609ba3c77ef3e00e521813a9ed9e" dependencies = [ "backtrace", "bytes", @@ -6542,13 +6575,13 @@ dependencies = [ [[package]] name = 
"tokio-macros" -version = "2.4.0" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "693d596312e88961bc67d7f1f97af8a70227d9f90c31bba5806eec004978d752" +checksum = "6e06d43f1345a3bcd39f6a56dbb7dcab2ba47e68e8ac134855e7e2bdbaf8cab8" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.96", ] [[package]] @@ -6599,9 +6632,9 @@ dependencies = [ [[package]] name = "tokio-stream" -version = "0.1.16" +version = "0.1.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4f4e6ce100d0eb49a2734f8c0812bcd324cf357d21810932c5df6b96ef2b86f1" +checksum = "eca58d7bba4a75707817a2c44174253f9236b2d5fbd055602e9d5c07c139a047" dependencies = [ "futures-core", "pin-project-lite", @@ -6626,9 +6659,9 @@ dependencies = [ [[package]] name = "tokio-util" -version = "0.7.12" +version = "0.7.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "61e7c3654c13bcd040d4a03abee2c75b1d14a37b423cf5a813ceae1cc903ec6a" +checksum = "d7fcaa8d55a2bdd6b83ace262b016eca0d79ee02818c5c1bcdf0305114081078" dependencies = [ "bytes", "futures-core", @@ -6660,27 +6693,28 @@ dependencies = [ [[package]] name = "toml_edit" -version = "0.22.22" +version = "0.22.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ae48d6208a266e853d946088ed816055e556cc6028c5e8e2b84d9fa5dd7c7f5" +checksum = "02a8b472d1a3d7c18e2d61a489aee3453fd9031c33e4f55bd533f4a7adca1bee" dependencies = [ - "indexmap 2.6.0", + "indexmap 2.7.1", "serde", "serde_spanned", "toml_datetime", - "winnow", + "winnow 0.7.0", ] [[package]] name = "tower" -version = "0.5.1" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2873938d487c3cfb9aed7546dc9f2711d867c9f90c46b889989a2cb84eba6b4f" +checksum = "d039ad9159c98b70ecfd540b2573b97f7f52c3e8d9f8ad57a24b916a536975f9" dependencies = [ "futures-core", "futures-util", "pin-project-lite", - "sync_wrapper 0.1.2", + "sync_wrapper 
1.0.2", + "tokio", "tower-layer", "tower-service", ] @@ -6716,7 +6750,7 @@ checksum = "395ae124c09f9e6918a2310af6038fba074bcf474ac352496d5910dd59a2226d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.96", ] [[package]] @@ -6825,15 +6859,15 @@ checksum = "eaea85b334db583fe3274d12b4cd1880032beab409c0d774be044d4480ab9a94" [[package]] name = "unicode-bidi" -version = "0.3.17" +version = "0.3.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ab17db44d7388991a428b2ee655ce0c212e862eff1768a455c58f9aad6e7893" +checksum = "5c1cb5db39152898a79168971543b1cb5020dff7fe43c8dc468b0885f5e29df5" [[package]] name = "unicode-ident" -version = "1.0.14" +version = "1.0.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "adb9e6ca4f869e1180728b7950e35922a7fc6397f7b641499e8f3ef06e50dc83" +checksum = "a210d160f08b701c8721ba1c726c11662f877ea6b7094007e1ca9a1041945034" [[package]] name = "unicode-normalization" @@ -6856,6 +6890,12 @@ version = "0.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e51733f11c9c4f72aa0c160008246859e340b00807569a0da0e7a1079b27ba85" +[[package]] +name = "unicode-width" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fc81956842c57dac11422a97c3b8195a1ff727f06e85c84ed2e8aa277c9a0fd" + [[package]] name = "unicode-xid" version = "0.2.6" @@ -6931,18 +6971,18 @@ dependencies = [ [[package]] name = "uuid" -version = "1.11.0" +version = "1.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8c5f0a0af699448548ad1a2fbf920fb4bee257eae39953ba95cb84891a0446a" +checksum = "b3758f5e68192bb96cc8f9b7e2c2cfdabb435499a28499a42f8f984092adad4b" dependencies = [ "serde", ] [[package]] name = "valuable" -version = "0.1.0" +version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d" +checksum 
= "ba73ea9cf16a25df0c8caa16c51acb937d5712a8429db78a3ee29d5dcacd3a65" [[package]] name = "vcpkg" @@ -7043,6 +7083,15 @@ version = "0.11.0+wasi-snapshot-preview1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" +[[package]] +name = "wasi" +version = "0.13.3+wasi-0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26816d2e1a4a36a2940b96c5296ce403917633dff8f3440e9b236ed6f6bacad2" +dependencies = [ + "wit-bindgen-rt", +] + [[package]] name = "wasite" version = "0.1.0" @@ -7051,35 +7100,35 @@ checksum = "b8dad83b4f25e74f184f64c43b150b91efe7647395b42289f38e50566d82855b" [[package]] name = "wasm-bindgen" -version = "0.2.96" +version = "0.2.100" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "21d3b25c3ea1126a2ad5f4f9068483c2af1e64168f847abe863a526b8dbfe00b" +checksum = "1edc8929d7499fc4e8f0be2262a241556cfc54a0bea223790e71446f2aab1ef5" dependencies = [ "cfg-if", "once_cell", + "rustversion", "wasm-bindgen-macro", ] [[package]] name = "wasm-bindgen-backend" -version = "0.2.96" +version = "0.2.100" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "52857d4c32e496dc6537646b5b117081e71fd2ff06de792e3577a150627db283" +checksum = "2f0a0651a5c2bc21487bde11ee802ccaf4c51935d0d3d42a6101f98161700bc6" dependencies = [ "bumpalo", "log", - "once_cell", "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.96", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-futures" -version = "0.4.46" +version = "0.4.50" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "951fe82312ed48443ac78b66fa43eded9999f738f6022e67aead7b708659e49a" +checksum = "555d470ec0bc3bb57890405e5d4322cc9ea83cebb085523ced7be4144dac1e61" dependencies = [ "cfg-if", "js-sys", @@ -7090,9 +7139,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = "0.2.96" +version = "0.2.100" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "920b0ffe069571ebbfc9ddc0b36ba305ef65577c94b06262ed793716a1afd981" +checksum = "7fe63fc6d09ed3792bd0897b314f53de8e16568c2b3f7982f468c0bf9bd0b407" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -7100,22 +7149,25 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.96" +version = "0.2.100" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bf59002391099644be3524e23b781fa43d2be0c5aa0719a18c0731b9d195cab6" +checksum = "8ae87ea40c9f689fc23f209965b6fb8a99ad69aeeb0231408be24920604395de" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.96", "wasm-bindgen-backend", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" -version = "0.2.96" +version = "0.2.100" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5047c5392700766601942795a436d7d2599af60dcc3cc1248c9120bfb0827b0" +checksum = "1a05d73b933a847d6cccdda8f838a22ff101ad9bf93e33684f39c1f5f0eece3d" +dependencies = [ + "unicode-ident", +] [[package]] name = "wasmtimer" @@ -7133,9 +7185,9 @@ dependencies = [ [[package]] name = "web-sys" -version = "0.3.73" +version = "0.3.77" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "476364ff87d0ae6bfb661053a9104ab312542658c3d8f963b7ace80b6f9b26b9" +checksum = "33b6dd2ef9186f1f2072e409e99cd22a975331a6b3591b12c764e0e55c60d5d2" dependencies = [ "js-sys", "wasm-bindgen", @@ -7388,9 +7440,18 @@ checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" [[package]] name = "winnow" -version = "0.6.20" +version = "0.6.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "36c1fec1a2bb5866f07c25f68c26e565c4c200aebb96d7e55710c19d3e8ac49b" +checksum = "1e90edd2ac1aa278a5c4599b1d89cf03074b610800f866d4026dc199d7929a28" +dependencies = [ + "memchr", +] + +[[package]] +name = "winnow" +version = "0.7.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "7e49d2d35d3fad69b39b94139037ecfb4f359f08958b9c11e7315ce770462419" dependencies = [ "memchr", ] @@ -7405,6 +7466,15 @@ dependencies = [ "windows-sys 0.48.0", ] +[[package]] +name = "wit-bindgen-rt" +version = "0.33.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3268f3d866458b787f390cf61f4bbb563b922d091359f9608842999eaee3943c" +dependencies = [ + "bitflags 2.8.0", +] + [[package]] name = "write16" version = "1.0.0" @@ -7471,7 +7541,7 @@ checksum = "2380878cad4ac9aac1e2435f3eb4020e8374b5f13c296cb75b4620ff8e229154" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.96", "synstructure", ] @@ -7493,7 +7563,7 @@ checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.96", ] [[package]] @@ -7513,7 +7583,7 @@ checksum = "595eed982f7d355beb85837f651fa22e90b3c044842dc7f2c2842c086f295808" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.96", "synstructure", ] @@ -7534,7 +7604,7 @@ checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.96", ] [[package]] @@ -7556,7 +7626,7 @@ checksum = "6eafa6dfb17584ea3e2bd6e76e0cc15ad7af12b09abdd1ca55961bed9b1063c6" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.96", ] [[package]] From 195946f42aabe1a02a14d98de19cbc1a7f2548f9 Mon Sep 17 00:00:00 2001 From: nikkolasg Date: Thu, 17 Oct 2024 22:49:12 +0200 Subject: [PATCH 02/47] test with receipts encoding --- mp2-common/src/eth.rs | 386 ++++++++++++++++++++++-- mp2-v1/src/block_extraction/circuit.rs | 2 +- mp2-v1/src/block_extraction/mod.rs | 2 +- mp2-v1/tests/common/block_extraction.rs | 2 +- 4 files changed, 356 insertions(+), 36 deletions(-) diff --git a/mp2-common/src/eth.rs b/mp2-common/src/eth.rs index eac6413e4..986134b02 100644 --- 
a/mp2-common/src/eth.rs +++ b/mp2-common/src/eth.rs @@ -1,20 +1,27 @@ //! Module containing several structure definitions for Ethereum related operations //! such as fetching blocks, transactions, creating MPTs, getting proofs, etc. use alloy::{ + consensus::{ReceiptEnvelope as CRE, ReceiptWithBloom}, eips::BlockNumberOrTag, + network::{eip2718::Encodable2718, TransactionResponse}, primitives::{Address, B256, U256}, providers::{Provider, RootProvider}, rlp::Encodable as AlloyEncodable, - rpc::types::{Block, EIP1186AccountProofResponse}, - transports::Transport, + rpc::types::{ + Block, BlockTransactions, EIP1186AccountProofResponse, ReceiptEnvelope, Transaction, + }, + transports::{ + http::{Client, Http}, + Transport, + }, }; -use anyhow::{bail, Result}; +use anyhow::{bail, Context, Result}; use eth_trie::{EthTrie, MemoryDB, Trie}; use ethereum_types::H256; use itertools::Itertools; use log::debug; use log::warn; -use rlp::Rlp; +use rlp::{Encodable, Rlp}; use serde::{Deserialize, Serialize}; use std::{array::from_fn as create_array, sync::Arc}; @@ -23,7 +30,7 @@ use crate::{mpt_sequential::utils::bytes_to_nibbles, rlp::MAX_KEY_NIBBLE_LEN, ut /// Retry number for the RPC request const RETRY_NUM: usize = 3; -pub trait BlockUtil { +pub trait Rlpable { fn block_hash(&self) -> Vec { keccak256(&self.rlp()) } @@ -358,7 +365,7 @@ impl ProofQuery { } } -impl BlockUtil for alloy::rpc::types::Block { +impl Rlpable for alloy::rpc::types::Block { fn rlp(&self) -> Vec { let mut out = Vec::new(); self.header.encode(&mut out); @@ -366,7 +373,13 @@ impl BlockUtil for alloy::rpc::types::Block { } } -impl BlockUtil for alloy::rpc::types::Header { +impl Rlpable for alloy::rpc::types::Header { + fn rlp(&self) -> Vec { + self.inner.rlp() + } +} + +impl Rlpable for alloy::consensus::Header { fn rlp(&self) -> Vec { let mut out = Vec::new(); self.encode(&mut out); @@ -374,6 +387,265 @@ impl BlockUtil for alloy::rpc::types::Header { } } +pub struct BlockUtil { + pub block: Block, + pub 
txs: Vec, + pub receipts_trie: EthTrie, +} + +pub struct TxWithReceipt(Transaction, ReceiptEnvelope); +impl TxWithReceipt { + pub fn receipt(&self) -> ReceiptEnvelope { + self.1.clone() + } +} + +impl BlockUtil { + pub async fn fetch(t: RootProvider>, id: BlockNumberOrTag) -> Result { + let block = t + .get_block(id.into(), alloy::rpc::types::BlockTransactionsKind::Full) + .await? + .context("can't get block")?; + let receipts = t + .get_block_receipts(id.into()) + .await? + .context("can't get receipts")?; + let BlockTransactions::Full(all_tx) = block.transactions.clone() else { + bail!("can't see full transactions"); + }; + let tx_receipts: Vec<(_, _)> = receipts + .into_iter() + .map(|receipt| { + ( + all_tx + .iter() + .find(|tx| tx.tx_hash() == receipt.transaction_hash) + .expect("no tx with receipt hash") + .clone(), + receipt, + ) + }) + .collect(); + // check receipt root + let memdb = Arc::new(MemoryDB::new(true)); + let mut receipts_trie = EthTrie::new(Arc::clone(&memdb)); + let consensus_receipts = tx_receipts + .into_iter() + .map(|tr| { + let receipt = tr.1; + let tx_index = receipt.transaction_index.unwrap().rlp_bytes(); + //let mut buff = Vec::new(); + let receipt_primitive = receipt.inner.clone(); + let receipt_primitive = match receipt_primitive { + CRE::Legacy(ref r) => CRE::Legacy(from_rpc_logs_to_consensus(&r)), + CRE::Eip2930(ref r) => CRE::Eip2930(from_rpc_logs_to_consensus(&r)), + CRE::Eip1559(ref r) => CRE::Eip1559(from_rpc_logs_to_consensus(&r)), + CRE::Eip4844(ref r) => CRE::Eip4844(from_rpc_logs_to_consensus(&r)), + CRE::Eip7702(ref r) => CRE::Eip7702(from_rpc_logs_to_consensus(&r)), + _ => panic!("aie"), + }; + let body_rlp = receipt_primitive.encoded_2718(); + + receipts_trie + .insert(&tx_index, &body_rlp) + .expect("can't insert tx"); + TxWithReceipt(tr.0, receipt_primitive) + }) + .collect::>(); + Ok(BlockUtil { + block, + txs: consensus_receipts, + receipts_trie, + }) + } + + // recompute the receipts trie by first converting all 
receipts form RPC type to consensus type + // since in Alloy these are two different types and RLP functions are only implemented for + // consensus ones. + // TODO: transaction trie + fn check(&mut self) -> Result<()> { + let computed = self.receipts_trie.root_hash().expect("root hash problem"); + let expected = self.block.header.receipts_root; + assert_eq!(expected.to_vec(), computed.0.to_vec()); + Ok(()) + } +} + +fn from_rpc_logs_to_consensus( + r: &ReceiptWithBloom, +) -> ReceiptWithBloom { + ReceiptWithBloom { + logs_bloom: r.logs_bloom, + receipt: alloy::consensus::Receipt { + status: r.receipt.status, + cumulative_gas_used: r.receipt.cumulative_gas_used, + logs: r + .receipt + .logs + .iter() + .map(|l| alloy::primitives::Log { + address: l.inner.address, + data: l.inner.data.clone(), + }) + .collect(), + }, + } +} + +// for compatibility check with alloy +#[cfg(test)] +mod tryethers { + + use std::sync::Arc; + + use anyhow::Result; + use eth_trie::{EthTrie, MemoryDB, Trie}; + use ethers::{ + providers::{Http, Middleware, Provider}, + types::{ + Address, Block, BlockId, Bytes, EIP1186ProofResponse, Transaction, TransactionReceipt, + H256, U64, + }, + }; + use rlp::{Encodable, Rlp, RlpStream}; + + /// A wrapper around a transaction and its receipt. The receipt is used to filter + /// bad transactions, so we only compute over valid transactions. 
+ pub struct TxAndReceipt(Transaction, TransactionReceipt); + + impl TxAndReceipt { + pub fn tx(&self) -> &Transaction { + &self.0 + } + pub fn receipt(&self) -> &TransactionReceipt { + &self.1 + } + pub fn tx_rlp(&self) -> Bytes { + self.0.rlp() + } + // TODO: this should be upstreamed to ethers-rs + pub fn receipt_rlp(&self) -> Bytes { + let tx_type = self.tx().transaction_type; + let mut rlp = RlpStream::new(); + rlp.begin_unbounded_list(); + match &self.1.status { + Some(s) if s.as_u32() == 1 => rlp.append(s), + _ => rlp.append_empty_data(), + }; + rlp.append(&self.1.cumulative_gas_used) + .append(&self.1.logs_bloom) + .append_list(&self.1.logs); + + rlp.finalize_unbounded_list(); + let rlp_bytes: Bytes = rlp.out().freeze().into(); + let mut encoded = vec![]; + match tx_type { + // EIP-2930 (0x01) + Some(x) if x == U64::from(0x1) => { + encoded.extend_from_slice(&[0x1]); + encoded.extend_from_slice(rlp_bytes.as_ref()); + encoded.into() + } + // EIP-1559 (0x02) + Some(x) if x == U64::from(0x2) => { + encoded.extend_from_slice(&[0x2]); + encoded.extend_from_slice(rlp_bytes.as_ref()); + encoded.into() + } + _ => rlp_bytes, + } + } + } + /// Structure containing the block header and its transactions / receipts. Amongst other things, + /// it is used to create a proof of inclusion for any transaction inside this block. + pub struct BlockData { + pub block: ethers::types::Block, + pub txs: Vec, + // TODO: add generics later - this may be re-used amongst different workers + pub tx_trie: EthTrie, + pub receipts_trie: EthTrie, + } + + impl BlockData { + pub async fn fetch + Send + Sync>( + blockid: T, + url: String, + ) -> Result { + let provider = + Provider::::try_from(url).expect("could not instantiate HTTP Provider"); + Self::fetch_from(&provider, blockid).await + } + pub async fn fetch_from + Send + Sync>( + provider: &Provider, + blockid: T, + ) -> Result { + let block = provider + .get_block_with_txs(blockid) + .await? 
+ .expect("should have been a block"); + let receipts = provider.get_block_receipts(block.number.unwrap()).await?; + + let tx_with_receipt = block + .transactions + .clone() + .into_iter() + .map(|tx| { + let tx_hash = tx.hash(); + let r = receipts + .iter() + .find(|r| r.transaction_hash == tx_hash) + .expect("RPC sending invalid data"); + // TODO remove cloning + TxAndReceipt(tx, r.clone()) + }) + .collect::>(); + + // check transaction root + let memdb = Arc::new(MemoryDB::new(true)); + let mut tx_trie = EthTrie::new(Arc::clone(&memdb)); + for tr in tx_with_receipt.iter() { + tx_trie + .insert(&tr.receipt().transaction_index.rlp_bytes(), &tr.tx().rlp()) + .expect("can't insert tx"); + } + + // check receipt root + let memdb = Arc::new(MemoryDB::new(true)); + let mut receipts_trie = EthTrie::new(Arc::clone(&memdb)); + for tr in tx_with_receipt.iter() { + if tr.tx().transaction_index.unwrap() == U64::from(0) { + println!( + "Ethers: Index {} -> {}", + tr.tx().transaction_index.unwrap(), + hex::encode(tr.receipt_rlp()) + ); + } + receipts_trie + .insert( + &tr.receipt().transaction_index.rlp_bytes(), + // TODO: make getter value for rlp encoding + &tr.receipt_rlp(), + ) + .expect("can't insert tx"); + } + let computed = tx_trie.root_hash().expect("root hash problem"); + let expected = block.transactions_root; + assert_eq!(expected, computed); + + let computed = receipts_trie.root_hash().expect("root hash problem"); + let expected = block.receipts_root; + assert_eq!(expected, computed); + + Ok(BlockData { + block, + tx_trie, + receipts_trie, + txs: tx_with_receipt, + }) + } + } +} + #[cfg(test)] mod test { #[cfg(feature = "ci")] @@ -388,39 +660,87 @@ mod test { }; use hashbrown::HashMap; - use crate::{ - types::MAX_BLOCK_LEN, - utils::{Endianness, Packer}, - }; + use crate::utils::{Endianness, Packer}; use mp2_test::eth::{get_mainnet_url, get_sepolia_url}; + use super::*; + #[tokio::test] - #[ignore] - async fn test_rlp_andrus() -> Result<()> { + async fn 
test_block_receipt_trie() -> Result<()> { let url = get_sepolia_url(); - let block_number1 = 5674446; - let block_number2 = block_number1 + 1; + // get some tx and receipt let provider = ProviderBuilder::new().on_http(url.parse().unwrap()); - let block = provider - .get_block(BlockNumberOrTag::Number(block_number1).into(), false.into()) - .await? - .unwrap(); - let comp_hash = keccak256(&block.rlp()); - let block_next = provider - .get_block(BlockNumberOrTag::from(block_number2).into(), false.into()) - .await? - .unwrap(); - let exp_hash = block_next.header.parent_hash; - assert!(comp_hash == exp_hash.as_slice()); - assert!( - block.rlp().len() <= MAX_BLOCK_LEN, - " rlp len = {}", - block.rlp().len() + let bn = 6893107; + let bna = BlockNumberOrTag::Number(bn); + let mut block = BlockUtil::fetch(provider, bna).await?; + // check if we compute the RLP correctly now + block.check()?; + let mut be = tryethers::BlockData::fetch(bn, url).await?; + let er = be.receipts_trie.root_hash()?; + let ar = block.receipts_trie.root_hash()?; + assert_eq!(er, ar); + // dissect one receipt entry in the trie + let tx_receipt = block.txs.first().clone().unwrap(); + // https://sepolia.etherscan.io/tx/0x9bef12fafd3962b0e0d66b738445d6ea2c1f3daabe10c889bd1916acc75d698b#eventlog + println!( + "Looking at tx hash on sepolia: {}", + hex::encode(tx_receipt.0.tx_hash()) ); + // in the MPT trie it's + // RLP ( RLP(Index), RLP ( LOGS )) + // the second component is done like that: + // + let rlp_encoding = tx_receipt.receipt().encoded_2718(); + let state = rlp::Rlp::new(&rlp_encoding); + assert!(state.is_list()); + // index 0 -> status, + // index 1 -> gas used + // index 2 -> logs_bloom + // index 3 -> logs + let logs_state = state.at(3).context("can't access logs field3")?; + assert!(logs_state.is_list()); + // there should be only one log for this tx + let log_state = logs_state.at(0).context("can't access first log")?; + assert!(log_state.is_list()); + // log: + // 0: address where it has 
been emitted + // 1: Topics (4 topics max, with 1 mandatory, the event sig) + // 2: Bytes32 array + let log_address: Vec = log_state.val_at(0).context("can't decode address")?; + let hex_address = hex::encode(&log_address); + assert_eq!( + hex_address, + "BBd3EDd4D3b519c0d14965d9311185CFaC8c3220".to_lowercase(), + ); + let topics: Vec> = log_state.list_at(1).context("can't decode topics")?; + // Approval (index_topic_1 address owner, index_topic_2 address approved, index_topic_3 uint256 tokenId)View Source + // first topic is signature of the event keccak(fn_name,args...) + let expected_sig = "8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e5b200ac8c7c3b925"; + let found_sig = hex::encode(&topics[0]); + assert_eq!(expected_sig, found_sig); + // second topic is owner + let expected_owner = hex::encode(left_pad32(&hex::decode( + "66d2F437a12d8f9f340C226b1EDC605124e763A6", + )?)); + let found_owner = hex::encode(&topics[1]); + assert_eq!(expected_owner, found_owner); + // third topic is approved + let expected_approved = hex::encode(left_pad32(&hex::decode( + "094f1570A8B5fc99d6756aD54DF0Fd6906795cd3", + )?)); + let found_approved = hex::encode(left_pad32(&topics[2])); + assert_eq!(expected_approved, found_approved); + // final is tokenid - not in topic + let expected_data = "000000000000000000000000000000000000000000115eec47f6cf7e35000000"; + let log_data: Vec = log_state.val_at(2).context("can't decode log data")?; + let found_data = hex::encode(&left_pad32( + &log_data.into_iter().take(32).collect::>(), + )); + assert_eq!(expected_data, found_data); + Ok(()) } - use super::*; #[tokio::test] async fn test_sepolia_slot() -> Result<()> { #[cfg(feature = "ci")] @@ -610,7 +930,7 @@ mod test { let previous_block = provider .get_block_by_number( BlockNumberOrTag::Number(block.header.number - 1), - true.into(), + alloy::rpc::types::BlockTransactionsKind::Full, ) .await? 
.unwrap(); @@ -673,7 +993,7 @@ mod test { } /// TEST to compare alloy with ethers pub struct RLPBlock<'a, X>(pub &'a ethers::types::Block); - impl BlockUtil for ethers::types::Block { + impl Rlpable for ethers::types::Block { fn rlp(&self) -> Vec { let rlp = RLPBlock(self); rlp::encode(&rlp).to_vec() diff --git a/mp2-v1/src/block_extraction/circuit.rs b/mp2-v1/src/block_extraction/circuit.rs index 0600285a8..ceb6df077 100644 --- a/mp2-v1/src/block_extraction/circuit.rs +++ b/mp2-v1/src/block_extraction/circuit.rs @@ -131,7 +131,7 @@ mod test { use mp2_common::{eth::left_pad_generic, u256, utils::ToFields, C, F}; use mp2_common::{ - eth::BlockUtil, + eth::Rlpable, types::CBuilder, utils::{Endianness, Packer}, D, diff --git a/mp2-v1/src/block_extraction/mod.rs b/mp2-v1/src/block_extraction/mod.rs index de6648f41..261cf95d1 100644 --- a/mp2-v1/src/block_extraction/mod.rs +++ b/mp2-v1/src/block_extraction/mod.rs @@ -69,7 +69,7 @@ mod test { }; use anyhow::Result; use mp2_common::{ - eth::BlockUtil, + eth::Rlpable, proof::deserialize_proof, utils::{Endianness, FromFields, Packer, ToFields}, C, D, F, diff --git a/mp2-v1/tests/common/block_extraction.rs b/mp2-v1/tests/common/block_extraction.rs index 1bda85eba..933823e56 100644 --- a/mp2-v1/tests/common/block_extraction.rs +++ b/mp2-v1/tests/common/block_extraction.rs @@ -1,7 +1,7 @@ use alloy::primitives::U256; use anyhow::Result; use mp2_common::{ - eth::BlockUtil, + eth::{left_pad_generic, BlockUtil, Rlpable}, proof::deserialize_proof, utils::{Endianness, Packer, ToFields}, C, D, F, From 129984fdf706cf1907e76fba0b00537b762c432e Mon Sep 17 00:00:00 2001 From: nikkolasg Date: Fri, 18 Oct 2024 16:28:08 +0200 Subject: [PATCH 03/47] wip --- mp2-common/src/eth.rs | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/mp2-common/src/eth.rs b/mp2-common/src/eth.rs index 986134b02..bf9de0c62 100644 --- a/mp2-common/src/eth.rs +++ b/mp2-common/src/eth.rs @@ -687,9 +687,13 @@ mod test { 
hex::encode(tx_receipt.0.tx_hash()) ); // in the MPT trie it's - // RLP ( RLP(Index), RLP ( LOGS )) + // RLP ( RLP(Index), RLP ( DATA )) // the second component is done like that: - // + // DATA = RLP [ Rlp(status), Rlp(gas_used), Rlp(logs_bloom), Rlp(logs) ] + // it contains multiple logs so + // logs = RLP_LIST(RLP(logs[0]), RLP(logs[1])...) + // Each RLP(logs[0]) = RLP([ RLP(Address), RLP(topics), RLP(data)]) + // RLP(topics) is a list with up to 4 topics let rlp_encoding = tx_receipt.receipt().encoded_2718(); let state = rlp::Rlp::new(&rlp_encoding); assert!(state.is_list()); @@ -699,8 +703,7 @@ mod test { // index 3 -> logs let logs_state = state.at(3).context("can't access logs field3")?; assert!(logs_state.is_list()); - // there should be only one log for this tx - let log_state = logs_state.at(0).context("can't access first log")?; + let log_state = logs_state.at(0).context("can't access single log state")?; assert!(log_state.is_list()); // log: // 0: address where it has been emitted @@ -712,6 +715,7 @@ mod test { hex_address, "BBd3EDd4D3b519c0d14965d9311185CFaC8c3220".to_lowercase(), ); + // the topics are in a list let topics: Vec> = log_state.list_at(1).context("can't decode topics")?; // Approval (index_topic_1 address owner, index_topic_2 address approved, index_topic_3 uint256 tokenId)View Source // first topic is signature of the event keccak(fn_name,args...) 
From 485dcb9b61cfaafdcfb73343cac3c69892940b20 Mon Sep 17 00:00:00 2001 From: nikkolasg Date: Mon, 21 Oct 2024 21:29:05 +0200 Subject: [PATCH 04/47] further testing --- mp2-common/src/eth.rs | 65 ++++++++++++++++++++++++++++++++++++++++--- 1 file changed, 61 insertions(+), 4 deletions(-) diff --git a/mp2-common/src/eth.rs b/mp2-common/src/eth.rs index bf9de0c62..a75791de0 100644 --- a/mp2-common/src/eth.rs +++ b/mp2-common/src/eth.rs @@ -446,6 +446,11 @@ impl BlockUtil { }; let body_rlp = receipt_primitive.encoded_2718(); + println!( + "TX index {} RLP encoded: {:?}", + receipt.transaction_index.unwrap(), + tx_index.to_vec() + ); receipts_trie .insert(&tx_index, &body_rlp) .expect("can't insert tx"); @@ -615,9 +620,9 @@ mod tryethers { for tr in tx_with_receipt.iter() { if tr.tx().transaction_index.unwrap() == U64::from(0) { println!( - "Ethers: Index {} -> {}", + "Ethers: Index {} -> {:?}", tr.tx().transaction_index.unwrap(), - hex::encode(tr.receipt_rlp()) + tr.receipt_rlp().to_vec() ); } receipts_trie @@ -652,7 +657,8 @@ mod test { use std::env; use std::str::FromStr; - use alloy::{primitives::Bytes, providers::ProviderBuilder}; + use alloy::{primitives::Bytes, providers::ProviderBuilder, rpc::types::BlockTransactionsKind}; + use eth_trie::Nibbles; use ethereum_types::U64; use ethers::{ providers::{Http, Middleware}, @@ -660,7 +666,11 @@ mod test { }; use hashbrown::HashMap; - use crate::utils::{Endianness, Packer}; + use crate::{ + mpt_sequential::utils::nibbles_to_bytes, + types::MAX_BLOCK_LEN, + utils::{Endianness, Packer}, + }; use mp2_test::eth::{get_mainnet_url, get_sepolia_url}; use super::*; @@ -695,14 +705,26 @@ mod test { // Each RLP(logs[0]) = RLP([ RLP(Address), RLP(topics), RLP(data)]) // RLP(topics) is a list with up to 4 topics let rlp_encoding = tx_receipt.receipt().encoded_2718(); + println!( + "Size of RLP encoded receipt in bytes: {}", + rlp_encoding.len() + ); let state = rlp::Rlp::new(&rlp_encoding); assert!(state.is_list()); // index 0 -> 
status, // index 1 -> gas used // index 2 -> logs_bloom // index 3 -> logs + let gas_used: Vec = state.val_at(1).context("can't access gas used")?; + println!("gas used byte length: {}", gas_used.len()); + let bloom: Vec = state.val_at(2).context("can't access bloom")?; + println!("bloom byte length: {}", bloom.len()); + //let logs: Vec> = state.list_at(3).context("can't access logs")?; + //println!("logs byte length: {}", logs.len()); + let logs_state = state.at(3).context("can't access logs field3")?; assert!(logs_state.is_list()); + println!("logs in hex: {}", hex::encode(logs_state.data()?)); let log_state = logs_state.at(0).context("can't access single log state")?; assert!(log_state.is_list()); // log: @@ -742,6 +764,41 @@ mod test { )); assert_eq!(expected_data, found_data); + let mpt_key = tx_receipt.0.transaction_index.unwrap(); + let proof = block + .receipts_trie + .get_proof(&mpt_key.rlp_bytes()) + .expect("can't retrieve mpt proof"); + let mpt_node = proof.last().unwrap(); + println!("MPT LEAF NODE: {:?}", mpt_node); + // First decode the top level header + let top_header = rlp::Rlp::new(mpt_node); + assert!(top_header.is_list()); + // then extract the buffer containing all elements (key and value) + let top_info = top_header.payload_info()?; + println!("TOP level header: {:?}", top_info); + let list_buff = &mpt_node[top_info.header_len..top_info.header_len + top_info.value_len]; + // then check the key and make sure it's equal to the RLP encoding of the index + let key_header = rlp::Rlp::new(list_buff); + assert!(!key_header.is_list()); + // key is RLP( compact ( RLP(index))) + let key_info = key_header.payload_info()?; + let compact_key = &list_buff[key_info.header_len..key_info.header_len + key_info.value_len]; + let decoded_key = rlp::encode(&nibbles_to_bytes( + Nibbles::from_compact(compact_key).nibbles(), + )); + assert_eq!(decoded_key, &mpt_key.rlp_bytes().to_vec(),); + + // then check if the value portion fits what we tested above + // value is 
RLP ( RLP(status, etc...)) + let outer_value_min = top_info.header_len + key_info.header_len + key_info.value_len; + let outer_value_buff = &mpt_node[outer_value_min..]; + let outer_value_state = rlp::Rlp::new(outer_value_buff); + assert!(!outer_value_state.is_list()); + let outer_payload = outer_value_state.payload_info()?; + let inner_value_min = outer_value_min + outer_payload.header_len; + let inner_value_buff = &mpt_node[inner_value_min..]; + assert_eq!(rlp_encoding, inner_value_buff); Ok(()) } From 68a053279e4b63c93b03d1aaeb98cddebba65679 Mon Sep 17 00:00:00 2001 From: Zack Youell Date: Thu, 7 Nov 2024 11:00:36 +0000 Subject: [PATCH 05/47] WIP: Receipt Trie leaves --- Cargo.lock | 1 + mp2-common/src/array.rs | 185 ++++++- mp2-common/src/eth.rs | 480 +++++++++++++++-- mp2-common/src/group_hashing/mod.rs | 2 + mp2-common/src/mpt_sequential/key.rs | 26 +- .../src/mpt_sequential/leaf_or_extension.rs | 44 +- mp2-common/src/mpt_sequential/mod.rs | 309 ++++++----- mp2-common/src/rlp.rs | 24 +- mp2-test/Cargo.toml | 1 + mp2-test/src/mpt_sequential.rs | 152 ++++++ mp2-v1/src/contract_extraction/branch.rs | 18 +- mp2-v1/src/length_extraction/branch.rs | 8 +- mp2-v1/src/lib.rs | 1 + mp2-v1/src/receipt_extraction/leaf.rs | 510 ++++++++++++++++++ mp2-v1/src/receipt_extraction/mod.rs | 2 + .../src/receipt_extraction/public_inputs.rs | 76 +++ mp2-v1/src/values_extraction/branch.rs | 12 +- rustc-ice-2024-11-04T12_36_50-74186.txt | 63 +++ rustc-ice-2024-11-04T12_37_01-74253.txt | 62 +++ rustc-ice-2024-11-04T12_37_13-74307.txt | 62 +++ 20 files changed, 1827 insertions(+), 211 deletions(-) create mode 100644 mp2-v1/src/receipt_extraction/leaf.rs create mode 100644 mp2-v1/src/receipt_extraction/mod.rs create mode 100644 mp2-v1/src/receipt_extraction/public_inputs.rs create mode 100644 rustc-ice-2024-11-04T12_36_50-74186.txt create mode 100644 rustc-ice-2024-11-04T12_37_01-74253.txt create mode 100644 rustc-ice-2024-11-04T12_37_13-74307.txt diff --git a/Cargo.lock b/Cargo.lock 
index 59b6f18fd..6e8b712f9 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3991,6 +3991,7 @@ dependencies = [ "recursion_framework", "ryhope", "serde", + "tokio", ] [[package]] diff --git a/mp2-common/src/array.rs b/mp2-common/src/array.rs index b9be10774..7561f1679 100644 --- a/mp2-common/src/array.rs +++ b/mp2-common/src/array.rs @@ -1,6 +1,9 @@ use crate::{ serialization::{deserialize_long_array, serialize_long_array}, - utils::{less_than_or_equal_to_unsafe, range_check_optimized, Endianness, PackerTarget}, + utils::{ + less_than_or_equal_to_unsafe, less_than_unsafe, range_check_optimized, Endianness, + PackerTarget, + }, }; use anyhow::{anyhow, Result}; use plonky2::{ @@ -605,6 +608,91 @@ where pub fn last(&self) -> T { self.arr[SIZE - 1] } + + /// This function allows you to search a larger [`Array`] by representing it as a number of + /// smaller [`Array`]s with size [`RANDOM_ACCESS_SIZE`], padding the final smaller array where required. + pub fn random_access_large_array, const D: usize>( + &self, + b: &mut CircuitBuilder, + at: Target, + ) -> T { + // We will split the array into smaller arrays of size 64, padding the last array with zeroes if required + let padded_size = (SIZE - 1) / RANDOM_ACCESS_SIZE + 1; + + // Create an array of `Array`s + let arrays: Vec> = (0..padded_size) + .map(|i| Array { + arr: create_array(|j| { + let index = 64 * i + j; + if index < self.arr.len() { + self.arr[index] + } else { + T::from_target(b.zero()) + } + }), + }) + .collect(); + + // We need to express `at` in base 64, we are also assuming that the initial array was smaller than 64^2 = 4096 which we enforce with a range check. + // We also check that `at` is smaller that the size of the array. 
+ let array_size = b.constant(F::from_noncanonical_u64(SIZE as u64)); + let less_than_check = less_than_unsafe(b, at, array_size, 12); + let true_target = b._true(); + b.connect(less_than_check.target, true_target.target); + b.range_check(at, 12); + let (low_bits, high_bits) = b.split_low_high(at, 6, 12); + + // Search each of the smaller arrays for the target at `low_bits` + let first_search = arrays + .into_iter() + .map(|array| { + b.random_access( + low_bits, + array + .arr + .iter() + .map(Targetable::to_target) + .collect::>(), + ) + }) + .collect::>(); + + // Serach the result for the Target at `high_bits` + T::from_target(b.random_access(high_bits, first_search)) + } + + /// Returns [`self[at..at+SUB_SIZE]`]. + /// This is more expensive than [`Self::extract_array`] due to using [`Self::random_access_large_array`] + /// instead of [`Self::value_at`]. This function enforces that the values extracted are within the array. + pub fn extract_array_large< + F: RichField + Extendable, + const D: usize, + const SUB_SIZE: usize, + >( + &self, + b: &mut CircuitBuilder, + at: Target, + ) -> Array { + let m = b.constant(F::from_canonical_usize(SUB_SIZE)); + let array_len = b.constant(F::from_canonical_usize(SIZE)); + let upper_bound = b.add(at, m); + let num_bits_size = SIZE.ilog2() + 1; + + let lt = less_than_or_equal_to_unsafe(b, upper_bound, array_len, num_bits_size as usize); + + let t = b._true(); + b.connect(t.target, lt.target); + + Array:: { + arr: core::array::from_fn(|i| { + let i_target = b.constant(F::from_canonical_usize(i)); + let i_plus_n_target = b.add(at, i_target); + + // out_val = arr[((i+n)<=n+M) * (i+n)] + self.random_access_large_array(b, i_plus_n_target) + }), + } + } } /// Returns the size of the array in 32-bit units, rounded up. 
#[allow(non_snake_case)] @@ -820,6 +908,51 @@ mod test { run_circuit::(ValueAtCircuit { arr, idx, exp }); } + #[test] + fn test_random_access_large_array() { + const SIZE: usize = 512; + #[derive(Clone, Debug)] + struct ValueAtCircuit { + arr: [u8; SIZE], + idx: usize, + exp: u8, + } + impl UserCircuit for ValueAtCircuit + where + F: RichField + Extendable, + { + type Wires = (Array, Target, Target); + fn build(c: &mut CircuitBuilder) -> Self::Wires { + let array = Array::::new(c); + let exp_value = c.add_virtual_target(); + let index = c.add_virtual_target(); + let extracted = array.random_access_large_array(c, index); + c.connect(exp_value, extracted); + (array, index, exp_value) + } + fn prove(&self, pw: &mut PartialWitness, wires: &Self::Wires) { + wires + .0 + .assign(pw, &create_array(|i| F::from_canonical_u8(self.arr[i]))); + pw.set_target(wires.1, F::from_canonical_usize(self.idx)); + pw.set_target(wires.2, F::from_canonical_u8(self.exp)); + } + } + let mut rng = thread_rng(); + let mut arr = [0u8; SIZE]; + rng.fill(&mut arr[..]); + let idx: usize = rng.gen_range(0..SIZE); + let exp = arr[idx]; + run_circuit::(ValueAtCircuit { arr, idx, exp }); + + // Now we check that it fails when the index is too large + let idx = SIZE; + let result = std::panic::catch_unwind(|| { + run_circuit::(ValueAtCircuit { arr, idx, exp }) + }); + assert!(result.is_err()); + } + #[test] fn test_extract_array() { const SIZE: usize = 80; @@ -863,6 +996,56 @@ mod test { run_circuit::(ExtractArrayCircuit { arr, idx, exp }); } + #[test] + fn test_extract_array_large() { + const SIZE: usize = 512; + const SUBSIZE: usize = 40; + #[derive(Clone, Debug)] + struct ExtractArrayCircuit { + arr: [u8; SIZE], + idx: usize, + exp: [u8; SUBSIZE], + } + impl UserCircuit for ExtractArrayCircuit + where + F: RichField + Extendable, + { + type Wires = (Array, Target, Array); + fn build(c: &mut CircuitBuilder) -> Self::Wires { + let array = Array::::new(c); + let index = c.add_virtual_target(); + let 
expected = Array::::new(c); + let extracted = array.extract_array_large::<_, _, SUBSIZE>(c, index); + let are_equal = expected.equals(c, &extracted); + let tru = c._true(); + c.connect(are_equal.target, tru.target); + (array, index, expected) + } + fn prove(&self, pw: &mut PartialWitness, wires: &Self::Wires) { + wires + .0 + .assign(pw, &create_array(|i| F::from_canonical_u8(self.arr[i]))); + pw.set_target(wires.1, F::from_canonical_usize(self.idx)); + wires + .2 + .assign(pw, &create_array(|i| F::from_canonical_u8(self.exp[i]))); + } + } + let mut rng = thread_rng(); + let mut arr = [0u8; SIZE]; + rng.fill(&mut arr[..]); + let idx: usize = rng.gen_range(0..(SIZE - SUBSIZE)); + let exp = create_array(|i| arr[idx + i]); + run_circuit::(ExtractArrayCircuit { arr, idx, exp }); + + // It should panic if we try to extract an array where some of the indices fall outside of (0..SIZE) + let idx = SIZE; + let result = std::panic::catch_unwind(|| { + run_circuit::(ExtractArrayCircuit { arr, idx, exp }) + }); + assert!(result.is_err()); + } + #[test] fn test_contains_subarray() { #[derive(Clone, Debug)] diff --git a/mp2-common/src/eth.rs b/mp2-common/src/eth.rs index a75791de0..7be9e9999 100644 --- a/mp2-common/src/eth.rs +++ b/mp2-common/src/eth.rs @@ -1,21 +1,18 @@ //! Module containing several structure definitions for Ethereum related operations //! such as fetching blocks, transactions, creating MPTs, getting proofs, etc. 
use alloy::{ - consensus::{ReceiptEnvelope as CRE, ReceiptWithBloom}, + consensus::{ReceiptEnvelope as CRE, ReceiptWithBloom, TxEnvelope}, eips::BlockNumberOrTag, network::{eip2718::Encodable2718, TransactionResponse}, - primitives::{Address, B256, U256}, + primitives::{Address, B256}, providers::{Provider, RootProvider}, rlp::Encodable as AlloyEncodable, rpc::types::{ - Block, BlockTransactions, EIP1186AccountProofResponse, ReceiptEnvelope, Transaction, - }, - transports::{ - http::{Client, Http}, - Transport, + Block, BlockTransactions, EIP1186AccountProofResponse, Filter, ReceiptEnvelope, Transaction, }, + transports::Transport, }; -use anyhow::{bail, Context, Result}; +use anyhow::{anyhow, bail, Context, Result}; use eth_trie::{EthTrie, MemoryDB, Trie}; use ethereum_types::H256; use itertools::Itertools; @@ -120,6 +117,175 @@ pub struct ProofQuery { pub(crate) slot: StorageSlot, } +/// Struct used for storing relevant data to query blocks as they come in. +#[derive(Debug, Clone)] +pub struct ReceiptQuery { + /// The contract that emits the event we care about + pub contract: Address, + /// The event we wish to monitor for, + pub event: Event, +} + +/// Struct used to store all the information needed for proving a leaf in the Receipt Trie is one we care about. 
+#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ReceiptProofInfo { + /// The MPT proof that this Receipt is in the tree + pub mpt_proof: Vec>, + /// The index of this transaction in the block + pub tx_index: u64, + /// The size of the index in bytes + pub index_size: usize, + /// The offset in the leaf (in RLP form) to status + pub status_offset: usize, + /// The offset in the leaf (in RLP form) to the start of logs + pub logs_offset: usize, + /// Data about the type of log we are proving the existence of + pub event_log_info: EventLogInfo, + /// The offsets for the relevant logs + pub relevant_logs_offset: Vec, +} + +/// Contains all the information for an [`Event`] in rlp form +#[derive(Debug, Clone, Copy, Serialize, Deserialize)] +pub struct EventLogInfo { + /// Size in bytes of the whole log rlp encoded + pub size: usize, + /// Packed contract address to check + pub address: Address, + /// Byte offset for the address from the beginning of a Log + pub add_rel_offset: usize, + /// Packed event signature, + pub event_signature: [u8; 32], + /// Byte offset from the start of the log to event signature + pub sig_rel_offset: usize, + /// The topics for this Log + pub topics: [LogDataInfo; 3], + /// The extra data stored by this Log + pub data: [LogDataInfo; 2], +} + +/// Contains all the information for data contained in an [`Event`] +#[derive(Debug, Clone, Copy, Default, Serialize, Deserialize)] +pub struct LogDataInfo { + pub column_id: usize, + /// The byte offset from the beggining of the log to this target + pub rel_byte_offset: usize, + /// The length of this topic/data + pub len: usize, +} + +impl TryFrom<&Log> for EventLogInfo { + type Error = anyhow::Error; + + fn try_from(log: &Log) -> std::result::Result { + // First we encode the log in rlp form + let mut buf = Vec::::new(); + log.encode(&mut buf); + + let rlp_log = rlp::Rlp::new(&buf); + // Extract the header + let log_header = rlp_log.payload_info()?; + let next_data = 
&buf[log_header.header_len..log_header.header_len + log_header.value_len]; + let rlp_log_no_header = rlp::Rlp::new(next_data); + // Find the address offset (skipping its header) + let address_header = rlp_log_no_header.payload_info()?; + let rel_address_offset = log_header.header_len + address_header.header_len; + // Find the signature offset (skipping its header) + let topics_data = &buf[rel_address_offset + address_header.value_len + ..log_header.header_len + log_header.value_len]; + let topics_rlp = rlp::Rlp::new(topics_data); + let topics_header = topics_rlp.payload_info()?; + let topic_0_data = + &buf[rel_address_offset + address_header.value_len + topics_header.header_len + ..log_header.header_len + + address_header.header_len + + address_header.value_len + + topics_header.header_len + + topics_header.value_len]; + let topic_0_rlp = rlp::Rlp::new(topic_0_data); + let topic_0_header = topic_0_rlp.payload_info()?; + let rel_sig_offset = log_header.header_len + + address_header.header_len + + address_header.value_len + + topics_header.header_len + + topic_0_header.header_len; + let event_signature: [u8; 32] = buf[rel_sig_offset..rel_sig_offset + 32].try_into()?; + // Each topic takes 33 bytes to encode so we divide this length by 33 to get the number of topics remaining + let remaining_topics = buf[rel_sig_offset + topic_0_header.value_len + ..log_header.header_len + + address_header.header_len + + address_header.value_len + + topics_header.header_len + + topics_header.value_len] + .len() + / 33; + + let mut topics = [LogDataInfo::default(); 3]; + let mut current_topic_offset = rel_sig_offset + topic_0_header.value_len + 1; + topics + .iter_mut() + .enumerate() + .take(remaining_topics) + .for_each(|(j, info)| { + *info = LogDataInfo { + column_id: j, + rel_byte_offset: current_topic_offset, + len: 32, + }; + current_topic_offset += 33; + }); + + // Deal with any remaining data + let mut data = [LogDataInfo::default(); 2]; + + let data_vec = if 
current_topic_offset < buf.len() { + buf.iter() + .skip(current_topic_offset - 1) + .copied() + .collect::>() + } else { + vec![] + }; + + if !data_vec.is_empty() { + let data_rlp = rlp::Rlp::new(&data_vec); + let data_header = data_rlp.payload_info()?; + // Since we can deal with at most two words of additional data we only need to take 66 bytes from this list + let mut additional_offset = data_header.header_len; + data_vec[data_header.header_len..] + .chunks(33) + .enumerate() + .take(2) + .try_for_each(|(j, chunk)| { + let chunk_rlp = rlp::Rlp::new(chunk); + let chunk_header = chunk_rlp.payload_info()?; + if chunk_header.value_len <= 32 { + data[j] = LogDataInfo { + column_id: 3 + j, + rel_byte_offset: current_topic_offset + + additional_offset + + chunk_header.header_len, + len: chunk_header.value_len, + }; + additional_offset += chunk_header.header_len + chunk_header.value_len; + } else { + return Ok(()); + } + Result::<(), anyhow::Error>::Ok(()) + })?; + } + Ok(EventLogInfo { + size: log_header.header_len + log_header.value_len, + address: log.address, + add_rel_offset: rel_address_offset, + event_signature, + sig_rel_offset: rel_sig_offset, + topics, + data, + }) + } +} + /// Represent an intermediate or leaf node of a storage slot in contract. /// /// It has a `parent` node, and its ancestor (root) must be a simple or mapping slot. @@ -365,6 +531,102 @@ impl ProofQuery { } } +impl ReceiptQuery { + pub fn new(contract: Address, event: Event) -> Self { + Self { contract, event } + } + + /// Function that returns the MPT Trie inclusion proofs for all receipts in a block whose logs contain + /// the specified event for the contract. 
+ pub async fn query_receipt_proofs( + &self, + provider: &RootProvider, + block: BlockNumberOrTag, + ) -> Result> { + let expected_topic_0 = B256::from_slice(&keccak256(self.event.signature().as_bytes())); + let filter = Filter::new() + .select(block) + .address(self.contract) + .event(&self.event.signature()); + let logs = provider.get_logs(&filter).await?; + // Find the length of the RLP encoded log + let event_log_info: EventLogInfo = (&logs + .first() + .ok_or(anyhow!("No relevant logs in this block"))? + .inner) + .try_into()?; + + // For each of the logs return the transacion its included in, then sort and remove duplicates. + let mut tx_indices = logs + .iter() + .map(|log| log.transaction_index) + .collect::>>() + .ok_or(anyhow!("One of the logs did not have a transaction index"))?; + tx_indices.sort(); + tx_indices.dedup(); + + // Construct the Receipt Trie for this block so we can retrieve MPT proofs. + let mut block_util = BlockUtil::fetch(provider, block).await?; + + let proofs = tx_indices + .into_iter() + .map(|index| { + let key = index.rlp_bytes(); + let index_size = key.len(); + let proof = block_util.receipts_trie.get_proof(&key)?; + let receipt = block_util.txs[index as usize].receipt(); + let rlp_body = receipt.encoded_2718(); + // Skip the first byte as it refers to the transaction type + let length_hint = rlp_body[1] as usize - 247; + + let status_offset = 2 + length_hint; + let gas_hint = rlp_body[3 + length_hint] as usize - 128; + // Logs bloom is always 256 bytes long and comes after the gas used the first byte is 185 then 1 then 0 then the bloom so the + // log data starts at 4 + length_hint + gas_hint + 259 + let log_offset = 4 + length_hint + gas_hint + 259; + + let log_hint = if rlp_body[log_offset] < 247 { + rlp_body[log_offset] as usize - 192 + } else { + rlp_body[log_offset] as usize - 247 + }; + // We iterate through the logs and store the offsets we care about. 
+ let mut current_log_offset = log_offset + 1 + log_hint; + + let relevant_logs = receipt + .logs() + .iter() + .filter_map(|log| { + let length = log.length(); + if log.address == self.contract + && log.data.topics().contains(&expected_topic_0) + { + let out = current_log_offset; + current_log_offset += length; + Some(out) + } else { + current_log_offset += length; + None + } + }) + .collect::>(); + + Ok(ReceiptProofInfo { + mpt_proof: proof, + tx_index: index, + index_size, + status_offset, + logs_offset: log_offset, + event_log_info, + relevant_logs_offset: relevant_logs, + }) + }) + .collect::, eth_trie::TrieError>>()?; + + Ok(proofs) + } +} + impl Rlpable for alloy::rpc::types::Block { fn rlp(&self) -> Vec { let mut out = Vec::new(); @@ -391,17 +653,24 @@ pub struct BlockUtil { pub block: Block, pub txs: Vec, pub receipts_trie: EthTrie, + pub transactions_trie: EthTrie, } pub struct TxWithReceipt(Transaction, ReceiptEnvelope); impl TxWithReceipt { - pub fn receipt(&self) -> ReceiptEnvelope { - self.1.clone() + pub fn receipt(&self) -> &ReceiptEnvelope { + &self.1 + } + pub fn transaction(&self) -> &Transaction { + &self.0 } } impl BlockUtil { - pub async fn fetch(t: RootProvider>, id: BlockNumberOrTag) -> Result { + pub async fn fetch( + t: &RootProvider, + id: BlockNumberOrTag, + ) -> Result { let block = t .get_block(id.into(), alloy::rpc::types::BlockTransactionsKind::Full) .await? @@ -410,42 +679,36 @@ impl BlockUtil { .get_block_receipts(id.into()) .await? 
.context("can't get receipts")?; - let BlockTransactions::Full(all_tx) = block.transactions.clone() else { + let BlockTransactions::Full(all_tx) = block.transactions() else { bail!("can't see full transactions"); }; - let tx_receipts: Vec<(_, _)> = receipts - .into_iter() - .map(|receipt| { - ( - all_tx - .iter() - .find(|tx| tx.tx_hash() == receipt.transaction_hash) - .expect("no tx with receipt hash") - .clone(), - receipt, - ) - }) - .collect(); // check receipt root let memdb = Arc::new(MemoryDB::new(true)); - let mut receipts_trie = EthTrie::new(Arc::clone(&memdb)); - let consensus_receipts = tx_receipts + let mut receipts_trie = EthTrie::new(memdb.clone()); + let mut transactions_trie = EthTrie::new(memdb.clone()); + let consensus_receipts = receipts .into_iter() - .map(|tr| { - let receipt = tr.1; + .zip(all_tx.into_iter()) + .map(|(receipt, transaction)| { let tx_index = receipt.transaction_index.unwrap().rlp_bytes(); - //let mut buff = Vec::new(); - let receipt_primitive = receipt.inner.clone(); - let receipt_primitive = match receipt_primitive { - CRE::Legacy(ref r) => CRE::Legacy(from_rpc_logs_to_consensus(&r)), - CRE::Eip2930(ref r) => CRE::Eip2930(from_rpc_logs_to_consensus(&r)), - CRE::Eip1559(ref r) => CRE::Eip1559(from_rpc_logs_to_consensus(&r)), - CRE::Eip4844(ref r) => CRE::Eip4844(from_rpc_logs_to_consensus(&r)), - CRE::Eip7702(ref r) => CRE::Eip7702(from_rpc_logs_to_consensus(&r)), + + let receipt_primitive = match receipt.inner { + CRE::Legacy(ref r) => CRE::Legacy(from_rpc_logs_to_consensus(r)), + CRE::Eip2930(ref r) => CRE::Eip2930(from_rpc_logs_to_consensus(r)), + CRE::Eip1559(ref r) => CRE::Eip1559(from_rpc_logs_to_consensus(r)), + CRE::Eip4844(ref r) => CRE::Eip4844(from_rpc_logs_to_consensus(r)), + CRE::Eip7702(ref r) => CRE::Eip7702(from_rpc_logs_to_consensus(r)), _ => panic!("aie"), }; + + let transaction_primitive = match TxEnvelope::try_from(transaction.clone()) { + Ok(t) => t, + _ => panic!("Couldn't get transaction envelope"), + }; 
+ let body_rlp = receipt_primitive.encoded_2718(); + let tx_body_rlp = transaction_primitive.encoded_2718(); println!( "TX index {} RLP encoded: {:?}", receipt.transaction_index.unwrap(), @@ -453,25 +716,31 @@ impl BlockUtil { ); receipts_trie .insert(&tx_index, &body_rlp) - .expect("can't insert tx"); - TxWithReceipt(tr.0, receipt_primitive) + .expect("can't insert receipt"); + transactions_trie + .insert(&tx_index, &tx_body_rlp) + .expect("can't insert transaction"); + TxWithReceipt(transaction.clone(), receipt_primitive) }) .collect::>(); Ok(BlockUtil { block, txs: consensus_receipts, receipts_trie, + transactions_trie, }) } // recompute the receipts trie by first converting all receipts form RPC type to consensus type // since in Alloy these are two different types and RLP functions are only implemented for // consensus ones. - // TODO: transaction trie fn check(&mut self) -> Result<()> { - let computed = self.receipts_trie.root_hash().expect("root hash problem"); + let computed = self.receipts_trie.root_hash()?; + let tx_computed = self.transactions_trie.root_hash()?; let expected = self.block.header.receipts_root; - assert_eq!(expected.to_vec(), computed.0.to_vec()); + let tx_expected = self.block.header.transactions_root; + assert_eq!(expected.0, computed.0); + assert_eq!(tx_expected.0, tx_computed.0); Ok(()) } } @@ -657,7 +926,13 @@ mod test { use std::env; use std::str::FromStr; - use alloy::{primitives::Bytes, providers::ProviderBuilder, rpc::types::BlockTransactionsKind}; + use alloy::{ + node_bindings::Anvil, + primitives::{Bytes, Log}, + providers::ProviderBuilder, + rlp::Decodable, + sol, + }; use eth_trie::Nibbles; use ethereum_types::U64; use ethers::{ @@ -682,7 +957,7 @@ mod test { let provider = ProviderBuilder::new().on_http(url.parse().unwrap()); let bn = 6893107; let bna = BlockNumberOrTag::Number(bn); - let mut block = BlockUtil::fetch(provider, bna).await?; + let mut block = BlockUtil::fetch(&provider, bna).await?; // check if we compute the 
RLP correctly now block.check()?; let mut be = tryethers::BlockData::fetch(bn, url).await?; @@ -802,6 +1077,123 @@ mod test { Ok(()) } + #[tokio::test] + async fn test_receipt_query() -> Result<()> { + // Spin up a local node. + let anvil = Anvil::new().spawn(); + // Create a provider with the wallet for contract deployment and interaction. + let rpc_url = anvil.endpoint(); + + let rpc = ProviderBuilder::new().on_http(rpc_url.parse().unwrap()); + + // Make a contract taht emits events so we can pick up on them + sol! { + #[allow(missing_docs)] + // solc v0.8.26; solc Counter.sol --via-ir --optimize --bin + #[sol(rpc, abi, bytecode="6080604052348015600e575f80fd5b506102288061001c5f395ff3fe608060405234801561000f575f80fd5b506004361061004a575f3560e01c8063488814e01461004e5780638381f58a14610058578063d09de08a14610076578063db73227914610080575b5f80fd5b61005661008a565b005b6100606100f8565b60405161006d9190610165565b60405180910390f35b61007e6100fd565b005b610088610115565b005b5f547fdcd9c7fa0342f01013bd0bf2bec103a81936162dcebd1f0c38b1d4164c17e0fc60405160405180910390a26100c06100fd565b5f547fdcd9c7fa0342f01013bd0bf2bec103a81936162dcebd1f0c38b1d4164c17e0fc60405160405180910390a26100f66100fd565b565b5f5481565b5f8081548092919061010e906101ab565b9190505550565b5f547fdcd9c7fa0342f01013bd0bf2bec103a81936162dcebd1f0c38b1d4164c17e0fc60405160405180910390a261014b6100fd565b565b5f819050919050565b61015f8161014d565b82525050565b5f6020820190506101785f830184610156565b92915050565b7f4e487b71000000000000000000000000000000000000000000000000000000005f52601160045260245ffd5b5f6101b58261014d565b91507fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff82036101e7576101e661017e565b5b60018201905091905056fea26469706673582212202787ca0f2ea71e118bc4d1bf239cde5ec4730aeb35a404c44e6c9d587316418564736f6c634300081a0033")] + contract EventEmitter { + uint256 public number; + event testEvent(uint256 indexed num); + + function testEmit() public { + emit testEvent(number); + increment(); + } + + function twoEmits() 
public { + emit testEvent(number); + increment(); + emit testEvent(number); + increment(); + } + + function increment() public { + number++; + } + } + } + // Deploy the contract using anvil + let contract = EventEmitter::deploy(&rpc).await?; + + // Fire off a few transactions to emit some events + let mut transactions = Vec::::new(); + + for i in 0..10 { + if i % 2 == 0 { + let builder = contract.testEmit(); + let tx_hash = builder.send().await?.watch().await?; + let transaction = rpc.get_transaction_by_hash(tx_hash).await?.unwrap(); + transactions.push(transaction); + } else { + let builder = contract.twoEmits(); + let tx_hash = builder.send().await?.watch().await?; + let transaction = rpc.get_transaction_by_hash(tx_hash).await?.unwrap(); + transactions.push(transaction); + } + } + + // We want to get the event signature so we can make a ReceiptQuery + let all_events = EventEmitter::abi::events(); + + let events = all_events.get("testEvent").unwrap(); + let receipt_query = ReceiptQuery::new(*contract.address(), events[0].clone()); + + // Now for each transaction we fetch the block, then get the MPT Trie proof that the receipt is included and verify it + for transaction in transactions.iter() { + let index = transaction + .block_number + .ok_or(anyhow!("Could not get block number from transaction"))?; + let block = rpc + .get_block( + BlockNumberOrTag::Number(index).into(), + alloy::rpc::types::BlockTransactionsKind::Full, + ) + .await? + .ok_or(anyhow!("Could not get block test"))?; + let proofs = receipt_query + .query_receipt_proofs(&rpc, BlockNumberOrTag::Number(index)) + .await?; + + for proof in proofs.into_iter() { + let memdb = Arc::new(MemoryDB::new(true)); + let tx_trie = EthTrie::new(Arc::clone(&memdb)); + + let mpt_key = transaction.transaction_index.unwrap().rlp_bytes(); + let receipt_hash = block.header().receipts_root; + let is_valid = tx_trie + .verify_proof(receipt_hash.0.into(), &mpt_key, proof.mpt_proof.clone())? 
+ .ok_or(anyhow!("No proof found when verifying"))?; + + let expected_sig: [u8; 32] = keccak256(receipt_query.event.signature().as_bytes()) + .try_into() + .unwrap(); + + for log_offset in proof.relevant_logs_offset.iter() { + let mut buf = &is_valid[*log_offset..*log_offset + proof.event_log_info.size]; + let decoded_log = Log::decode(&mut buf)?; + let raw_bytes: [u8; 20] = is_valid[*log_offset + + proof.event_log_info.add_rel_offset + ..*log_offset + proof.event_log_info.add_rel_offset + 20] + .to_vec() + .try_into() + .unwrap(); + assert_eq!(decoded_log.address, receipt_query.contract); + assert_eq!(raw_bytes, receipt_query.contract); + let topics = decoded_log.topics(); + assert_eq!(topics[0].0, expected_sig); + let raw_bytes: [u8; 32] = is_valid[*log_offset + + proof.event_log_info.sig_rel_offset + ..*log_offset + proof.event_log_info.sig_rel_offset + 32] + .to_vec() + .try_into() + .unwrap(); + assert_eq!(topics[0].0, raw_bytes); + } + } + } + Ok(()) + } + #[tokio::test] async fn test_sepolia_slot() -> Result<()> { #[cfg(feature = "ci")] diff --git a/mp2-common/src/group_hashing/mod.rs b/mp2-common/src/group_hashing/mod.rs index 47a8822aa..bf4360676 100644 --- a/mp2-common/src/group_hashing/mod.rs +++ b/mp2-common/src/group_hashing/mod.rs @@ -21,6 +21,8 @@ use plonky2_ecgfp5::{ }, }; +use std::array::from_fn as create_array; + mod curve_add; pub mod field_to_curve; mod sswu_gadget; diff --git a/mp2-common/src/mpt_sequential/key.rs b/mp2-common/src/mpt_sequential/key.rs index d7129fd84..f98b57aac 100644 --- a/mp2-common/src/mpt_sequential/key.rs +++ b/mp2-common/src/mpt_sequential/key.rs @@ -15,25 +15,37 @@ use plonky2::{ use plonky2_crypto::u32::arithmetic_u32::U32Target; use serde::{Deserialize, Serialize}; +pub type MPTKeyWire = MPTKeyWireGeneric; + +pub type ReceiptKeyWire = MPTKeyWireGeneric; + +pub const MAX_TX_KEY_NIBBLE_LEN: usize = 6; + /// Calculate the pointer from the MPT key. 
pub fn mpt_key_ptr(mpt_key: &[u8]) -> usize { let nibbles = Nibbles::from_compact(mpt_key); MAX_KEY_NIBBLE_LEN - 1 - nibbles.nibbles().len() } +/// Calculate the pointer from the MPT key. +pub fn receipt_key_ptr(mpt_key: &[u8]) -> usize { + let nibbles = Nibbles::from_compact(mpt_key); + MAX_TX_KEY_NIBBLE_LEN - 1 - nibbles.nibbles().len() +} + /// A structure that keeps a running pointer to the portion of the key the circuit /// already has proven. #[derive(Clone, Debug, Serialize, Deserialize, Eq, PartialEq)] -pub struct MPTKeyWire { +pub struct MPTKeyWireGeneric { /// Represents the full key of the value(s) we're looking at in the MPT trie. - pub key: Array, + pub key: Array, /// Represents which portion of the key we already processed. The pointer /// goes _backwards_ since circuit starts proving from the leaf up to the root. /// i.e. pointer must be equal to F::NEG_ONE when we reach the root. pub pointer: Target, } -impl MPTKeyWire { +impl MPTKeyWireGeneric { pub fn current_nibble, const D: usize>( &self, b: &mut CircuitBuilder, @@ -72,7 +84,7 @@ impl MPTKeyWire { /// Create a new fresh key wire pub fn new, const D: usize>(b: &mut CircuitBuilder) -> Self { Self { - key: Array::::new(b), + key: Array::::new(b), pointer: b.add_virtual_target(), } } @@ -80,7 +92,7 @@ impl MPTKeyWire { pub fn assign( &self, p: &mut PartialWitness, - key_nibbles: &[u8; MAX_KEY_NIBBLE_LEN], + key_nibbles: &[u8; KEY_LENGTH], ptr: usize, ) { let f_nibbles = create_array(|i| F::from_canonical_u8(key_nibbles[i])); @@ -141,7 +153,7 @@ impl MPTKeyWire { // now we need to pack each pair of 2 bit limbs into a nibble, but for each byte we want nibbles to // be ordered in big-endian limbs - .chunks(4) + .chunks_exact(4) .flat_map(|chunk| { vec![ b.mul_const_add(F::from_canonical_u8(4), chunk[3], chunk[2]), @@ -154,7 +166,7 @@ impl MPTKeyWire { .try_into() .unwrap(), }, - pointer: b.constant(F::from_canonical_usize(MAX_KEY_NIBBLE_LEN - 1)), + pointer: 
b.constant(F::from_canonical_usize(KEY_LENGTH - 1)), } } } diff --git a/mp2-common/src/mpt_sequential/leaf_or_extension.rs b/mp2-common/src/mpt_sequential/leaf_or_extension.rs index 96b3b6355..8c64d7584 100644 --- a/mp2-common/src/mpt_sequential/leaf_or_extension.rs +++ b/mp2-common/src/mpt_sequential/leaf_or_extension.rs @@ -1,10 +1,10 @@ //! MPT leaf or extension node gadget -use super::{Circuit as MPTCircuit, MPTKeyWire, PAD_LEN}; +use super::{advance_key_leaf_or_extension, key::MPTKeyWireGeneric, PAD_LEN}; use crate::{ array::{Array, Vector, VectorWire}, keccak::{InputData, KeccakCircuit, KeccakWires}, - rlp::decode_fixed_list, + rlp::{decode_fixed_list, MAX_KEY_NIBBLE_LEN}, types::GFp, }; use plonky2::{ @@ -15,10 +15,16 @@ use plonky2::{ }; use serde::{Deserialize, Serialize}; +pub type MPTLeafOrExtensionWires = + MPTLeafOrExtensionWiresGeneric; + /// Wrapped wires for a MPT leaf or extension node #[derive(Clone, Debug, Serialize, Deserialize)] -pub struct MPTLeafOrExtensionWires -where +pub struct MPTLeafOrExtensionWiresGeneric< + const NODE_LEN: usize, + const VALUE_LEN: usize, + const KEY_LEN: usize, +> where [(); PAD_LEN(NODE_LEN)]:, { /// MPT node @@ -26,12 +32,13 @@ where /// MPT root pub root: KeccakWires<{ PAD_LEN(NODE_LEN) }>, /// New MPT key after advancing the current key - pub key: MPTKeyWire, + pub key: MPTKeyWireGeneric, /// New MPT value pub value: Array, } -impl MPTLeafOrExtensionWires +impl + MPTLeafOrExtensionWiresGeneric where [(); PAD_LEN(NODE_LEN)]:, { @@ -41,10 +48,12 @@ where } } +pub type MPTLeafOrExtensionNode = MPTLeafOrExtensionNodeGeneric; + /// MPT leaf or extension node gadget -pub struct MPTLeafOrExtensionNode; +pub struct MPTLeafOrExtensionNodeGeneric; -impl MPTLeafOrExtensionNode { +impl MPTLeafOrExtensionNodeGeneric { /// Build the MPT node and advance the current key. 
pub fn build_and_advance_key< F: RichField + Extendable, @@ -53,8 +62,8 @@ impl MPTLeafOrExtensionNode { const VALUE_LEN: usize, >( b: &mut CircuitBuilder, - current_key: &MPTKeyWire, - ) -> MPTLeafOrExtensionWires + current_key: &MPTKeyWireGeneric, + ) -> MPTLeafOrExtensionWiresGeneric where [(); PAD_LEN(NODE_LEN)]:, { @@ -70,15 +79,16 @@ impl MPTLeafOrExtensionNode { // Advance the key and extract the value (only decode two headers in the case of leaf). let rlp_headers = decode_fixed_list::<_, D, 2>(b, &node.arr.arr, zero); - let (key, value, valid) = MPTCircuit::<1, NODE_LEN>::advance_key_leaf_or_extension::< - F, - D, - 2, - VALUE_LEN, - >(b, &node.arr, current_key, &rlp_headers); + let (key, value, valid) = + advance_key_leaf_or_extension::( + b, + &node.arr, + current_key, + &rlp_headers, + ); b.connect(tru.target, valid.target); - MPTLeafOrExtensionWires { + MPTLeafOrExtensionWiresGeneric { node, root, key, diff --git a/mp2-common/src/mpt_sequential/mod.rs b/mp2-common/src/mpt_sequential/mod.rs index 3ded0e97c..50087c1af 100644 --- a/mp2-common/src/mpt_sequential/mod.rs +++ b/mp2-common/src/mpt_sequential/mod.rs @@ -1,3 +1,4 @@ +use crate::rlp::MAX_KEY_NIBBLE_LEN; use crate::serialization::{ deserialize_array, deserialize_long_array, serialize_array, serialize_long_array, }; @@ -8,14 +9,12 @@ use crate::{ compute_size_with_padding, InputData, KeccakCircuit, KeccakWires, OutputHash, HASH_LEN, PACKED_HASH_LEN, }, - rlp::{ - decode_compact_encoding, decode_fixed_list, RlpHeader, RlpList, MAX_ITEMS_IN_LIST, - MAX_KEY_NIBBLE_LEN, - }, + rlp::{decode_compact_encoding, decode_fixed_list, RlpHeader, RlpList, MAX_ITEMS_IN_LIST}, utils::{find_index_subvector, keccak256}, }; use anyhow::{anyhow, Result}; use core::array::from_fn as create_array; + use plonky2::{ field::extension::Extendable, hash::hash_types::RichField, @@ -33,8 +32,14 @@ mod key; mod leaf_or_extension; pub mod utils; -pub use key::{mpt_key_ptr, MPTKeyWire}; -pub use 
leaf_or_extension::{MPTLeafOrExtensionNode, MPTLeafOrExtensionWires}; +pub use key::{ + mpt_key_ptr, receipt_key_ptr, MPTKeyWire, MPTKeyWireGeneric, ReceiptKeyWire, + MAX_TX_KEY_NIBBLE_LEN, +}; +pub use leaf_or_extension::{ + MPTLeafOrExtensionNode, MPTLeafOrExtensionNodeGeneric, MPTLeafOrExtensionWires, + MPTLeafOrExtensionWiresGeneric, +}; /// Number of items in the RLP encoded list in a leaf node. const NB_ITEMS_LEAF: usize = 2; @@ -44,6 +49,11 @@ const NB_ITEMS_LEAF: usize = 2; /// Given we target MPT storage proof, the value is 32 bytes + 1 byte for RLP encoding. pub const MAX_LEAF_VALUE_LEN: usize = 33; +/// This is the maximum size we allow for the value of Receipt Trie leaf +/// currently set to be the same as we allow for a branch node in the Storage Trie +/// minus the length of the key header and key +pub const MAX_RECEIPT_LEAF_VALUE_LEN: usize = 526; + /// RLP item size for the extension node pub const MPT_EXTENSION_RLP_SIZE: usize = 2; @@ -56,6 +66,17 @@ pub const MPT_BRANCH_RLP_SIZE: usize = 17; pub const fn PAD_LEN(d: usize) -> usize { compute_size_with_padding(d) } + +/// const function to allow arrays of half a generics size without additional generics +#[allow(non_snake_case)] +pub const fn NIBBLES_TO_BYTES(d: usize) -> usize { + d >> 1 +} + +/// We export a type here to keep it consistent with the already established codebase. +pub type MPTCircuit = + Circuit; + /// Circuit that simoply proves the inclusion of a value inside a MPT tree. /// /// . DEPTH is the maximal depth of the tree. If the tree is smaller, the circuit @@ -65,23 +86,29 @@ pub const fn PAD_LEN(d: usize) -> usize { /// branch node can be up to 32 * 17 = 544 bytes. /// - Note since it uses keccak, the array being hashed is larger because /// keccak requires padding. 
+/// KEY_LEN is the maximum length of the MPT key (differs between storage tries and transaction/receipt tries) #[derive(Clone, Debug)] -pub struct Circuit { +pub struct Circuit< + const DEPTH: usize, + const NODE_LEN: usize, + const KEY_LEN: usize, + const KEY_LEN_BYTES: usize = { NIBBLES_TO_BYTES(KEY_LEN) }, +> { /// for ease of usage, we take vector here and the circuit is doing the padding nodes: Vec>, /// the full key that we are trying to prove in this trie /// NOTE: the key is in bytes. This code will transform it into nibbles /// before passing it to circuit, i.e. the circuit takes the key in nibbles /// whose length == MAX_KEY_NIBBLE_LEN - key: [u8; MAX_KEY_NIBBLE_LEN / 2], + key: [u8; KEY_LEN_BYTES], } #[derive(Serialize, Deserialize, Clone, Debug)] -pub struct InputWires +pub struct InputWires where [(); PAD_LEN(NODE_LEN)]:, [(); DEPTH - 1]:, { - pub(crate) key: MPTKeyWire, + pub(crate) key: MPTKeyWireGeneric, /// a vector of buffers whose size is the padded size of the maximum node length /// the padding may occur anywhere in the array but it can fit the maximum node size /// NOTE: this makes the code a bit harder grasp at first, but it's a straight @@ -122,27 +149,28 @@ where pub root: OutputHash, } -impl Circuit +impl + Circuit where [(); PAD_LEN(NODE_LEN)]:, [(); DEPTH - 1]:, { - pub fn new(key: [u8; MAX_KEY_NIBBLE_LEN / 2], proof: Vec>) -> Self { + pub fn new(key: [u8; NIBBLES_TO_BYTES(KEY_LEN)], proof: Vec>) -> Self { Self { nodes: proof, key } } pub fn create_input_wires( b: &mut CircuitBuilder, - key: Option, // Could set the full key from outside - ) -> InputWires + key: Option>, // Could set the full key from outside + ) -> InputWires where F: RichField + Extendable, { // full key is expected to be given by verifier (done in UserCircuit impl) // initial key has the pointer that is set at the maximum length - 1 (it's an index, so 0-based) - let key = key.unwrap_or_else(|| MPTKeyWire { - key: Array::::new(b), - pointer: 
b.constant(F::from_canonical_usize(MAX_KEY_NIBBLE_LEN) - F::ONE), + let key = key.unwrap_or_else(|| MPTKeyWireGeneric:: { + key: Array::::new(b), + pointer: b.constant(F::from_canonical_usize(KEY_LEN) - F::ONE), }); let should_process: [BoolTarget; DEPTH - 1] = create_array(|_| b.add_virtual_bool_target_safe()); @@ -162,7 +190,7 @@ where /// to be done by the caller. pub fn verify_mpt_proof( b: &mut CircuitBuilder, - inputs: &InputWires, + inputs: &InputWires, ) -> OutputWires where F: RichField + Extendable, @@ -177,12 +205,8 @@ where // small optimization here as we only need to decode two items for a leaf, since we know it's a leaf let leaf_headers = decode_fixed_list::<_, _, NB_ITEMS_LEAF>(b, &inputs.nodes[0].arr.arr, zero); - let (mut iterative_key, leaf_value, is_leaf) = Self::advance_key_leaf_or_extension( - b, - &inputs.nodes[0].arr, - &inputs.key, - &leaf_headers, - ); + let (mut iterative_key, leaf_value, is_leaf) = + advance_key_leaf_or_extension(b, &inputs.nodes[0].arr, &inputs.key, &leaf_headers); b.connect(t.target, is_leaf.target); let mut last_hash_output = leaf_hash.output_array.clone(); let mut keccak_wires = vec![leaf_hash]; @@ -239,7 +263,7 @@ where pub fn assign, const D: usize>( &self, p: &mut PartialWitness, - inputs: &InputWires, + inputs: &InputWires, outputs: &OutputWires, ) -> Result<()> { let pad_len = DEPTH.checked_sub(self.nodes.len()).ok_or(anyhow!( @@ -302,8 +326,12 @@ where pub fn advance_key, const D: usize>( b: &mut CircuitBuilder, node: &Array, - key: &MPTKeyWire, - ) -> (MPTKeyWire, Array, BoolTarget) { + key: &MPTKeyWireGeneric, + ) -> ( + MPTKeyWireGeneric, + Array, + BoolTarget, + ) { let zero = b.zero(); // It will try to decode a RLP list of the maximum number of items there can be // in a list, which is 16 for a branch node (Excluding value). @@ -313,9 +341,9 @@ where // if it's more ==> node's a branch node // RLP ( RLP(hash1), RLP(hash2), ... 
RLP(hash16), RLP(value)) let rlp_headers = decode_fixed_list::(b, &node.arr, zero); - let leaf_info = Self::advance_key_leaf_or_extension(b, node, key, &rlp_headers); + let leaf_info = advance_key_leaf_or_extension(b, node, key, &rlp_headers); let tuple_condition = leaf_info.2; - let branch_info = Self::advance_key_branch(b, node, key, &rlp_headers); + let branch_info = advance_key_branch(b, node, key, &rlp_headers); // ensures it's either a branch or leaf/extension let tuple_or_branch = b.or(leaf_info.2, branch_info.2); @@ -327,78 +355,94 @@ where (new_key, child_hash, tuple_or_branch) } +} - /// This function advances the pointer of the MPT key. The parameters are: - /// * The key where to lookup the next nibble and thus the hash stored at - /// nibble position in the branch node. - /// * RLP headers of the current node. - /// And it returns: - /// * New key with the pointer moved. - /// * The child hash / value of the node. - /// * A boolean that must be true if the given node is a leaf or an extension. - /// * The nibble position before this advance. - pub fn advance_key_branch, const D: usize>( - b: &mut CircuitBuilder, - node: &Array, - key: &MPTKeyWire, - rlp_headers: &RlpList, - ) -> (MPTKeyWire, Array, BoolTarget, Target) { - let one = b.one(); - // assume it's a node and return the boolean condition that must be true if - // it is a node - decided in advance_key function - let seventeen = b.constant(F::from_canonical_usize(MAX_ITEMS_IN_LIST)); - let branch_condition = b.is_equal(seventeen, rlp_headers.num_fields); - - // Given we are reading the nibble from the key itself, we don't need to do - // any more checks on it. 
The key and pointer will be given by the verifier so - // attacker can't indicate a different nibble - let nibble = key.current_nibble(b); - - // we advance the pointer for the next iteration - let new_key = key.advance_by(b, one); - let nibble_header = rlp_headers.select(b, nibble); - let branch_child_hash = node.extract_array::(b, nibble_header.offset); - (new_key, branch_child_hash, branch_condition, nibble) - } +/// This function advances the pointer of the MPT key. The parameters are: +/// * The key where to lookup the next nibble and thus the hash stored at +/// nibble position in the branch node. +/// * RLP headers of the current node. +/// And it returns: +/// * New key with the pointer moved. +/// * The child hash / value of the node. +/// * A boolean that must be true if the given node is a leaf or an extension. +/// * The nibble position before this advance. +pub fn advance_key_branch< + F: RichField + Extendable, + const D: usize, + const NODE_LEN: usize, + const KEY_LEN: usize, +>( + b: &mut CircuitBuilder, + node: &Array, + key: &MPTKeyWireGeneric, + rlp_headers: &RlpList, +) -> ( + MPTKeyWireGeneric, + Array, + BoolTarget, + Target, +) { + let one = b.one(); + // assume it's a node and return the boolean condition that must be true if + // it is a node - decided in advance_key function + let seventeen = b.constant(F::from_canonical_usize(MAX_ITEMS_IN_LIST)); + let branch_condition = b.is_equal(seventeen, rlp_headers.num_fields); - /// Returns the key with the pointer moved, returns the child hash / value of the node, - /// and returns booleans that must be true IF the given node is a leaf or an extension. 
- pub fn advance_key_leaf_or_extension< - F: RichField + Extendable, - const D: usize, - const LIST_LEN: usize, - // in case of a leaf, the value can be up to 33 bytes because of additional RLP encoding - // in case of extension, the value is 32 bytes - const VALUE_LEN: usize, - >( - b: &mut CircuitBuilder, - node: &Array, - key: &MPTKeyWire, - rlp_headers: &RlpList, - ) -> (MPTKeyWire, Array, BoolTarget) { - let two = b.two(); - let condition = b.is_equal(rlp_headers.num_fields, two); - let key_header = RlpHeader { - data_type: rlp_headers.data_type[0], - offset: rlp_headers.offset[0], - len: rlp_headers.len[0], - }; - let (extracted_key, should_true) = decode_compact_encoding(b, node, &key_header); - // it's either the _value_ of the leaf, OR the _hash_ of the child node if node = ext. - let leaf_child_hash = node.extract_array::(b, rlp_headers.offset[1]); - // note we are going _backwards_ on the key, so we need to substract the expected key length - // we want to check against - let new_key = key.advance_by(b, extracted_key.real_len); - // NOTE: there is no need to check if the extracted_key is indeed a subvector of the full key - // in this case. Indeed, in leaf/ext. there is only one key possible. Since we decoded it - // from the beginning of the node, and that the hash of the node also starts at the beginning, - // either the attacker give the right node or it gives an invalid node and hashes will not - // match. - let condition = b.and(condition, should_true); - (new_key, leaf_child_hash, condition) - } + // Given we are reading the nibble from the key itself, we don't need to do + // any more checks on it. 
The key and pointer will be given by the verifier so + // attacker can't indicate a different nibble + let nibble = key.current_nibble(b); + + // we advance the pointer for the next iteration + let new_key = key.advance_by(b, one); + let nibble_header = rlp_headers.select(b, nibble); + let branch_child_hash = node.extract_array::(b, nibble_header.offset); + (new_key, branch_child_hash, branch_condition, nibble) } +/// Returns the key with the pointer moved, returns the child hash / value of the node, +/// and returns booleans that must be true IF the given node is a leaf or an extension. +pub fn advance_key_leaf_or_extension< + F: RichField + Extendable, + const D: usize, + const LIST_LEN: usize, + // in case of a leaf, the value can be up to 33 bytes because of additional RLP encoding + // in case of extension, the value is 32 bytes + const VALUE_LEN: usize, + const NODE_LEN: usize, + const KEY_LEN: usize, +>( + b: &mut CircuitBuilder, + node: &Array, + key: &MPTKeyWireGeneric, + rlp_headers: &RlpList, +) -> ( + MPTKeyWireGeneric, + Array, + BoolTarget, +) { + let two = b.two(); + let condition = b.is_equal(rlp_headers.num_fields, two); + let key_header = RlpHeader { + data_type: rlp_headers.data_type[0], + offset: rlp_headers.offset[0], + len: rlp_headers.len[0], + }; + let (extracted_key, should_true) = + decode_compact_encoding::<_, _, _, KEY_LEN>(b, node, &key_header); + // it's either the _value_ of the leaf, OR the _hash_ of the child node if node = ext. + let leaf_child_hash = node.extract_array::(b, rlp_headers.offset[1]); + // note we are going _backwards_ on the key, so we need to substract the expected key length + // we want to check against + let new_key = key.advance_by(b, extracted_key.real_len); + // NOTE: there is no need to check if the extracted_key is indeed a subvector of the full key + // in this case. Indeed, in leaf/ext. there is only one key possible. 
Since we decoded it + // from the beginning of the node, and that the hash of the node also starts at the beginning, + // either the attacker give the right node or it gives an invalid node and hashes will not + // match. + let condition = b.and(condition, should_true); + (new_key, leaf_child_hash, condition) +} #[cfg(test)] mod test { use std::array::from_fn as create_array; @@ -428,31 +472,43 @@ mod test { use plonky2_crypto::u32::arithmetic_u32::U32Target; use rand::{thread_rng, RngCore}; - use crate::keccak::{HASH_LEN, PACKED_HASH_LEN}; - use crate::rlp::{decode_fixed_list, MAX_ITEMS_IN_LIST, MAX_KEY_NIBBLE_LEN}; use crate::utils::{Endianness, PackerTarget}; use crate::{ array::Array, utils::{find_index_subvector, keccak256}, }; use crate::{eth::ProofQuery, C, D, F}; + use crate::{ + keccak::{HASH_LEN, PACKED_HASH_LEN}, + mpt_sequential::advance_key_leaf_or_extension, + }; + use crate::{ + mpt_sequential::advance_key_branch, + rlp::{decode_fixed_list, MAX_ITEMS_IN_LIST, MAX_KEY_NIBBLE_LEN}, + }; use super::{ utils::{bytes_to_nibbles, nibbles_to_bytes, visit_node, visit_proof}, - Circuit, InputWires, MPTKeyWire, OutputWires, MAX_LEAF_VALUE_LEN, NB_ITEMS_LEAF, PAD_LEN, + Circuit, InputWires, MPTKeyWire, OutputWires, MAX_LEAF_VALUE_LEN, NB_ITEMS_LEAF, + NIBBLES_TO_BYTES, PAD_LEN, }; #[derive(Clone, Debug)] - struct TestCircuit { - c: Circuit, + struct TestCircuit< + const DEPTH: usize, + const NODE_LEN: usize, + const KEY_LEN: usize, + const KEY_LEN_BYTES: usize = { NIBBLES_TO_BYTES(KEY_LEN) }, + > { + c: Circuit, exp_root: [u8; 32], exp_value: [u8; MAX_LEAF_VALUE_LEN], // The flag identifies if need to check the expected leaf value, it's // set to true for storage proof, and false for state proof (unconcern). 
checking_value: bool, } - impl UserCircuit - for TestCircuit + impl + UserCircuit for TestCircuit where F: RichField + Extendable, [(); PAD_LEN(NODE_LEN)]:, @@ -461,7 +517,7 @@ mod test { [(); HASH_LEN / 4]:, { type Wires = ( - InputWires, + InputWires, OutputWires, Array, // root Array, // value @@ -531,12 +587,16 @@ mod test { // Written as constant from ^ const DEPTH: usize = 2; const NODE_LEN: usize = 150; - verify_storage_proof_from_query::(&query, &res)?; + verify_storage_proof_from_query::(&query, &res)?; verify_state_proof_from_query(&query, &res) } /// Verify the storage proof from query result. - pub(crate) fn verify_storage_proof_from_query( + pub(crate) fn verify_storage_proof_from_query< + const DEPTH: usize, + const NODE_LEN: usize, + const KEY_LEN: usize, + >( query: &ProofQuery, res: &EIP1186AccountProofResponse, ) -> Result<()> @@ -544,6 +604,7 @@ mod test { [(); PAD_LEN(NODE_LEN)]:, [(); DEPTH - 1]:, [(); PAD_LEN(NODE_LEN) / 4]:, + [(); NIBBLES_TO_BYTES(KEY_LEN)]:, { ProofQuery::verify_storage_proof(res)?; @@ -568,8 +629,8 @@ mod test { let u8idx = find_index_subvector(&mpt_proof[i], &child_hash); assert!(u8idx.is_some()); } - let circuit = TestCircuit:: { - c: Circuit::::new(mpt_key.try_into().unwrap(), mpt_proof), + let circuit = TestCircuit:: { + c: Circuit::::new(mpt_key.try_into().unwrap(), mpt_proof), exp_root: root.try_into().unwrap(), exp_value: encoded_value.try_into().unwrap(), checking_value: false, @@ -608,8 +669,11 @@ mod test { let u8idx = find_index_subvector(&mpt_proof[i], &child_hash); assert!(u8idx.is_some()); } - let circuit = TestCircuit:: { - c: Circuit::::new(mpt_key.try_into().unwrap(), mpt_proof), + let circuit = TestCircuit:: { + c: Circuit::::new( + mpt_key.try_into().unwrap(), + mpt_proof, + ), exp_root: root.try_into().unwrap(), exp_value: [0; MAX_LEAF_VALUE_LEN], // the reason we don't check the value is the circuit is made for storage proof and it extracts a 32bytes @@ -665,8 +729,8 @@ mod test { let u8idx = 
find_index_subvector(&proof[i], &child_hash); assert!(u8idx.is_some()); } - let circuit = TestCircuit:: { - c: Circuit::::new(key.try_into().unwrap(), proof), + let circuit = TestCircuit:: { + c: Circuit::::new(key.try_into().unwrap(), proof), exp_root: root, // simply pad it to max size exp_value: create_array(|i| if i < VALUE_LEN { value[i] } else { 0 }), @@ -753,7 +817,9 @@ mod test { let node = Array::::new(&mut b); let key_wire = MPTKeyWire::new(&mut b); let (advanced_key, value, valid_node) = - Circuit::::advance_key(&mut b, &node, &key_wire); + Circuit::::advance_key( + &mut b, &node, &key_wire, + ); b.connect(tr.target, valid_node.target); let exp_key_ptr = b.add_virtual_target(); b.connect(advanced_key.pointer, exp_key_ptr); @@ -864,12 +930,13 @@ mod test { let key_wire = MPTKeyWire::new(&mut builder); let rlp_headers = decode_fixed_list::(&mut builder, &node.arr, zero); - let (advanced_key, value, should_true, _) = Circuit::::advance_key_branch( - &mut builder, - &node, - &key_wire, - &rlp_headers, - ); + let (advanced_key, value, should_true, _) = + advance_key_branch::<_, _, NODE_LEN, MAX_KEY_NIBBLE_LEN>( + &mut builder, + &node, + &key_wire, + &rlp_headers, + ); builder.connect(tt.target, should_true.target); let exp_key_ptr = builder.add_virtual_target(); builder.connect(advanced_key.pointer, exp_key_ptr); @@ -935,7 +1002,7 @@ mod test { let key_wire = MPTKeyWire::new(&mut builder); let rlp_headers = decode_fixed_list::(&mut builder, &node.arr, zero); let (advanced_key, value, should_true) = - Circuit::::advance_key_leaf_or_extension( + advance_key_leaf_or_extension::<_, _, _, _, NODE_LEN, MAX_KEY_NIBBLE_LEN>( &mut builder, &node, &key_wire, diff --git a/mp2-common/src/rlp.rs b/mp2-common/src/rlp.rs index 741f9e38e..3c50eb8cc 100644 --- a/mp2-common/src/rlp.rs +++ b/mp2-common/src/rlp.rs @@ -58,11 +58,16 @@ impl RlpList { } } } -pub fn decode_compact_encoding, const D: usize, const N: usize>( +pub fn decode_compact_encoding< + F: RichField + 
Extendable, + const D: usize, + const N: usize, + const KEY_LEN: usize, +>( b: &mut CircuitBuilder, input: &Array, key_header: &RlpHeader, -) -> (VectorWire, BoolTarget) { +) -> (VectorWire, BoolTarget) { let zero = b.zero(); let two = b.two(); let first_byte = input.value_at(b, key_header.offset); @@ -71,7 +76,7 @@ pub fn decode_compact_encoding, const D: usize, con let mut prev_nibbles = (least_bits, most_bits); let mut cur_nibbles: (Target, Target); - let mut nibbles: [Target; MAX_KEY_NIBBLE_LEN] = [b.zero(); MAX_KEY_NIBBLE_LEN]; + let mut nibbles: [Target; KEY_LEN] = [b.zero(); KEY_LEN]; let first_nibble = prev_nibbles.0; let first_nibble_as_bits = num_to_bits(b, 4, first_nibble); @@ -92,7 +97,10 @@ pub fn decode_compact_encoding, const D: usize, con // during the first iteration of this loop. let one = b.one(); let mut i_offset = key_header.offset; - for i in 0..MAX_ENC_KEY_LEN - 1 { + + // We calculate how many times to run the foor loop, this is only depends on + // KEY_LEN, since we skip one byte it is just KEY_LEN / 2. 
+ for i in 0..KEY_LEN / 2 { i_offset = b.add(i_offset, one); // look now at the encoded path let x = input.value_at(b, i_offset); @@ -355,7 +363,7 @@ mod tests { use crate::array::Array; use crate::rlp::{ decode_compact_encoding, decode_fixed_list, decode_header, RlpHeader, MAX_ENC_KEY_LEN, - MAX_LEN_BYTES, + MAX_KEY_NIBBLE_LEN, MAX_LEN_BYTES, }; use crate::utils::{keccak256, less_than_or_equal_to, IntTargetWriter}; use crate::{C, D, F}; @@ -792,7 +800,11 @@ mod tests { len: builder.constant(F::from_canonical_usize(tc.key_len)), data_type: builder.constant(F::from_canonical_usize(0)), }; - let (nibbles, cond) = decode_compact_encoding(&mut builder, &wire1, &key_header); + let (nibbles, cond) = decode_compact_encoding::<_, _, _, MAX_KEY_NIBBLE_LEN>( + &mut builder, + &wire1, + &key_header, + ); builder.assert_bool(cond); let exp_nib_len = builder.constant(F::from_canonical_usize(tc.expected.len())); builder.connect(nibbles.real_len, exp_nib_len); diff --git a/mp2-test/Cargo.toml b/mp2-test/Cargo.toml index e4fd7ddbb..a2341668d 100644 --- a/mp2-test/Cargo.toml +++ b/mp2-test/Cargo.toml @@ -13,6 +13,7 @@ plonky2.workspace = true plonky2_ecgfp5.workspace = true rand.workspace = true serde.workspace = true +tokio.workspace = true mp2_common = { path = "../mp2-common" } recursion_framework = { path = "../recursion-framework" } diff --git a/mp2-test/src/mpt_sequential.rs b/mp2-test/src/mpt_sequential.rs index 97a64dfb2..d1e79caa1 100644 --- a/mp2-test/src/mpt_sequential.rs +++ b/mp2-test/src/mpt_sequential.rs @@ -1,6 +1,17 @@ +use alloy::{ + eips::BlockNumberOrTag, + node_bindings::Anvil, + primitives::U256, + providers::{ext::AnvilApi, Provider, ProviderBuilder, RootProvider, WalletProvider}, + rpc::types::Transaction, + sol, +}; use eth_trie::{EthTrie, MemoryDB, Trie}; + +use mp2_common::eth::{ReceiptProofInfo, ReceiptQuery}; use rand::{thread_rng, Rng}; use std::sync::Arc; +use tokio::task::JoinSet; /// Simply the maximum number of nibbles a key can have. 
const MAX_KEY_NIBBLE_LEN: usize = 64; @@ -39,3 +50,144 @@ pub fn generate_random_storage_mpt( } (trie, keys[right_key_idx].to_vec()) } + +/// This function is used so that we can generate a Receipt Trie for a blog with varying transactions +/// (i.e. some we are interested in and some we are not). +fn generate_receipt_proofs() -> Vec { + // Make a contract that emits events so we can pick up on them + sol! { + #[allow(missing_docs)] + // solc v0.8.26; solc Counter.sol --via-ir --optimize --bin + #[sol(rpc, abi, bytecode="6080604052348015600e575f80fd5b506102288061001c5f395ff3fe608060405234801561000f575f80fd5b506004361061004a575f3560e01c8063488814e01461004e5780638381f58a14610058578063d09de08a14610076578063db73227914610080575b5f80fd5b61005661008a565b005b6100606100f8565b60405161006d9190610165565b60405180910390f35b61007e6100fd565b005b610088610115565b005b5f547fdcd9c7fa0342f01013bd0bf2bec103a81936162dcebd1f0c38b1d4164c17e0fc60405160405180910390a26100c06100fd565b5f547fdcd9c7fa0342f01013bd0bf2bec103a81936162dcebd1f0c38b1d4164c17e0fc60405160405180910390a26100f66100fd565b565b5f5481565b5f8081548092919061010e906101ab565b9190505550565b5f547fdcd9c7fa0342f01013bd0bf2bec103a81936162dcebd1f0c38b1d4164c17e0fc60405160405180910390a261014b6100fd565b565b5f819050919050565b61015f8161014d565b82525050565b5f6020820190506101785f830184610156565b92915050565b7f4e487b71000000000000000000000000000000000000000000000000000000005f52601160045260245ffd5b5f6101b58261014d565b91507fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff82036101e7576101e661017e565b5b60018201905091905056fea26469706673582212202787ca0f2ea71e118bc4d1bf239cde5ec4730aeb35a404c44e6c9d587316418564736f6c634300081a0033")] + contract EventEmitter { + uint256 public number; + event testEvent(uint256 indexed num); + + function testEmit() public { + emit testEvent(number); + increment(); + } + + function twoEmits() public { + emit testEvent(number); + increment(); + emit testEvent(number); + increment(); + } + + function 
increment() public { + number++; + } + } + } + + sol! { + #[allow(missing_docs)] + // solc v0.8.26; solc Counter.sol --via-ir --optimize --bin + #[sol(rpc, abi, bytecode="6080604052348015600e575f80fd5b506102288061001c5f395ff3fe608060405234801561000f575f80fd5b506004361061004a575f3560e01c8063488814e01461004e5780637229db15146100585780638381f58a14610062578063d09de08a14610080575b5f80fd5b61005661008a565b005b6100606100f8565b005b61006a610130565b6040516100779190610165565b60405180910390f35b610088610135565b005b5f547fbe3cbcfa5d4a62a595b4a15f51de63c11797bbef2ff687873efb0bb2852ee20f60405160405180910390a26100c0610135565b5f547fbe3cbcfa5d4a62a595b4a15f51de63c11797bbef2ff687873efb0bb2852ee20f60405160405180910390a26100f6610135565b565b5f547fbe3cbcfa5d4a62a595b4a15f51de63c11797bbef2ff687873efb0bb2852ee20f60405160405180910390a261012e610135565b565b5f5481565b5f80815480929190610146906101ab565b9190505550565b5f819050919050565b61015f8161014d565b82525050565b5f6020820190506101785f830184610156565b92915050565b7f4e487b71000000000000000000000000000000000000000000000000000000005f52601160045260245ffd5b5f6101b58261014d565b91507fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff82036101e7576101e661017e565b5b60018201905091905056fea26469706673582212203b7602644bfff2df89c2fe9498cd533326876859a0df7b96ac10be1fdc09c3a064736f6c634300081a0033")] + + contract OtherEmitter { + uint256 public number; + event otherEvent(uint256 indexed num); + + function otherEmit() public { + emit otherEvent(number); + increment(); + } + + function twoEmits() public { + emit otherEvent(number); + increment(); + emit otherEvent(number); + increment(); + } + + function increment() public { + number++; + } + } + } + + let rt = tokio::runtime::Runtime::new().unwrap(); + rt.block_on(async { + // Spin up a local node. 
+ + let rpc = ProviderBuilder::new() + .with_recommended_fillers() + .on_anvil_with_wallet_and_config(|a| Anvil::block_time(a, 1)); + + // Deploy the contract using anvil + let event_contract = EventEmitter::deploy(rpc.clone()).await.unwrap(); + + // Deploy the contract using anvil + let other_contract = OtherEmitter::deploy(rpc.clone()).await.unwrap(); + + let address = rpc.default_signer_address(); + rpc.anvil_set_nonce(address, U256::from(0)).await.unwrap(); + let tx_reqs = (0..25) + .map(|i| match i % 4 { + 0 => event_contract + .testEmit() + .into_transaction_request() + .nonce(i as u64), + 1 => event_contract + .twoEmits() + .into_transaction_request() + .nonce(i as u64), + 2 => other_contract + .otherEmit() + .into_transaction_request() + .nonce(i as u64), + 3 => other_contract + .twoEmits() + .into_transaction_request() + .nonce(i as u64), + _ => unreachable!(), + }) + .collect::>(); + let mut join_set = JoinSet::new(); + tx_reqs.into_iter().for_each(|tx_req| { + let rpc_clone = rpc.clone(); + join_set.spawn(async move { + rpc_clone + .send_transaction(tx_req) + .await + .unwrap() + .watch() + .await + .unwrap() + }); + }); + + let hashes = join_set.join_all().await; + let mut transactions = Vec::new(); + for hash in hashes.into_iter() { + transactions.push(rpc.get_transaction_by_hash(hash).await.unwrap().unwrap()); + } + + let block_number = transactions.first().unwrap().block_number.unwrap(); + + // We want to get the event signature so we can make a ReceiptQuery + let all_events = EventEmitter::abi::events(); + + let events = all_events.get("testEvent").unwrap(); + let receipt_query = ReceiptQuery::new(*event_contract.address(), events[0].clone()); + + receipt_query + .query_receipt_proofs(&rpc.root(), BlockNumberOrTag::Number(block_number)) + .await + .unwrap() + }) +} + +#[cfg(test)] +mod tests { + use super::*; + #[test] + fn tester() { + let receipt_proofs = generate_receipt_proofs(); + for proof in receipt_proofs.iter() { + println!("proof: {}", 
proof.tx_index); + } + } +} diff --git a/mp2-v1/src/contract_extraction/branch.rs b/mp2-v1/src/contract_extraction/branch.rs index ff27d2147..b78e7edfa 100644 --- a/mp2-v1/src/contract_extraction/branch.rs +++ b/mp2-v1/src/contract_extraction/branch.rs @@ -5,7 +5,7 @@ use anyhow::Result; use mp2_common::{ array::{Array, Vector, VectorWire}, keccak::{InputData, KeccakCircuit, KeccakWires, PACKED_HASH_LEN}, - mpt_sequential::{Circuit as MPTCircuit, PAD_LEN}, + mpt_sequential::{advance_key_branch, PAD_LEN}, public_inputs::PublicInputCommon, rlp::{decode_fixed_list, MAX_ITEMS_IN_LIST}, types::{CBuilder, GFp}, @@ -54,12 +54,14 @@ where // validity of the hash exposed by the proofs. let headers = decode_fixed_list::<_, D, MAX_ITEMS_IN_LIST>(b, &node.arr.arr, zero); - let (new_mpt_key, hash, is_valid, _) = MPTCircuit::<1, NODE_LEN>::advance_key_branch( - b, - &node.arr, - &child_proof.mpt_key(), - &headers, - ); + let (new_mpt_key, hash, is_valid, _) = + // MPTCircuit::<1, NODE_LEN, MAX_KEY_NIBBLE_LEN> + advance_key_branch( + b, + &node.arr, + &child_proof.mpt_key(), + &headers, + ); // We always enforce it's a branch node, i.e. that it has 17 entries. 
b.connect(is_valid.target, ttrue.target); @@ -111,7 +113,7 @@ where _builder_parameters: Self::CircuitBuilderParams, ) -> Self { let inputs = PublicInputs::from_slice(&verified_proofs[0].public_inputs); - BranchCircuit::build(builder, inputs) + BranchCircuit::<_>::build(builder, inputs) } fn assign_input(&self, inputs: Self::Inputs, pw: &mut PartialWitness) -> Result<()> { diff --git a/mp2-v1/src/length_extraction/branch.rs b/mp2-v1/src/length_extraction/branch.rs index 157f0b590..680ecdcba 100644 --- a/mp2-v1/src/length_extraction/branch.rs +++ b/mp2-v1/src/length_extraction/branch.rs @@ -5,9 +5,9 @@ use core::array; use mp2_common::{ array::{Vector, VectorWire}, keccak::{InputData, KeccakCircuit, KeccakWires, PACKED_HASH_LEN}, - mpt_sequential::Circuit as MPTCircuit, + mpt_sequential::advance_key_branch, public_inputs::PublicInputCommon, - rlp::{decode_fixed_list, MAX_ITEMS_IN_LIST}, + rlp::{decode_fixed_list, MAX_ITEMS_IN_LIST, MAX_KEY_NIBBLE_LEN}, types::{CBuilder, GFp}, utils::{Endianness, PackerTarget}, D, @@ -79,7 +79,9 @@ impl BranchLengthCircuit { let key = child_proof.mpt_key_wire(); let (key, hash, is_branch, _) = - MPTCircuit::<1, MAX_BRANCH_NODE_LEN>::advance_key_branch(cb, &node.arr, &key, &headers); + advance_key_branch::<_, D, MAX_BRANCH_NODE_LEN, MAX_KEY_NIBBLE_LEN>( + cb, &node.arr, &key, &headers, + ); // asserts this is a branch node cb.assert_one(is_branch.target); diff --git a/mp2-v1/src/lib.rs b/mp2-v1/src/lib.rs index 3bd35ba72..547290b0f 100644 --- a/mp2-v1/src/lib.rs +++ b/mp2-v1/src/lib.rs @@ -25,6 +25,7 @@ pub mod final_extraction; pub mod indexing; pub mod length_extraction; pub mod query; +pub mod receipt_extraction; pub mod values_extraction; #[cfg(test)] diff --git a/mp2-v1/src/receipt_extraction/leaf.rs b/mp2-v1/src/receipt_extraction/leaf.rs new file mode 100644 index 000000000..f7c99d8a7 --- /dev/null +++ b/mp2-v1/src/receipt_extraction/leaf.rs @@ -0,0 +1,510 @@ +//! 
Module handling the leaf node inside a Receipt Trie + +use super::public_inputs::PublicInputArgs; + +use mp2_common::{ + array::{Array, Vector, VectorWire}, + eth::{EventLogInfo, LogDataInfo, ReceiptProofInfo}, + group_hashing::CircuitBuilderGroupHashing, + keccak::{InputData, KeccakCircuit, KeccakWires}, + mpt_sequential::{ + MPTLeafOrExtensionNodeGeneric, ReceiptKeyWire, MAX_RECEIPT_LEAF_VALUE_LEN, + MAX_TX_KEY_NIBBLE_LEN, PAD_LEN, + }, + poseidon::H, + public_inputs::PublicInputCommon, + types::{CBuilder, GFp}, + utils::{Endianness, PackerTarget}, + D, F, +}; +use plonky2::{ + field::types::Field, + iop::{ + target::Target, + witness::{PartialWitness, WitnessWrite}, + }, +}; + +use plonky2_ecgfp5::gadgets::curve::{CircuitBuilderEcGFp5, CurveTarget}; + +use rlp::Encodable; +use serde::{Deserialize, Serialize}; + +/// Maximum number of logs per transaction we can process +const MAX_LOGS_PER_TX: usize = 2; + +#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)] +pub struct ReceiptLeafWires +where + [(); PAD_LEN(NODE_LEN)]:, +{ + /// The event we are monitoring for + pub event: EventWires, + /// The node bytes + pub node: VectorWire, + /// The actual value stored in the node + pub value: Array, + /// the hash of the node bytes + pub root: KeccakWires<{ PAD_LEN(NODE_LEN) }>, + /// The offset of the status of the transaction in the RLP encoded receipt node. 
+ pub status_offset: Target, + /// The offsets of the relevant logs inside the node + pub relevant_logs_offset: VectorWire, + /// The key in the MPT Trie + pub mpt_key: ReceiptKeyWire, +} + +/// Contains all the information for an [`Event`] in rlp form +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] +pub struct EventWires { + /// Size in bytes of the whole event + size: Target, + /// Packed contract address to check + address: Array, + /// Byte offset for the address from the beginning of a Log + add_rel_offset: Target, + /// Packed event signature, + event_signature: Array, + /// Byte offset from the start of the log to event signature + sig_rel_offset: Target, + /// The topics for this Log + topics: [LogColumn; 3], + /// The extra data stored by this Log + data: [LogColumn; 2], +} + +/// Contains all the information for a [`Log`] in rlp form +#[derive(Debug, Clone, Serialize, Deserialize, Copy, PartialEq, Eq)] +pub struct LogColumn { + column_id: Target, + /// The byte offset from the beggining of the log to this target + rel_byte_offset: Target, + /// The length of this topic/data + len: Target, +} + +impl LogColumn { + /// Convert to an array for metadata digest + pub fn to_array(&self) -> [Target; 3] { + [self.column_id, self.rel_byte_offset, self.len] + } + + /// Assigns a log colum from a [`LogDataInfo`] + pub fn assign(&self, pw: &mut PartialWitness, data: LogDataInfo) { + pw.set_target(self.column_id, F::from_canonical_usize(data.column_id)); + pw.set_target( + self.rel_byte_offset, + F::from_canonical_usize(data.rel_byte_offset), + ); + pw.set_target(self.len, F::from_canonical_usize(data.len)); + } +} + +impl EventWires { + /// Convert to an array for metadata digest + pub fn to_slice(&self) -> [Target; 70] { + let topics_flat = self + .topics + .iter() + .flat_map(|t| t.to_array()) + .collect::>(); + let data_flat = self + .data + .iter() + .flat_map(|t| t.to_array()) + .collect::>(); + let mut out = [Target::default(); 70]; + out[0] = 
self.size; + out.iter_mut() + .skip(1) + .take(20) + .enumerate() + .for_each(|(i, entry)| *entry = self.address.arr[i]); + out[21] = self.add_rel_offset; + out.iter_mut() + .skip(22) + .take(32) + .enumerate() + .for_each(|(i, entry)| *entry = self.event_signature.arr[i]); + out[54] = self.sig_rel_offset; + out.iter_mut() + .skip(55) + .take(9) + .enumerate() + .for_each(|(i, entry)| *entry = topics_flat[i]); + out.iter_mut() + .skip(64) + .take(6) + .enumerate() + .for_each(|(i, entry)| *entry = data_flat[i]); + out + } + + pub fn verify_logs_and_extract_values( + &self, + b: &mut CBuilder, + value: &Array, + status_offset: Target, + relevant_logs_offsets: &VectorWire, + ) -> CurveTarget { + let t = b._true(); + let zero = b.zero(); + let curve_zero = b.curve_zero(); + let mut value_digest = b.curve_zero(); + + // Enforce status is true. + let status = value.random_access_large_array(b, status_offset); + b.connect(status, t.target); + + for log_offset in relevant_logs_offsets.arr.arr { + // Extract the address bytes + let address_start = b.add(log_offset, self.add_rel_offset); + + let address_bytes = value.extract_array_large::<_, _, 20>(b, address_start); + + let address_check = address_bytes.equals(b, &self.address); + // Extract the signature bytes + let sig_start = b.add(log_offset, self.sig_rel_offset); + + let sig_bytes = value.extract_array_large::<_, _, 32>(b, sig_start); + + let sig_check = sig_bytes.equals(b, &self.event_signature); + + // We check to see if the relevant log offset is zero (this indicates a dummy value) + let dummy = b.is_equal(log_offset, zero); + + let address_to_enforce = b.select(dummy, t.target, address_check.target); + let sig_to_enforce = b.select(dummy, t.target, sig_check.target); + + b.connect(t.target, address_to_enforce); + b.connect(t.target, sig_to_enforce); + + for &log_column in self.topics.iter().chain(self.data.iter()) { + let data_start = b.add(log_offset, log_column.rel_byte_offset); + // The data is always 32 bytes 
long + let data_bytes = value.extract_array_large::<_, _, 32>(b, data_start); + + // Pack the data and get the digest + let packed_data = data_bytes.arr.pack(b, Endianness::Big); + let data_digest = b.map_to_curve_point( + &std::iter::once(log_column.column_id) + .chain(packed_data) + .collect::>(), + ); + + // For each column we use the `column_id` field to tell if its a dummy or not, zero indicates a dummy. + let dummy_column = b.is_equal(log_column.column_id, zero); + let selector = b.and(dummy_column, dummy); + + let selected_point = b.select_curve_point(selector, curve_zero, data_digest); + value_digest = b.add_curve_point(&[selected_point, value_digest]); + } + } + + value_digest + } +} + +/// Circuit to prove the correct derivation of the MPT key from a simple slot +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct ReceiptLeafCircuit { + pub(crate) info: ReceiptProofInfo, +} + +impl ReceiptLeafCircuit +where + [(); PAD_LEN(NODE_LEN)]:, +{ + pub fn build_leaf_wires(b: &mut CBuilder) -> ReceiptLeafWires { + // Build the event wires + let event_wires = Self::build_event_wires(b); + + // Add targets for the data specific to this receipt + let index = b.add_virtual_target(); + let status_offset = b.add_virtual_target(); + let relevant_logs_offset = VectorWire::::new(b); + + let mpt_key = ReceiptKeyWire::new(b); + + // Build the node wires. 
+ let wires = MPTLeafOrExtensionNodeGeneric::build_and_advance_key::< + _, + D, + NODE_LEN, + MAX_RECEIPT_LEAF_VALUE_LEN, + >(b, &mpt_key); + let node = wires.node; + let root = wires.root; + + // For each relevant log in the transaction we have to verify it lines up with the event we are monitoring for + let receipt_body = wires.value; + let mut dv = event_wires.verify_logs_and_extract_values( + b, + &receipt_body, + status_offset, + &relevant_logs_offset, + ); + let value_id = b.map_to_curve_point(&[index]); + dv = b.add_curve_point(&[value_id, dv]); + + let dm = b.hash_n_to_hash_no_pad::(event_wires.to_slice().to_vec()); + + // Register the public inputs + PublicInputArgs { + h: &root.output_array, + k: &wires.key, + dv, + dm, + } + .register_args(b); + + ReceiptLeafWires { + event: event_wires, + node, + value: receipt_body, + root, + status_offset, + relevant_logs_offset, + mpt_key, + } + } + + fn build_event_wires(b: &mut CBuilder) -> EventWires { + let size = b.add_virtual_target(); + + // Packed address + let arr = [b.add_virtual_target(); 20]; + let address = Array::from_array(arr); + + // relative offset of the address + let add_rel_offset = b.add_virtual_target(); + + // Event signature + let arr = [b.add_virtual_target(); 32]; + let event_signature = Array::from_array(arr); + + // Signature relative offset + let sig_rel_offset = b.add_virtual_target(); + + // topics + let topics = [Self::build_log_column(b); 3]; + + // data + let data = [Self::build_log_column(b); 2]; + + EventWires { + size, + address, + add_rel_offset, + event_signature, + sig_rel_offset, + topics, + data, + } + } + + fn build_log_column(b: &mut CBuilder) -> LogColumn { + let column_id = b.add_virtual_target(); + let rel_byte_offset = b.add_virtual_target(); + let len = b.add_virtual_target(); + + LogColumn { + column_id, + rel_byte_offset, + len, + } + } + + pub fn assign(&self, pw: &mut PartialWitness, wires: &ReceiptLeafWires) { + self.assign_event_wires(pw, &wires.event); + + let 
node = self + .info + .mpt_proof + .last() + .expect("Receipt MPT proof had no nodes"); + let pad_node = + Vector::::from_vec(node).expect("invalid node given"); + wires.node.assign(pw, &pad_node); + KeccakCircuit::<{ PAD_LEN(NODE_LEN) }>::assign( + pw, + &wires.root, + &InputData::Assigned(&pad_node), + ); + + pw.set_target( + wires.status_offset, + GFp::from_canonical_usize(self.info.status_offset), + ); + + let relevant_logs_vector = + Vector::::from_vec(&self.info.relevant_logs_offset) + .expect("Could not assign relevant logs offsets"); + wires.relevant_logs_offset.assign(pw, &relevant_logs_vector); + + let key_encoded = self.info.tx_index.rlp_bytes(); + let nibbles = key_encoded + .iter() + .flat_map(|byte| [byte / 16, byte % 16]) + .collect::>(); + + let mut key_nibbles = [0u8; MAX_TX_KEY_NIBBLE_LEN]; + key_nibbles + .iter_mut() + .enumerate() + .for_each(|(index, nibble)| { + if index < nibbles.len() { + *nibble = nibbles[index] + } + }); + + wires.mpt_key.assign(pw, &key_nibbles, self.info.index_size); + } + + pub fn assign_event_wires(&self, pw: &mut PartialWitness, wires: &EventWires) { + let EventLogInfo { + size, + address, + add_rel_offset, + event_signature, + sig_rel_offset, + topics, + data, + } = self.info.event_log_info; + + pw.set_target(wires.size, F::from_canonical_usize(size)); + + wires + .address + .assign(pw, &address.0.map(|byte| GFp::from_canonical_u8(byte))); + + pw.set_target( + wires.add_rel_offset, + F::from_canonical_usize(add_rel_offset), + ); + + wires.event_signature.assign( + pw, + &event_signature.map(|byte| GFp::from_canonical_u8(byte)), + ); + + pw.set_target( + wires.sig_rel_offset, + F::from_canonical_usize(sig_rel_offset), + ); + + wires + .topics + .iter() + .zip(topics.into_iter()) + .for_each(|(topic_wire, topic)| topic_wire.assign(pw, topic)); + wires + .data + .iter() + .zip(data.into_iter()) + .for_each(|(data_wire, data)| data_wire.assign(pw, data)); + } +} + +#[cfg(test)] +mod tests { + use super::*; + 
#[derive(Clone, Debug)] + struct TestReceiptLeafCircuit { + c: ReceiptLeafCircuit, + exp_value: Vec, + } + + impl UserCircuit for TestReceiptLeafCircuit + where + [(); PAD_LEN(NODE_LEN)]:, + { + // Leaf wires + expected extracted value + type Wires = ( + ReceiptLeafWires, + Array, + ); + + fn build(b: &mut CircuitBuilder) -> Self::Wires { + let exp_value = Array::::new(b); + + let leaf_wires = ReceiptLeafCircuit::::build(b); + leaf_wires.value.enforce_equal(b, &exp_value); + + (leaf_wires, exp_value) + } + + fn prove(&self, pw: &mut PartialWitness, wires: &Self::Wires) { + self.c.assign(pw, &wires.0); + wires + .1 + .assign_bytes(pw, &self.exp_value.clone().try_into().unwrap()); + } + } + #[test] + fn test_leaf_circuit() { + const NODE_LEN: usize = 80; + + let simple_slot = 2_u8; + let slot = StorageSlot::Simple(simple_slot as usize); + let contract_address = Address::from_str(TEST_CONTRACT_ADDRESS).unwrap(); + let chain_id = 10; + let id = identifier_single_var_column(simple_slot, &contract_address, chain_id, vec![]); + + let (mut trie, _) = generate_random_storage_mpt::<3, MAPPING_LEAF_VALUE_LEN>(); + let value = random_vector(MAPPING_LEAF_VALUE_LEN); + let encoded_value: Vec = rlp::encode(&value).to_vec(); + // assert we added one byte of RLP header + assert_eq!(encoded_value.len(), MAPPING_LEAF_VALUE_LEN + 1); + println!("encoded value {:?}", encoded_value); + trie.insert(&slot.mpt_key(), &encoded_value).unwrap(); + trie.root_hash().unwrap(); + + let proof = trie.get_proof(&slot.mpt_key_vec()).unwrap(); + let node = proof.last().unwrap().clone(); + + let c = LeafSingleCircuit:: { + node: node.clone(), + slot: SimpleSlot::new(simple_slot), + id, + }; + let test_circuit = TestLeafSingleCircuit { + c, + exp_value: value.clone(), + }; + + let proof = run_circuit::(test_circuit); + let pi = PublicInputs::new(&proof.public_inputs); + + { + let exp_hash = keccak256(&node).pack(Endianness::Little); + assert_eq!(pi.root_hash(), exp_hash); + } + { + let (key, ptr) = 
pi.mpt_key_info(); + + let exp_key = slot.mpt_key_vec(); + let exp_key: Vec<_> = bytes_to_nibbles(&exp_key) + .into_iter() + .map(F::from_canonical_u8) + .collect(); + assert_eq!(key, exp_key); + + let leaf_key: Vec> = rlp::decode_list(&node); + let nib = Nibbles::from_compact(&leaf_key[0]); + let exp_ptr = F::from_canonical_usize(MAX_KEY_NIBBLE_LEN - 1 - nib.nibbles().len()); + assert_eq!(exp_ptr, ptr); + } + // Check values digest + { + let exp_digest = compute_leaf_single_values_digest(id, &value); + assert_eq!(pi.values_digest(), exp_digest.to_weierstrass()); + } + // Check metadata digest + { + let exp_digest = compute_leaf_single_metadata_digest(id, simple_slot); + assert_eq!(pi.metadata_digest(), exp_digest.to_weierstrass()); + } + assert_eq!(pi.n(), F::ONE); + } +} \ No newline at end of file diff --git a/mp2-v1/src/receipt_extraction/mod.rs b/mp2-v1/src/receipt_extraction/mod.rs new file mode 100644 index 000000000..6c3803e08 --- /dev/null +++ b/mp2-v1/src/receipt_extraction/mod.rs @@ -0,0 +1,2 @@ +pub mod leaf; +pub mod public_inputs; diff --git a/mp2-v1/src/receipt_extraction/public_inputs.rs b/mp2-v1/src/receipt_extraction/public_inputs.rs new file mode 100644 index 000000000..901fc0b29 --- /dev/null +++ b/mp2-v1/src/receipt_extraction/public_inputs.rs @@ -0,0 +1,76 @@ +//! Public inputs for Receipt Extraction circuits + +use mp2_common::{ + keccak::{OutputHash, PACKED_HASH_LEN}, + mpt_sequential::ReceiptKeyWire, + public_inputs::{PublicInputCommon, PublicInputRange}, + types::{CBuilder, CURVE_TARGET_LEN}, +}; +use plonky2::hash::hash_types::{HashOutTarget, NUM_HASH_OUT_ELTS}; + +use plonky2_ecgfp5::gadgets::curve::{CircuitBuilderEcGFp5, CurveTarget}; + +/// The maximum length of a transaction index in a block in nibbles. +/// Theoretically a block can have up to 1428 transactions in Ethereum, which takes 3 bytes to represent. 
+const MAX_INDEX_NIBBLES: usize = 6; +// Contract extraction public Inputs: +/// - `H : [8]F` : packed node hash +const H_RANGE: PublicInputRange = 0..PACKED_HASH_LEN; +/// - `K : [6]F` : Length of the transaction index in nibbles +const K_RANGE: PublicInputRange = H_RANGE.end..H_RANGE.end + MAX_INDEX_NIBBLES; +/// `T : F` pointer in the MPT indicating portion of the key already traversed (from 6 → 0) +const T_RANGE: PublicInputRange = K_RANGE.end..K_RANGE.end + 1; +/// - `DV : Digest[F]` : value digest of all rows to extract +const DV_RANGE: PublicInputRange = T_RANGE.end..T_RANGE.end + CURVE_TARGET_LEN; +/// - `DM : Digest[F]` : metadata digest to extract +const DM_RANGE: PublicInputRange = DV_RANGE.end..DV_RANGE.end + NUM_HASH_OUT_ELTS; + +/// Public inputs for contract extraction +#[derive(Clone, Debug)] +pub struct PublicInputArgs<'a> { + /// The hash of the node + pub(crate) h: &'a OutputHash, + /// The MPT key + pub(crate) k: &'a ReceiptKeyWire, + /// Digest of the values + pub(crate) dv: CurveTarget, + /// The poseidon hash of the metadata + pub(crate) dm: HashOutTarget, +} + +impl<'a> PublicInputCommon for PublicInputArgs<'a> { + const RANGES: &'static [PublicInputRange] = &[H_RANGE, K_RANGE, T_RANGE, DV_RANGE, DM_RANGE]; + + fn register_args(&self, cb: &mut CBuilder) { + self.generic_register_args(cb) + } +} + +impl<'a> PublicInputArgs<'a> { + /// Create a new public inputs. 
+ pub fn new( + h: &'a OutputHash, + k: &'a ReceiptKeyWire, + dv: CurveTarget, + dm: HashOutTarget, + ) -> Self { + Self { h, k, dv, dm } + } +} + +impl<'a> PublicInputArgs<'a> { + pub fn generic_register_args(&self, cb: &mut CBuilder) { + self.h.register_as_public_input(cb); + self.k.register_as_input(cb); + cb.register_curve_public_input(self.dv); + cb.register_public_inputs(&self.dm.elements); + } + + pub fn digest_value(&self) -> CurveTarget { + self.dv + } + + pub fn digest_metadata(&self) -> HashOutTarget { + self.dm + } +} diff --git a/mp2-v1/src/values_extraction/branch.rs b/mp2-v1/src/values_extraction/branch.rs index 8b713129f..ec85c487c 100644 --- a/mp2-v1/src/values_extraction/branch.rs +++ b/mp2-v1/src/values_extraction/branch.rs @@ -6,9 +6,10 @@ use mp2_common::{ array::{Array, Vector, VectorWire}, group_hashing::CircuitBuilderGroupHashing, keccak::{InputData, KeccakCircuit, KeccakWires, HASH_LEN, PACKED_HASH_LEN}, - mpt_sequential::{Circuit as MPTCircuit, MPTKeyWire, PAD_LEN}, + mpt_sequential::{advance_key_branch, MPTKeyWire, NIBBLES_TO_BYTES, PAD_LEN}, public_inputs::PublicInputCommon, - rlp::{decode_fixed_list, MAX_ITEMS_IN_LIST}, + rlp::{decode_fixed_list, MAX_ITEMS_IN_LIST, MAX_KEY_NIBBLE_LEN}, + serialization::{deserialize, serialize}, types::{CBuilder, GFp}, utils::{less_than, Endianness, PackerTarget}, D, @@ -56,7 +57,10 @@ where pub fn build( b: &mut CBuilder, inputs: &[PublicInputs; N_CHILDREN], - ) -> BranchWires { + ) -> BranchWires + where + [(); NIBBLES_TO_BYTES(MAX_KEY_NIBBLE_LEN)]:, + { let zero = b.zero(); let one = b.one(); let ttrue = b._true(); @@ -114,7 +118,7 @@ where let child_key = proof_inputs.mpt_key(); let (_, hash, is_valid, nibble) = - MPTCircuit::<1, NODE_LEN>::advance_key_branch(b, &node.arr, &child_key, &headers); + advance_key_branch(b, &node.arr, &child_key, &headers); // We always enforce it's a branch node, i.e. that it has 17 entries. 
b.connect(is_valid.target, ttrue.target); diff --git a/rustc-ice-2024-11-04T12_36_50-74186.txt b/rustc-ice-2024-11-04T12_36_50-74186.txt new file mode 100644 index 000000000..d48781bb7 --- /dev/null +++ b/rustc-ice-2024-11-04T12_36_50-74186.txt @@ -0,0 +1,63 @@ +thread 'rustc' panicked at /rustc/3f1be1ec7ec3d8e80beb381ee82164a0aa3ca777/compiler/rustc_type_ir/src/binder.rs:777:9: +const parameter `KEY_LEN_BYTES/#3` (KEY_LEN_BYTES/#3/3) out of range when instantiating args=[DEPTH/#0, NODE_LEN/#1, KEY_LEN/#2] +stack backtrace: + 0: 0x11209ec0c - std::backtrace::Backtrace::create::hd2b9e24a71fd24ea + 1: 0x10ff1b468 - as core[78ac8d9058276e2b]::ops::function::Fn<(&dyn for<'a, 'b> core[78ac8d9058276e2b]::ops::function::Fn<(&'a std[25544cbdc54c9068]::panic::PanicHookInfo<'b>,), Output = ()> + core[78ac8d9058276e2b]::marker::Sync + core[78ac8d9058276e2b]::marker::Send, &std[25544cbdc54c9068]::panic::PanicHookInfo)>>::call + 2: 0x1120b9608 - std::panicking::rust_panic_with_hook::hbaa3501f6245c05a + 3: 0x1120b9260 - std::panicking::begin_panic_handler::{{closure}}::hd341aa107154c508 + 4: 0x1120b6e28 - std::sys::backtrace::__rust_end_short_backtrace::hca058610990f2143 + 5: 0x1120b8f24 - _rust_begin_unwind + 6: 0x1147a7ee4 - core::panicking::panic_fmt::h81353f1686d3b9a2 + 7: 0x1148ddc1c - >::const_param_out_of_range + 8: 0x110de5ebc - as rustc_type_ir[47614f3ecd88d1ff]::fold::FallibleTypeFolder>::try_fold_const + 9: 0x110db651c - rustc_middle[71f41ea3d2538dcd]::ty::util::fold_list::, &rustc_middle[71f41ea3d2538dcd]::ty::list::RawList<(), rustc_middle[71f41ea3d2538dcd]::ty::generic_args::GenericArg>, rustc_middle[71f41ea3d2538dcd]::ty::generic_args::GenericArg, <&rustc_middle[71f41ea3d2538dcd]::ty::list::RawList<(), rustc_middle[71f41ea3d2538dcd]::ty::generic_args::GenericArg> as rustc_type_ir[47614f3ecd88d1ff]::fold::TypeFoldable>::try_fold_with>::{closure#0}> + 10: 0x110daa120 - >::super_fold_with::> + 11: 0x110cf9f18 - >::super_fold_with::> + 12: 0x110d70d94 - 
<&rustc_middle[71f41ea3d2538dcd]::ty::list::RawList<(), rustc_middle[71f41ea3d2538dcd]::ty::generic_args::GenericArg> as rustc_type_ir[47614f3ecd88d1ff]::fold::TypeFoldable>::try_fold_with::> + 13: 0x110cf7c2c - as rustc_type_ir[47614f3ecd88d1ff]::fold::TypeFoldable>::try_fold_with::> + 14: 0x110cf73b8 - as rustc_type_ir[47614f3ecd88d1ff]::fold::TypeFoldable>::try_fold_with::> + 15: 0x110df372c - >::try_fold_with::> + 16: 0x110dc5a1c - ::instantiate_into + 17: 0x111cc9848 - ::nominal_obligations + 18: 0x111cc8710 - >::visit_const + 19: 0x111cc7b58 - >::visit_ty + 20: 0x111cc5db0 - rustc_trait_selection[55a89e4d0d7ea7c6]::traits::wf::obligations + 21: 0x111e3b15c - ::process_obligation + 22: 0x111e1c724 - >::process_obligations:: + 23: 0x111e383c4 - as rustc_infer[3d6a6834044a20c4]::traits::engine::TraitEngine>::select_where_possible + 24: 0x111c66608 - >::assumed_wf_types_and_report_errors + 25: 0x110376c6c - rustc_hir_analysis[6576f1f28a8b13c4]::check::wfcheck::check_well_formed + 26: 0x11160ad34 - rustc_query_impl[30466c14bdba48]::plumbing::__rust_begin_short_backtrace::> + 27: 0x1117112e0 - >::call_once + 28: 0x1115abf1c - rustc_query_system[972cd5053bb6237d]::query::plumbing::try_execute_query::>, false, false, false>, rustc_query_impl[30466c14bdba48]::plumbing::QueryCtxt, true> + 29: 0x111788630 - rustc_query_impl[30466c14bdba48]::query_impl::check_well_formed::get_query_incr::__rust_end_short_backtrace + 30: 0x11036a5ec - rustc_middle[71f41ea3d2538dcd]::query::plumbing::query_ensure_error_guaranteed::>, ()> + 31: 0x11037d898 - rustc_hir_analysis[6576f1f28a8b13c4]::check::wfcheck::check_mod_type_wf + 32: 0x11160ad10 - rustc_query_impl[30466c14bdba48]::plumbing::__rust_begin_short_backtrace::> + 33: 0x111711048 - >::call_once + 34: 0x11156cf28 - rustc_query_system[972cd5053bb6237d]::query::plumbing::try_execute_query::>, false, false, false>, rustc_query_impl[30466c14bdba48]::plumbing::QueryCtxt, true> + 35: 0x111775ecc - 
rustc_query_impl[30466c14bdba48]::query_impl::check_mod_type_wf::get_query_incr::__rust_end_short_backtrace + 36: 0x11036534c - ::run::<(), rustc_data_structures[3bb601c435a2842f]::sync::parallel::enabled::par_for_each_in<&rustc_hir[c448669f75bf36d2]::hir_id::OwnerId, &[rustc_hir[c448669f75bf36d2]::hir_id::OwnerId], ::par_for_each_module::{closure#0}>::{closure#0}::{closure#1}::{closure#0}> + 37: 0x11041513c - rustc_hir_analysis[6576f1f28a8b13c4]::check_crate + 38: 0x1108bb918 - rustc_interface[6b7e568f89869ca2]::passes::analysis + 39: 0x11160e944 - rustc_query_impl[30466c14bdba48]::plumbing::__rust_begin_short_backtrace::> + 40: 0x1116b2cf0 - >::call_once + 41: 0x11152ae34 - rustc_query_system[972cd5053bb6237d]::query::plumbing::try_execute_query::>, false, false, false>, rustc_query_impl[30466c14bdba48]::plumbing::QueryCtxt, true> + 42: 0x1117636ec - rustc_query_impl[30466c14bdba48]::query_impl::analysis::get_query_incr::__rust_end_short_backtrace + 43: 0x10ff66ee0 - ::enter::> + 44: 0x10ff34448 - ::enter::, rustc_span[8c398afceecb6ede]::ErrorGuaranteed>> + 45: 0x10ff81978 - rustc_span[8c398afceecb6ede]::create_session_globals_then::, rustc_interface[6b7e568f89869ca2]::util::run_in_thread_with_globals, rustc_driver_impl[763c4ce7974ba5fb]::run_compiler::{closure#0}>::{closure#1}, core[78ac8d9058276e2b]::result::Result<(), rustc_span[8c398afceecb6ede]::ErrorGuaranteed>>::{closure#0}, core[78ac8d9058276e2b]::result::Result<(), rustc_span[8c398afceecb6ede]::ErrorGuaranteed>>::{closure#0}::{closure#0}::{closure#0}> + 46: 0x10ff7e0b8 - std[25544cbdc54c9068]::sys::backtrace::__rust_begin_short_backtrace::, rustc_driver_impl[763c4ce7974ba5fb]::run_compiler::{closure#0}>::{closure#1}, core[78ac8d9058276e2b]::result::Result<(), rustc_span[8c398afceecb6ede]::ErrorGuaranteed>>::{closure#0}, core[78ac8d9058276e2b]::result::Result<(), rustc_span[8c398afceecb6ede]::ErrorGuaranteed>>::{closure#0}::{closure#0}, core[78ac8d9058276e2b]::result::Result<(), 
rustc_span[8c398afceecb6ede]::ErrorGuaranteed>> + 47: 0x10ff7edb8 - <::spawn_unchecked_, rustc_driver_impl[763c4ce7974ba5fb]::run_compiler::{closure#0}>::{closure#1}, core[78ac8d9058276e2b]::result::Result<(), rustc_span[8c398afceecb6ede]::ErrorGuaranteed>>::{closure#0}, core[78ac8d9058276e2b]::result::Result<(), rustc_span[8c398afceecb6ede]::ErrorGuaranteed>>::{closure#0}::{closure#0}, core[78ac8d9058276e2b]::result::Result<(), rustc_span[8c398afceecb6ede]::ErrorGuaranteed>>::{closure#1} as core[78ac8d9058276e2b]::ops::function::FnOnce<()>>::call_once::{shim:vtable#0} + 48: 0x1120c3a78 - std::sys::pal::unix::thread::Thread::new::thread_start::h9a782c2ee1570786 + 49: 0x18b24ef94 - __pthread_joiner_wake + + +rustc version: 1.84.0-nightly (3f1be1ec7 2024-10-28) +platform: aarch64-apple-darwin + +query stack during panic: +#0 [check_well_formed] checking that `mpt_sequential::` is well-formed +#1 [check_mod_type_wf] checking that types are well-formed in module `mpt_sequential` +#2 [analysis] running analysis passes on this crate +end of query stack diff --git a/rustc-ice-2024-11-04T12_37_01-74253.txt b/rustc-ice-2024-11-04T12_37_01-74253.txt new file mode 100644 index 000000000..6bcecf0f7 --- /dev/null +++ b/rustc-ice-2024-11-04T12_37_01-74253.txt @@ -0,0 +1,62 @@ +thread 'rustc' panicked at /rustc/3f1be1ec7ec3d8e80beb381ee82164a0aa3ca777/compiler/rustc_type_ir/src/binder.rs:777:9: +const parameter `KEY_LEN_BYTES/#3` (KEY_LEN_BYTES/#3/3) out of range when instantiating args=[DEPTH/#0, NODE_LEN/#1, KEY_LEN/#2] +stack backtrace: + 0: 0x110a2ec0c - std::backtrace::Backtrace::create::hd2b9e24a71fd24ea + 1: 0x10e8ab468 - as core[78ac8d9058276e2b]::ops::function::Fn<(&dyn for<'a, 'b> core[78ac8d9058276e2b]::ops::function::Fn<(&'a std[25544cbdc54c9068]::panic::PanicHookInfo<'b>,), Output = ()> + core[78ac8d9058276e2b]::marker::Sync + core[78ac8d9058276e2b]::marker::Send, &std[25544cbdc54c9068]::panic::PanicHookInfo)>>::call + 2: 0x110a49608 - 
std::panicking::rust_panic_with_hook::hbaa3501f6245c05a + 3: 0x110a49260 - std::panicking::begin_panic_handler::{{closure}}::hd341aa107154c508 + 4: 0x110a46e28 - std::sys::backtrace::__rust_end_short_backtrace::hca058610990f2143 + 5: 0x110a48f24 - _rust_begin_unwind + 6: 0x113137ee4 - core::panicking::panic_fmt::h81353f1686d3b9a2 + 7: 0x11326dc1c - >::const_param_out_of_range + 8: 0x10f775ebc - as rustc_type_ir[47614f3ecd88d1ff]::fold::FallibleTypeFolder>::try_fold_const + 9: 0x10f74651c - rustc_middle[71f41ea3d2538dcd]::ty::util::fold_list::, &rustc_middle[71f41ea3d2538dcd]::ty::list::RawList<(), rustc_middle[71f41ea3d2538dcd]::ty::generic_args::GenericArg>, rustc_middle[71f41ea3d2538dcd]::ty::generic_args::GenericArg, <&rustc_middle[71f41ea3d2538dcd]::ty::list::RawList<(), rustc_middle[71f41ea3d2538dcd]::ty::generic_args::GenericArg> as rustc_type_ir[47614f3ecd88d1ff]::fold::TypeFoldable>::try_fold_with>::{closure#0}> + 10: 0x10f73a120 - >::super_fold_with::> + 11: 0x10f689f18 - >::super_fold_with::> + 12: 0x10f687ca0 - as rustc_type_ir[47614f3ecd88d1ff]::fold::TypeFoldable>::try_fold_with::> + 13: 0x10f6873b8 - as rustc_type_ir[47614f3ecd88d1ff]::fold::TypeFoldable>::try_fold_with::> + 14: 0x10f78372c - >::try_fold_with::> + 15: 0x10f755a1c - ::instantiate_into + 16: 0x110659848 - ::nominal_obligations + 17: 0x110658710 - >::visit_const + 18: 0x110657b58 - >::visit_ty + 19: 0x110655db0 - rustc_trait_selection[55a89e4d0d7ea7c6]::traits::wf::obligations + 20: 0x1107cb15c - ::process_obligation + 21: 0x1107ac724 - >::process_obligations:: + 22: 0x1107c83c4 - as rustc_infer[3d6a6834044a20c4]::traits::engine::TraitEngine>::select_where_possible + 23: 0x1105f6608 - >::assumed_wf_types_and_report_errors + 24: 0x10ed06c6c - rustc_hir_analysis[6576f1f28a8b13c4]::check::wfcheck::check_well_formed + 25: 0x10ff9ad34 - rustc_query_impl[30466c14bdba48]::plumbing::__rust_begin_short_backtrace::> + 26: 0x1100a12e0 - >::call_once + 27: 0x10ff3bf1c - 
rustc_query_system[972cd5053bb6237d]::query::plumbing::try_execute_query::>, false, false, false>, rustc_query_impl[30466c14bdba48]::plumbing::QueryCtxt, true> + 28: 0x110118630 - rustc_query_impl[30466c14bdba48]::query_impl::check_well_formed::get_query_incr::__rust_end_short_backtrace + 29: 0x10ecfa5ec - rustc_middle[71f41ea3d2538dcd]::query::plumbing::query_ensure_error_guaranteed::>, ()> + 30: 0x10ed0d898 - rustc_hir_analysis[6576f1f28a8b13c4]::check::wfcheck::check_mod_type_wf + 31: 0x10ff9ad10 - rustc_query_impl[30466c14bdba48]::plumbing::__rust_begin_short_backtrace::> + 32: 0x1100a1048 - >::call_once + 33: 0x10fefcf28 - rustc_query_system[972cd5053bb6237d]::query::plumbing::try_execute_query::>, false, false, false>, rustc_query_impl[30466c14bdba48]::plumbing::QueryCtxt, true> + 34: 0x110105ecc - rustc_query_impl[30466c14bdba48]::query_impl::check_mod_type_wf::get_query_incr::__rust_end_short_backtrace + 35: 0x10ecf534c - ::run::<(), rustc_data_structures[3bb601c435a2842f]::sync::parallel::enabled::par_for_each_in<&rustc_hir[c448669f75bf36d2]::hir_id::OwnerId, &[rustc_hir[c448669f75bf36d2]::hir_id::OwnerId], ::par_for_each_module::{closure#0}>::{closure#0}::{closure#1}::{closure#0}> + 36: 0x10eda513c - rustc_hir_analysis[6576f1f28a8b13c4]::check_crate + 37: 0x10f24b918 - rustc_interface[6b7e568f89869ca2]::passes::analysis + 38: 0x10ff9e944 - rustc_query_impl[30466c14bdba48]::plumbing::__rust_begin_short_backtrace::> + 39: 0x110042cf0 - >::call_once + 40: 0x10febae34 - rustc_query_system[972cd5053bb6237d]::query::plumbing::try_execute_query::>, false, false, false>, rustc_query_impl[30466c14bdba48]::plumbing::QueryCtxt, true> + 41: 0x1100f36ec - rustc_query_impl[30466c14bdba48]::query_impl::analysis::get_query_incr::__rust_end_short_backtrace + 42: 0x10e8f6ee0 - ::enter::> + 43: 0x10e8c4448 - ::enter::, rustc_span[8c398afceecb6ede]::ErrorGuaranteed>> + 44: 0x10e911978 - rustc_span[8c398afceecb6ede]::create_session_globals_then::, 
rustc_interface[6b7e568f89869ca2]::util::run_in_thread_with_globals, rustc_driver_impl[763c4ce7974ba5fb]::run_compiler::{closure#0}>::{closure#1}, core[78ac8d9058276e2b]::result::Result<(), rustc_span[8c398afceecb6ede]::ErrorGuaranteed>>::{closure#0}, core[78ac8d9058276e2b]::result::Result<(), rustc_span[8c398afceecb6ede]::ErrorGuaranteed>>::{closure#0}::{closure#0}::{closure#0}> + 45: 0x10e90e0b8 - std[25544cbdc54c9068]::sys::backtrace::__rust_begin_short_backtrace::, rustc_driver_impl[763c4ce7974ba5fb]::run_compiler::{closure#0}>::{closure#1}, core[78ac8d9058276e2b]::result::Result<(), rustc_span[8c398afceecb6ede]::ErrorGuaranteed>>::{closure#0}, core[78ac8d9058276e2b]::result::Result<(), rustc_span[8c398afceecb6ede]::ErrorGuaranteed>>::{closure#0}::{closure#0}, core[78ac8d9058276e2b]::result::Result<(), rustc_span[8c398afceecb6ede]::ErrorGuaranteed>> + 46: 0x10e90edb8 - <::spawn_unchecked_, rustc_driver_impl[763c4ce7974ba5fb]::run_compiler::{closure#0}>::{closure#1}, core[78ac8d9058276e2b]::result::Result<(), rustc_span[8c398afceecb6ede]::ErrorGuaranteed>>::{closure#0}, core[78ac8d9058276e2b]::result::Result<(), rustc_span[8c398afceecb6ede]::ErrorGuaranteed>>::{closure#0}::{closure#0}, core[78ac8d9058276e2b]::result::Result<(), rustc_span[8c398afceecb6ede]::ErrorGuaranteed>>::{closure#1} as core[78ac8d9058276e2b]::ops::function::FnOnce<()>>::call_once::{shim:vtable#0} + 47: 0x110a53a78 - std::sys::pal::unix::thread::Thread::new::thread_start::h9a782c2ee1570786 + 48: 0x18b24ef94 - __pthread_joiner_wake + + +rustc version: 1.84.0-nightly (3f1be1ec7 2024-10-28) +platform: aarch64-apple-darwin + +query stack during panic: +#0 [check_well_formed] checking that `mpt_sequential::` is well-formed +#1 [check_mod_type_wf] checking that types are well-formed in module `mpt_sequential` +#2 [analysis] running analysis passes on this crate +end of query stack diff --git a/rustc-ice-2024-11-04T12_37_13-74307.txt b/rustc-ice-2024-11-04T12_37_13-74307.txt new file mode 100644 
index 000000000..6eb26635b --- /dev/null +++ b/rustc-ice-2024-11-04T12_37_13-74307.txt @@ -0,0 +1,62 @@ +thread 'rustc' panicked at /rustc/3f1be1ec7ec3d8e80beb381ee82164a0aa3ca777/compiler/rustc_type_ir/src/binder.rs:777:9: +const parameter `KEY_LEN_BYTES/#3` (KEY_LEN_BYTES/#3/3) out of range when instantiating args=[DEPTH/#0, NODE_LEN/#1, KEY_LEN/#2] +stack backtrace: + 0: 0x10e1cec0c - std::backtrace::Backtrace::create::hd2b9e24a71fd24ea + 1: 0x10c04b468 - as core[78ac8d9058276e2b]::ops::function::Fn<(&dyn for<'a, 'b> core[78ac8d9058276e2b]::ops::function::Fn<(&'a std[25544cbdc54c9068]::panic::PanicHookInfo<'b>,), Output = ()> + core[78ac8d9058276e2b]::marker::Sync + core[78ac8d9058276e2b]::marker::Send, &std[25544cbdc54c9068]::panic::PanicHookInfo)>>::call + 2: 0x10e1e9608 - std::panicking::rust_panic_with_hook::hbaa3501f6245c05a + 3: 0x10e1e9260 - std::panicking::begin_panic_handler::{{closure}}::hd341aa107154c508 + 4: 0x10e1e6e28 - std::sys::backtrace::__rust_end_short_backtrace::hca058610990f2143 + 5: 0x10e1e8f24 - _rust_begin_unwind + 6: 0x1108d7ee4 - core::panicking::panic_fmt::h81353f1686d3b9a2 + 7: 0x110a0dc1c - >::const_param_out_of_range + 8: 0x10cf15ebc - as rustc_type_ir[47614f3ecd88d1ff]::fold::FallibleTypeFolder>::try_fold_const + 9: 0x10cee651c - rustc_middle[71f41ea3d2538dcd]::ty::util::fold_list::, &rustc_middle[71f41ea3d2538dcd]::ty::list::RawList<(), rustc_middle[71f41ea3d2538dcd]::ty::generic_args::GenericArg>, rustc_middle[71f41ea3d2538dcd]::ty::generic_args::GenericArg, <&rustc_middle[71f41ea3d2538dcd]::ty::list::RawList<(), rustc_middle[71f41ea3d2538dcd]::ty::generic_args::GenericArg> as rustc_type_ir[47614f3ecd88d1ff]::fold::TypeFoldable>::try_fold_with>::{closure#0}> + 10: 0x10ceda120 - >::super_fold_with::> + 11: 0x10ce29f18 - >::super_fold_with::> + 12: 0x10ce27ca0 - as rustc_type_ir[47614f3ecd88d1ff]::fold::TypeFoldable>::try_fold_with::> + 13: 0x10ce273b8 - as rustc_type_ir[47614f3ecd88d1ff]::fold::TypeFoldable>::try_fold_with::> + 
14: 0x10cf2372c - >::try_fold_with::> + 15: 0x10cef5a1c - ::instantiate_into + 16: 0x10ddf9848 - ::nominal_obligations + 17: 0x10ddf8710 - >::visit_const + 18: 0x10ddf7b58 - >::visit_ty + 19: 0x10ddf5db0 - rustc_trait_selection[55a89e4d0d7ea7c6]::traits::wf::obligations + 20: 0x10df6b15c - ::process_obligation + 21: 0x10df4c724 - >::process_obligations:: + 22: 0x10df683c4 - as rustc_infer[3d6a6834044a20c4]::traits::engine::TraitEngine>::select_where_possible + 23: 0x10dd96608 - >::assumed_wf_types_and_report_errors + 24: 0x10c4a6c6c - rustc_hir_analysis[6576f1f28a8b13c4]::check::wfcheck::check_well_formed + 25: 0x10d73ad34 - rustc_query_impl[30466c14bdba48]::plumbing::__rust_begin_short_backtrace::> + 26: 0x10d8412e0 - >::call_once + 27: 0x10d6dbf1c - rustc_query_system[972cd5053bb6237d]::query::plumbing::try_execute_query::>, false, false, false>, rustc_query_impl[30466c14bdba48]::plumbing::QueryCtxt, true> + 28: 0x10d8b8630 - rustc_query_impl[30466c14bdba48]::query_impl::check_well_formed::get_query_incr::__rust_end_short_backtrace + 29: 0x10c49a5ec - rustc_middle[71f41ea3d2538dcd]::query::plumbing::query_ensure_error_guaranteed::>, ()> + 30: 0x10c4ad898 - rustc_hir_analysis[6576f1f28a8b13c4]::check::wfcheck::check_mod_type_wf + 31: 0x10d73ad10 - rustc_query_impl[30466c14bdba48]::plumbing::__rust_begin_short_backtrace::> + 32: 0x10d841048 - >::call_once + 33: 0x10d69cf28 - rustc_query_system[972cd5053bb6237d]::query::plumbing::try_execute_query::>, false, false, false>, rustc_query_impl[30466c14bdba48]::plumbing::QueryCtxt, true> + 34: 0x10d8a5ecc - rustc_query_impl[30466c14bdba48]::query_impl::check_mod_type_wf::get_query_incr::__rust_end_short_backtrace + 35: 0x10c49534c - ::run::<(), rustc_data_structures[3bb601c435a2842f]::sync::parallel::enabled::par_for_each_in<&rustc_hir[c448669f75bf36d2]::hir_id::OwnerId, &[rustc_hir[c448669f75bf36d2]::hir_id::OwnerId], ::par_for_each_module::{closure#0}>::{closure#0}::{closure#1}::{closure#0}> + 36: 0x10c54513c - 
rustc_hir_analysis[6576f1f28a8b13c4]::check_crate + 37: 0x10c9eb918 - rustc_interface[6b7e568f89869ca2]::passes::analysis + 38: 0x10d73e944 - rustc_query_impl[30466c14bdba48]::plumbing::__rust_begin_short_backtrace::> + 39: 0x10d7e2cf0 - >::call_once + 40: 0x10d65ae34 - rustc_query_system[972cd5053bb6237d]::query::plumbing::try_execute_query::>, false, false, false>, rustc_query_impl[30466c14bdba48]::plumbing::QueryCtxt, true> + 41: 0x10d8936ec - rustc_query_impl[30466c14bdba48]::query_impl::analysis::get_query_incr::__rust_end_short_backtrace + 42: 0x10c096ee0 - ::enter::> + 43: 0x10c064448 - ::enter::, rustc_span[8c398afceecb6ede]::ErrorGuaranteed>> + 44: 0x10c0b1978 - rustc_span[8c398afceecb6ede]::create_session_globals_then::, rustc_interface[6b7e568f89869ca2]::util::run_in_thread_with_globals, rustc_driver_impl[763c4ce7974ba5fb]::run_compiler::{closure#0}>::{closure#1}, core[78ac8d9058276e2b]::result::Result<(), rustc_span[8c398afceecb6ede]::ErrorGuaranteed>>::{closure#0}, core[78ac8d9058276e2b]::result::Result<(), rustc_span[8c398afceecb6ede]::ErrorGuaranteed>>::{closure#0}::{closure#0}::{closure#0}> + 45: 0x10c0ae0b8 - std[25544cbdc54c9068]::sys::backtrace::__rust_begin_short_backtrace::, rustc_driver_impl[763c4ce7974ba5fb]::run_compiler::{closure#0}>::{closure#1}, core[78ac8d9058276e2b]::result::Result<(), rustc_span[8c398afceecb6ede]::ErrorGuaranteed>>::{closure#0}, core[78ac8d9058276e2b]::result::Result<(), rustc_span[8c398afceecb6ede]::ErrorGuaranteed>>::{closure#0}::{closure#0}, core[78ac8d9058276e2b]::result::Result<(), rustc_span[8c398afceecb6ede]::ErrorGuaranteed>> + 46: 0x10c0aedb8 - <::spawn_unchecked_, rustc_driver_impl[763c4ce7974ba5fb]::run_compiler::{closure#0}>::{closure#1}, core[78ac8d9058276e2b]::result::Result<(), rustc_span[8c398afceecb6ede]::ErrorGuaranteed>>::{closure#0}, core[78ac8d9058276e2b]::result::Result<(), rustc_span[8c398afceecb6ede]::ErrorGuaranteed>>::{closure#0}::{closure#0}, core[78ac8d9058276e2b]::result::Result<(), 
rustc_span[8c398afceecb6ede]::ErrorGuaranteed>>::{closure#1} as core[78ac8d9058276e2b]::ops::function::FnOnce<()>>::call_once::{shim:vtable#0} + 47: 0x10e1f3a78 - std::sys::pal::unix::thread::Thread::new::thread_start::h9a782c2ee1570786 + 48: 0x18b24ef94 - __pthread_joiner_wake + + +rustc version: 1.84.0-nightly (3f1be1ec7 2024-10-28) +platform: aarch64-apple-darwin + +query stack during panic: +#0 [check_well_formed] checking that `mpt_sequential::` is well-formed +#1 [check_mod_type_wf] checking that types are well-formed in module `mpt_sequential` +#2 [analysis] running analysis passes on this crate +end of query stack From 4261eabc6ad4e8288320c30abceb35b526d74a39 Mon Sep 17 00:00:00 2001 From: Zack Youell Date: Fri, 8 Nov 2024 13:36:46 +0000 Subject: [PATCH 06/47] Receipt Leaf Circuit added with tests --- mp2-common/src/array.rs | 8 +- mp2-common/src/eth.rs | 217 ++++++++++------- mp2-common/src/group_hashing/mod.rs | 2 - .../src/mpt_sequential/leaf_or_extension.rs | 62 ++++- mp2-common/src/mpt_sequential/mod.rs | 42 +++- mp2-test/src/circuit.rs | 99 ++++++++ mp2-test/src/mpt_sequential.rs | 17 +- mp2-v1/src/lib.rs | 1 + mp2-v1/src/receipt_extraction/leaf.rs | 221 ++++++++---------- mp2-v1/src/receipt_extraction/mod.rs | 29 +++ .../src/receipt_extraction/public_inputs.rs | 120 ++++++++-- mp2-v1/src/values_extraction/api.rs | 2 +- 12 files changed, 564 insertions(+), 256 deletions(-) diff --git a/mp2-common/src/array.rs b/mp2-common/src/array.rs index 7561f1679..27f99d6a5 100644 --- a/mp2-common/src/array.rs +++ b/mp2-common/src/array.rs @@ -643,7 +643,7 @@ where let (low_bits, high_bits) = b.split_low_high(at, 6, 12); // Search each of the smaller arrays for the target at `low_bits` - let first_search = arrays + let mut first_search = arrays .into_iter() .map(|array| { b.random_access( @@ -657,6 +657,10 @@ where }) .collect::>(); + // Now we push a number of zero targets into the array to make it a power of 2 + let next_power_of_two = 
first_search.len().next_power_of_two(); + let zero_target = b.zero(); + first_search.resize(next_power_of_two, zero_target); // Serach the result for the Target at `high_bits` T::from_target(b.random_access(high_bits, first_search)) } @@ -688,7 +692,7 @@ where let i_target = b.constant(F::from_canonical_usize(i)); let i_plus_n_target = b.add(at, i_target); - // out_val = arr[((i+n)<=n+M) * (i+n)] + self.random_access_large_array(b, i_plus_n_target) }), } diff --git a/mp2-common/src/eth.rs b/mp2-common/src/eth.rs index 7be9e9999..54864d74d 100644 --- a/mp2-common/src/eth.rs +++ b/mp2-common/src/eth.rs @@ -572,23 +572,42 @@ impl ReceiptQuery { .into_iter() .map(|index| { let key = index.rlp_bytes(); + let index_size = key.len(); - let proof = block_util.receipts_trie.get_proof(&key)?; + + let proof = block_util.receipts_trie.get_proof(&key[..])?; + + // Since the compact encoding of the key is stored first plus an additional list header and + // then the first element in the receipt body is the transaction type we calculate the offset to that point + + let last_node = proof.last().ok_or(eth_trie::TrieError::DB( + "Could not get last node in proof".to_string(), + ))?; + + let list_length_hint = last_node[0] as usize - 247; + let key_length = if last_node[1 + list_length_hint] > 128 { + last_node[1 + list_length_hint] as usize - 128 + } else { + 0 + }; + let body_length_hint = last_node[2 + list_length_hint + key_length] as usize - 183; + let body_offset = 4 + list_length_hint + key_length + body_length_hint; + let receipt = block_util.txs[index as usize].receipt(); - let rlp_body = receipt.encoded_2718(); - // Skip the first byte as it refers to the transaction type - let length_hint = rlp_body[1] as usize - 247; - let status_offset = 2 + length_hint; - let gas_hint = rlp_body[3 + length_hint] as usize - 128; + let body_length_hint = last_node[body_offset] as usize - 247; + let length_hint = body_offset + body_length_hint; + + let status_offset = 1 + length_hint; + 
let gas_hint = last_node[2 + length_hint] as usize - 128; // Logs bloom is always 256 bytes long and comes after the gas used the first byte is 185 then 1 then 0 then the bloom so the // log data starts at 4 + length_hint + gas_hint + 259 - let log_offset = 4 + length_hint + gas_hint + 259; + let log_offset = 3 + length_hint + gas_hint + 259; - let log_hint = if rlp_body[log_offset] < 247 { - rlp_body[log_offset] as usize - 192 + let log_hint = if last_node[log_offset] < 247 { + last_node[log_offset] as usize - 192 } else { - rlp_body[log_offset] as usize - 247 + last_node[log_offset] as usize - 247 }; // We iterate through the logs and store the offsets we care about. let mut current_log_offset = log_offset + 1 + log_hint; @@ -709,11 +728,7 @@ impl BlockUtil { let body_rlp = receipt_primitive.encoded_2718(); let tx_body_rlp = transaction_primitive.encoded_2718(); - println!( - "TX index {} RLP encoded: {:?}", - receipt.transaction_index.unwrap(), - tx_index.to_vec() - ); + receipts_trie .insert(&tx_index, &body_rlp) .expect("can't insert receipt"); @@ -723,6 +738,8 @@ impl BlockUtil { TxWithReceipt(transaction.clone(), receipt_primitive) }) .collect::>(); + receipts_trie.root_hash()?; + transactions_trie.root_hash()?; Ok(BlockUtil { block, txs: consensus_receipts, @@ -777,11 +794,10 @@ mod tryethers { use ethers::{ providers::{Http, Middleware, Provider}, types::{ - Address, Block, BlockId, Bytes, EIP1186ProofResponse, Transaction, TransactionReceipt, - H256, U64, + Block, BlockId, Bytes, EIP1186ProofResponse, Transaction, TransactionReceipt, H256, U64, }, }; - use rlp::{Encodable, Rlp, RlpStream}; + use rlp::{Encodable, RlpStream}; /// A wrapper around a transaction and its receipt. The receipt is used to filter /// bad transactions, so we only compute over valid transactions. 
@@ -928,8 +944,8 @@ mod test { use alloy::{ node_bindings::Anvil, - primitives::{Bytes, Log}, - providers::ProviderBuilder, + primitives::{Bytes, Log, U256}, + providers::{ext::AnvilApi, Provider, ProviderBuilder, WalletProvider}, rlp::Decodable, sol, }; @@ -940,10 +956,10 @@ mod test { types::BlockNumber, }; use hashbrown::HashMap; + use tokio::task::JoinSet; use crate::{ mpt_sequential::utils::nibbles_to_bytes, - types::MAX_BLOCK_LEN, utils::{Endianness, Packer}, }; use mp2_test::eth::{get_mainnet_url, get_sepolia_url}; @@ -1079,14 +1095,11 @@ mod test { #[tokio::test] async fn test_receipt_query() -> Result<()> { - // Spin up a local node. - let anvil = Anvil::new().spawn(); - // Create a provider with the wallet for contract deployment and interaction. - let rpc_url = anvil.endpoint(); - - let rpc = ProviderBuilder::new().on_http(rpc_url.parse().unwrap()); + let rpc = ProviderBuilder::new() + .with_recommended_fillers() + .on_anvil_with_wallet_and_config(|anvil| Anvil::block_time(anvil, 1)); - // Make a contract taht emits events so we can pick up on them + // Make a contract that emits events so we can pick up on them sol! 
{ #[allow(missing_docs)] // solc v0.8.26; solc Counter.sol --via-ir --optimize --bin @@ -1113,84 +1126,108 @@ mod test { } } // Deploy the contract using anvil - let contract = EventEmitter::deploy(&rpc).await?; + let contract = EventEmitter::deploy(rpc.clone()).await?; // Fire off a few transactions to emit some events - let mut transactions = Vec::::new(); - - for i in 0..10 { - if i % 2 == 0 { - let builder = contract.testEmit(); - let tx_hash = builder.send().await?.watch().await?; - let transaction = rpc.get_transaction_by_hash(tx_hash).await?.unwrap(); - transactions.push(transaction); - } else { - let builder = contract.twoEmits(); - let tx_hash = builder.send().await?.watch().await?; - let transaction = rpc.get_transaction_by_hash(tx_hash).await?.unwrap(); - transactions.push(transaction); - } + + let address = rpc.default_signer_address(); + rpc.anvil_set_nonce(address, U256::from(0)).await.unwrap(); + let tx_reqs = (0..10) + .map(|i| match i % 2 { + 0 => contract + .testEmit() + .into_transaction_request() + .nonce(i as u64), + 1 => contract + .twoEmits() + .into_transaction_request() + .nonce(i as u64), + _ => unreachable!(), + }) + .collect::>(); + let mut join_set = JoinSet::new(); + tx_reqs.into_iter().for_each(|tx_req| { + let rpc_clone = rpc.clone(); + join_set.spawn(async move { + rpc_clone + .send_transaction(tx_req) + .await + .unwrap() + .watch() + .await + .unwrap() + }); + }); + + let hashes = join_set.join_all().await; + let mut transactions = Vec::new(); + for hash in hashes.into_iter() { + transactions.push(rpc.get_transaction_by_hash(hash).await.unwrap().unwrap()); } + let block_number = transactions.first().unwrap().block_number.unwrap(); + // We want to get the event signature so we can make a ReceiptQuery let all_events = EventEmitter::abi::events(); let events = all_events.get("testEvent").unwrap(); let receipt_query = ReceiptQuery::new(*contract.address(), events[0].clone()); - // Now for each transaction we fetch the block, then get 
the MPT Trie proof that the receipt is included and verify it - for transaction in transactions.iter() { - let index = transaction - .block_number - .ok_or(anyhow!("Could not get block number from transaction"))?; - let block = rpc - .get_block( - BlockNumberOrTag::Number(index).into(), - alloy::rpc::types::BlockTransactionsKind::Full, - ) - .await? - .ok_or(anyhow!("Could not get block test"))?; - let proofs = receipt_query - .query_receipt_proofs(&rpc, BlockNumberOrTag::Number(index)) - .await?; - - for proof in proofs.into_iter() { - let memdb = Arc::new(MemoryDB::new(true)); - let tx_trie = EthTrie::new(Arc::clone(&memdb)); + let block = rpc + .get_block( + BlockNumberOrTag::Number(block_number).into(), + alloy::rpc::types::BlockTransactionsKind::Full, + ) + .await? + .ok_or(anyhow!("Could not get block test"))?; + let receipt_hash = block.header().receipts_root; + let proofs = receipt_query + .query_receipt_proofs(&rpc.root(), BlockNumberOrTag::Number(block_number)) + .await?; - let mpt_key = transaction.transaction_index.unwrap().rlp_bytes(); - let receipt_hash = block.header().receipts_root; - let is_valid = tx_trie - .verify_proof(receipt_hash.0.into(), &mpt_key, proof.mpt_proof.clone())? - .ok_or(anyhow!("No proof found when verifying"))?; + // Now for each transaction we fetch the block, then get the MPT Trie proof that the receipt is included and verify it - let expected_sig: [u8; 32] = keccak256(receipt_query.event.signature().as_bytes()) + for proof in proofs.iter() { + let memdb = Arc::new(MemoryDB::new(true)); + let tx_trie = EthTrie::new(Arc::clone(&memdb)); + + let mpt_key = proof.tx_index.rlp_bytes(); + + let _ = tx_trie + .verify_proof(receipt_hash.0.into(), &mpt_key, proof.mpt_proof.clone())? 
+ .ok_or(anyhow!("No proof found when verifying"))?; + + let last_node = proof + .mpt_proof + .last() + .ok_or(anyhow!("Couldn't get first node in proof"))?; + let expected_sig: [u8; 32] = keccak256(receipt_query.event.signature().as_bytes()) + .try_into() + .unwrap(); + + for log_offset in proof.relevant_logs_offset.iter() { + let mut buf = &last_node[*log_offset..*log_offset + proof.event_log_info.size]; + let decoded_log = Log::decode(&mut buf)?; + let raw_bytes: [u8; 20] = last_node[*log_offset + + proof.event_log_info.add_rel_offset + ..*log_offset + proof.event_log_info.add_rel_offset + 20] + .to_vec() .try_into() .unwrap(); - - for log_offset in proof.relevant_logs_offset.iter() { - let mut buf = &is_valid[*log_offset..*log_offset + proof.event_log_info.size]; - let decoded_log = Log::decode(&mut buf)?; - let raw_bytes: [u8; 20] = is_valid[*log_offset - + proof.event_log_info.add_rel_offset - ..*log_offset + proof.event_log_info.add_rel_offset + 20] - .to_vec() - .try_into() - .unwrap(); - assert_eq!(decoded_log.address, receipt_query.contract); - assert_eq!(raw_bytes, receipt_query.contract); - let topics = decoded_log.topics(); - assert_eq!(topics[0].0, expected_sig); - let raw_bytes: [u8; 32] = is_valid[*log_offset - + proof.event_log_info.sig_rel_offset - ..*log_offset + proof.event_log_info.sig_rel_offset + 32] - .to_vec() - .try_into() - .unwrap(); - assert_eq!(topics[0].0, raw_bytes); - } + assert_eq!(decoded_log.address, receipt_query.contract); + assert_eq!(raw_bytes, receipt_query.contract); + let topics = decoded_log.topics(); + assert_eq!(topics[0].0, expected_sig); + let raw_bytes: [u8; 32] = last_node[*log_offset + + proof.event_log_info.sig_rel_offset + ..*log_offset + proof.event_log_info.sig_rel_offset + 32] + .to_vec() + .try_into() + .unwrap(); + assert_eq!(topics[0].0, raw_bytes); } } + Ok(()) } diff --git a/mp2-common/src/group_hashing/mod.rs b/mp2-common/src/group_hashing/mod.rs index bf4360676..47a8822aa 100644 --- 
a/mp2-common/src/group_hashing/mod.rs +++ b/mp2-common/src/group_hashing/mod.rs @@ -21,8 +21,6 @@ use plonky2_ecgfp5::{ }, }; -use std::array::from_fn as create_array; - mod curve_add; pub mod field_to_curve; mod sswu_gadget; diff --git a/mp2-common/src/mpt_sequential/leaf_or_extension.rs b/mp2-common/src/mpt_sequential/leaf_or_extension.rs index 8c64d7584..e5c0cf482 100644 --- a/mp2-common/src/mpt_sequential/leaf_or_extension.rs +++ b/mp2-common/src/mpt_sequential/leaf_or_extension.rs @@ -1,6 +1,8 @@ //! MPT leaf or extension node gadget -use super::{advance_key_leaf_or_extension, key::MPTKeyWireGeneric, PAD_LEN}; +use super::{ + advance_key_leaf_or_extension, advance_key_receipt_leaf, key::MPTKeyWireGeneric, PAD_LEN, +}; use crate::{ array::{Array, Vector, VectorWire}, keccak::{InputData, KeccakCircuit, KeccakWires}, @@ -96,3 +98,61 @@ impl MPTLeafOrExtensionNodeGeneric { } } } + +/// Wrapped wires for a MPT receipt leaf +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct MPTReceiptLeafWiresGeneric +where + [(); PAD_LEN(NODE_LEN)]:, +{ + /// MPT node + pub node: VectorWire, + /// MPT root + pub root: KeccakWires<{ PAD_LEN(NODE_LEN) }>, + /// New MPT key after advancing the current key + pub key: MPTKeyWireGeneric, +} + +/// Receipt leaf node as we have to do things differently for efficiency reasons. +pub struct MPTReceiptLeafNode; + +impl MPTReceiptLeafNode { + /// Build the MPT node and advance the current key. + pub fn build_and_advance_key< + F: RichField + Extendable, + const D: usize, + const NODE_LEN: usize, + >( + b: &mut CircuitBuilder, + current_key: &MPTKeyWireGeneric, + ) -> MPTReceiptLeafWiresGeneric + where + [(); PAD_LEN(NODE_LEN)]:, + { + let zero = b.zero(); + let tru = b._true(); + + // Build the node and ensure it only includes bytes. + let node = VectorWire::::new(b); + + node.assert_bytes(b); + + // Expose the keccak root of this subtree starting at this node. 
+ let root = KeccakCircuit::<{ PAD_LEN(NODE_LEN) }>::hash_vector(b, &node); + + // We know that the rlp encoding of the compact encoding of the key is going to be in roughly the first 10 bytes of + // the node since the node is list byte, 2 bytes for list length (maybe 3), key length byte (1), key compact encoding (4 max) + // so we take 10 bytes to be safe since this won't affect the number of random access gates we use. + let rlp_headers = decode_fixed_list::<_, D, 1>(b, &node.arr.arr[..10], zero); + + let (key, valid) = advance_key_receipt_leaf::( + b, + &node, + current_key, + &rlp_headers, + ); + b.connect(tru.target, valid.target); + + MPTReceiptLeafWiresGeneric { node, root, key } + } +} diff --git a/mp2-common/src/mpt_sequential/mod.rs b/mp2-common/src/mpt_sequential/mod.rs index 50087c1af..4606402de 100644 --- a/mp2-common/src/mpt_sequential/mod.rs +++ b/mp2-common/src/mpt_sequential/mod.rs @@ -38,7 +38,7 @@ pub use key::{ }; pub use leaf_or_extension::{ MPTLeafOrExtensionNode, MPTLeafOrExtensionNodeGeneric, MPTLeafOrExtensionWires, - MPTLeafOrExtensionWiresGeneric, + MPTLeafOrExtensionWiresGeneric, MPTReceiptLeafNode, MPTReceiptLeafWiresGeneric, }; /// Number of items in the RLP encoded list in a leaf node. @@ -52,7 +52,7 @@ pub const MAX_LEAF_VALUE_LEN: usize = 33; /// This is the maximum size we allow for the value of Receipt Trie leaf /// currently set to be the same as we allow for a branch node in the Storage Trie /// minus the length of the key header and key -pub const MAX_RECEIPT_LEAF_VALUE_LEN: usize = 526; +pub const MAX_RECEIPT_LEAF_VALUE_LEN: usize = 503; /// RLP item size for the extension node pub const MPT_EXTENSION_RLP_SIZE: usize = 2; @@ -443,6 +443,44 @@ pub fn advance_key_leaf_or_extension< let condition = b.and(condition, should_true); (new_key, leaf_child_hash, condition) } + +/// Returns the key with the pointer moved in the case of a Receipt Trie leaf. 
+pub fn advance_key_receipt_leaf< + F: RichField + Extendable, + const D: usize, + const NODE_LEN: usize, + const KEY_LEN: usize, +>( + b: &mut CircuitBuilder, + node: &VectorWire, + key: &MPTKeyWireGeneric, + rlp_headers: &RlpList<1>, +) -> (MPTKeyWireGeneric, BoolTarget) { + let key_header = RlpHeader { + data_type: rlp_headers.data_type[0], + offset: rlp_headers.offset[0], + len: rlp_headers.len[0], + }; + + // To save on operations we know the key is going to be in the first 10 items so we + // only feed these into `decode_compact_encoding` + let sub_array: Array = Array { + arr: create_array(|i| node.arr.arr[i]), + }; + let (extracted_key, should_true) = + decode_compact_encoding::<_, _, _, KEY_LEN>(b, &sub_array, &key_header); + + // note we are going _backwards_ on the key, so we need to subtract the expected key length + // we want to check against + let new_key = key.advance_by(b, extracted_key.real_len); + // NOTE: there is no need to check if the extracted_key is indeed a subvector of the full key + // in this case. Indeed, in leaf/ext. there is only one key possible. Since we decoded it + // from the beginning of the node, and that the hash of the node also starts at the beginning, + // either the attacker gives the right node or it gives an invalid node and hashes will not + // match. 
+ + (new_key, should_true) +} #[cfg(test)] mod test { use std::array::from_fn as create_array; diff --git a/mp2-test/src/circuit.rs b/mp2-test/src/circuit.rs index 262d4384e..f810dac93 100644 --- a/mp2-test/src/circuit.rs +++ b/mp2-test/src/circuit.rs @@ -105,6 +105,7 @@ pub fn prove_circuit< let now = std::time::Instant::now(); u.prove(&mut pw, &setup.0); let proof = setup.1.prove(pw).expect("invalid proof"); + println!("[+] Proof generated in {:?}ms", now.elapsed().as_millis()); setup .2 @@ -124,6 +125,7 @@ pub fn run_circuit< u: U, ) -> ProofWithPublicInputs { let setup = setup_circuit::(); + println!( "setup.verifierdata hash {:?}", setup.2.verifier_only.circuit_digest @@ -131,3 +133,100 @@ pub fn run_circuit< prove_circuit(&setup, &u) } + +/// Given a `PartitionWitness` that has only inputs set, populates the rest of the witness using the +/// given set of generators. +pub fn debug_generate_partial_witness< + 'a, + F: RichField + Extendable, + C: GenericConfig, + const D: usize, +>( + inputs: PartialWitness, + prover_data: &'a plonky2::plonk::circuit_data::ProverOnlyCircuitData, + common_data: &'a plonky2::plonk::circuit_data::CommonCircuitData, +) -> plonky2::iop::witness::PartitionWitness<'a, F> { + use plonky2::iop::witness::WitnessWrite; + + let config = &common_data.config; + let generators = &prover_data.generators; + let generator_indices_by_watches = &prover_data.generator_indices_by_watches; + + let mut witness = plonky2::iop::witness::PartitionWitness::new( + config.num_wires, + common_data.degree(), + &prover_data.representative_map, + ); + + for (t, v) in inputs.target_values.into_iter() { + witness.set_target(t, v); + } + + // Build a list of "pending" generators which are queued to be run. Initially, all generators + // are queued. + let mut pending_generator_indices: Vec<_> = (0..generators.len()).collect(); + + // We also track a list of "expired" generators which have already returned false. 
+ let mut generator_is_expired = vec![false; generators.len()]; + let mut remaining_generators = generators.len(); + + let mut buffer = plonky2::iop::generator::GeneratedValues::empty(); + + // Keep running generators until we fail to make progress. + while !pending_generator_indices.is_empty() { + let mut next_pending_generator_indices = Vec::new(); + + for &generator_idx in &pending_generator_indices { + if generator_is_expired[generator_idx] { + continue; + } + + let finished = generators[generator_idx].0.run(&witness, &mut buffer); + if finished { + generator_is_expired[generator_idx] = true; + remaining_generators -= 1; + } + + // Merge any generated values into our witness, and get a list of newly-populated + // targets' representatives. + let new_target_reps = buffer + .target_values + .drain(..) + .flat_map(|(t, v)| witness.set_target_returning_rep(t, v)); + + // Enqueue unfinished generators that were watching one of the newly populated targets. + for watch in new_target_reps { + let opt_watchers = generator_indices_by_watches.get(&watch); + if let Some(watchers) = opt_watchers { + for &watching_generator_idx in watchers { + if !generator_is_expired[watching_generator_idx] { + next_pending_generator_indices.push(watching_generator_idx); + } + } + } + } + } + + pending_generator_indices = next_pending_generator_indices; + } + if remaining_generators != 0 { + println!("{} generators weren't run", remaining_generators); + + let filtered = generator_is_expired + .iter() + .enumerate() + .filter_map(|(index, flag)| if !flag { Some(index) } else { None }) + .min(); + + if let Some(min_val) = filtered { + println!("generator at index: {} is the first to not run", min_val); + println!("This has ID: {}", generators[min_val].0.id()); + + for watch in generators[min_val].0.watch_list().iter() { + println!("watching: {:?}", watch); + } + } + } + + witness +} diff --git a/mp2-test/src/mpt_sequential.rs b/mp2-test/src/mpt_sequential.rs index d1e79caa1..570170235 100644 
--- a/mp2-test/src/mpt_sequential.rs +++ b/mp2-test/src/mpt_sequential.rs @@ -2,8 +2,7 @@ use alloy::{ eips::BlockNumberOrTag, node_bindings::Anvil, primitives::U256, - providers::{ext::AnvilApi, Provider, ProviderBuilder, RootProvider, WalletProvider}, - rpc::types::Transaction, + providers::{ext::AnvilApi, Provider, ProviderBuilder, WalletProvider}, sol, }; use eth_trie::{EthTrie, MemoryDB, Trie}; @@ -53,7 +52,7 @@ pub fn generate_random_storage_mpt( /// This function is used so that we can generate a Receipt Trie for a blog with varying transactions /// (i.e. some we are interested in and some we are not). -fn generate_receipt_proofs() -> Vec { +pub fn generate_receipt_proofs() -> Vec { // Make a contract that emits events so we can pick up on them sol! { #[allow(missing_docs)] @@ -179,15 +178,3 @@ fn generate_receipt_proofs() -> Vec { .unwrap() }) } - -#[cfg(test)] -mod tests { - use super::*; - #[test] - fn tester() { - let receipt_proofs = generate_receipt_proofs(); - for proof in receipt_proofs.iter() { - println!("proof: {}", proof.tx_index); - } - } -} diff --git a/mp2-v1/src/lib.rs b/mp2-v1/src/lib.rs index 547290b0f..e1eb5132e 100644 --- a/mp2-v1/src/lib.rs +++ b/mp2-v1/src/lib.rs @@ -17,6 +17,7 @@ pub const MAX_BRANCH_NODE_LEN_PADDED: usize = PAD_LEN(532); pub const MAX_EXTENSION_NODE_LEN: usize = 69; pub const MAX_EXTENSION_NODE_LEN_PADDED: usize = PAD_LEN(69); pub const MAX_LEAF_NODE_LEN: usize = MAX_EXTENSION_NODE_LEN; +pub const MAX_RECEIPT_LEAF_NODE_LEN: usize = 512; pub mod api; pub mod block_extraction; diff --git a/mp2-v1/src/receipt_extraction/leaf.rs b/mp2-v1/src/receipt_extraction/leaf.rs index f7c99d8a7..8fca8a1c5 100644 --- a/mp2-v1/src/receipt_extraction/leaf.rs +++ b/mp2-v1/src/receipt_extraction/leaf.rs @@ -1,17 +1,15 @@ //! 
Module handling the leaf node inside a Receipt Trie -use super::public_inputs::PublicInputArgs; +use crate::MAX_RECEIPT_LEAF_NODE_LEN; + +use super::public_inputs::{PublicInputArgs, PublicInputs}; use mp2_common::{ array::{Array, Vector, VectorWire}, eth::{EventLogInfo, LogDataInfo, ReceiptProofInfo}, group_hashing::CircuitBuilderGroupHashing, keccak::{InputData, KeccakCircuit, KeccakWires}, - mpt_sequential::{ - MPTLeafOrExtensionNodeGeneric, ReceiptKeyWire, MAX_RECEIPT_LEAF_VALUE_LEN, - MAX_TX_KEY_NIBBLE_LEN, PAD_LEN, - }, - poseidon::H, + mpt_sequential::{MPTReceiptLeafNode, ReceiptKeyWire, MAX_TX_KEY_NIBBLE_LEN, PAD_LEN}, public_inputs::PublicInputCommon, types::{CBuilder, GFp}, utils::{Endianness, PackerTarget}, @@ -23,13 +21,15 @@ use plonky2::{ target::Target, witness::{PartialWitness, WitnessWrite}, }, + plonk::circuit_builder::CircuitBuilder, }; use plonky2_ecgfp5::gadgets::curve::{CircuitBuilderEcGFp5, CurveTarget}; +use recursion_framework::circuit_builder::CircuitLogicWires; use rlp::Encodable; use serde::{Deserialize, Serialize}; - +use std::array::from_fn; /// Maximum number of logs per transaction we can process const MAX_LOGS_PER_TX: usize = 2; @@ -42,10 +42,10 @@ where pub event: EventWires, /// The node bytes pub node: VectorWire, - /// The actual value stored in the node - pub value: Array, /// the hash of the node bytes pub root: KeccakWires<{ PAD_LEN(NODE_LEN) }>, + /// The index of this receipt in the block + pub index: Target, /// The offset of the status of the transaction in the RLP encoded receipt node. 
pub status_offset: Target, /// The offsets of the relevant logs inside the node @@ -102,7 +102,7 @@ impl LogColumn { impl EventWires { /// Convert to an array for metadata digest - pub fn to_slice(&self) -> [Target; 70] { + pub fn to_vec(&self) -> Vec { let topics_flat = self .topics .iter() @@ -113,60 +113,45 @@ impl EventWires { .iter() .flat_map(|t| t.to_array()) .collect::>(); - let mut out = [Target::default(); 70]; - out[0] = self.size; - out.iter_mut() - .skip(1) - .take(20) - .enumerate() - .for_each(|(i, entry)| *entry = self.address.arr[i]); - out[21] = self.add_rel_offset; - out.iter_mut() - .skip(22) - .take(32) - .enumerate() - .for_each(|(i, entry)| *entry = self.event_signature.arr[i]); - out[54] = self.sig_rel_offset; - out.iter_mut() - .skip(55) - .take(9) - .enumerate() - .for_each(|(i, entry)| *entry = topics_flat[i]); - out.iter_mut() - .skip(64) - .take(6) - .enumerate() - .for_each(|(i, entry)| *entry = data_flat[i]); + let mut out = Vec::new(); + out.push(self.size); + out.extend_from_slice(&self.address.arr); + out.push(self.add_rel_offset); + out.extend_from_slice(&self.event_signature.arr); + out.push(self.sig_rel_offset); + out.extend_from_slice(&topics_flat); + out.extend_from_slice(&data_flat); + out } - pub fn verify_logs_and_extract_values( + pub fn verify_logs_and_extract_values( &self, b: &mut CBuilder, - value: &Array, + value: &VectorWire, status_offset: Target, relevant_logs_offsets: &VectorWire, ) -> CurveTarget { let t = b._true(); let zero = b.zero(); let curve_zero = b.curve_zero(); - let mut value_digest = b.curve_zero(); + let mut points = Vec::new(); // Enforce status is true. 
- let status = value.random_access_large_array(b, status_offset); + let status = value.arr.random_access_large_array(b, status_offset); b.connect(status, t.target); for log_offset in relevant_logs_offsets.arr.arr { // Extract the address bytes let address_start = b.add(log_offset, self.add_rel_offset); - let address_bytes = value.extract_array_large::<_, _, 20>(b, address_start); + let address_bytes = value.arr.extract_array_large::<_, _, 20>(b, address_start); let address_check = address_bytes.equals(b, &self.address); // Extract the signature bytes let sig_start = b.add(log_offset, self.sig_rel_offset); - let sig_bytes = value.extract_array_large::<_, _, 32>(b, sig_start); + let sig_bytes = value.arr.extract_array_large::<_, _, 32>(b, sig_start); let sig_check = sig_bytes.equals(b, &self.event_signature); @@ -182,7 +167,7 @@ impl EventWires { for &log_column in self.topics.iter().chain(self.data.iter()) { let data_start = b.add(log_offset, log_column.rel_byte_offset); // The data is always 32 bytes long - let data_bytes = value.extract_array_large::<_, _, 32>(b, data_start); + let data_bytes = value.arr.extract_array_large::<_, _, 32>(b, data_start); // Pack the data and get the digest let packed_data = data_bytes.arr.pack(b, Endianness::Big); @@ -197,11 +182,11 @@ impl EventWires { let selector = b.and(dummy_column, dummy); let selected_point = b.select_curve_point(selector, curve_zero, data_digest); - value_digest = b.add_curve_point(&[selected_point, value_digest]); + points.push(selected_point); } } - value_digest + b.add_curve_point(&points) } } @@ -215,7 +200,7 @@ impl ReceiptLeafCircuit where [(); PAD_LEN(NODE_LEN)]:, { - pub fn build_leaf_wires(b: &mut CBuilder) -> ReceiptLeafWires { + pub fn build(b: &mut CBuilder) -> ReceiptLeafWires { // Build the event wires let event_wires = Self::build_event_wires(b); @@ -227,27 +212,24 @@ where let mpt_key = ReceiptKeyWire::new(b); // Build the node wires. 
- let wires = MPTLeafOrExtensionNodeGeneric::build_and_advance_key::< - _, - D, - NODE_LEN, - MAX_RECEIPT_LEAF_VALUE_LEN, - >(b, &mpt_key); + let wires = MPTReceiptLeafNode::build_and_advance_key::<_, D, NODE_LEN>(b, &mpt_key); + let node = wires.node; let root = wires.root; // For each relevant log in the transaction we have to verify it lines up with the event we are monitoring for - let receipt_body = wires.value; - let mut dv = event_wires.verify_logs_and_extract_values( + let mut dv = event_wires.verify_logs_and_extract_values::( b, - &receipt_body, + &node, status_offset, &relevant_logs_offset, ); + let value_id = b.map_to_curve_point(&[index]); + dv = b.add_curve_point(&[value_id, dv]); - let dm = b.hash_n_to_hash_no_pad::(event_wires.to_slice().to_vec()); + let dm = b.map_to_curve_point(&event_wires.to_vec()); // Register the public inputs PublicInputArgs { @@ -261,8 +243,8 @@ where ReceiptLeafWires { event: event_wires, node, - value: receipt_body, root, + index, status_offset, relevant_logs_offset, mpt_key, @@ -273,24 +255,22 @@ where let size = b.add_virtual_target(); // Packed address - let arr = [b.add_virtual_target(); 20]; - let address = Array::from_array(arr); + let address = Array::::new(b); // relative offset of the address let add_rel_offset = b.add_virtual_target(); // Event signature - let arr = [b.add_virtual_target(); 32]; - let event_signature = Array::from_array(arr); + let event_signature = Array::::new(b); // Signature relative offset let sig_rel_offset = b.add_virtual_target(); // topics - let topics = [Self::build_log_column(b); 3]; + let topics: [LogColumn; 3] = from_fn(|_| Self::build_log_column(b)); // data - let data = [Self::build_log_column(b); 2]; + let data: [LogColumn; 2] = from_fn(|_| Self::build_log_column(b)); EventWires { size, @@ -331,7 +311,7 @@ where &wires.root, &InputData::Assigned(&pad_node), ); - + pw.set_target(wires.index, GFp::from_canonical_u64(self.info.tx_index)); pw.set_target( wires.status_offset, 
GFp::from_canonical_usize(self.info.status_offset), @@ -406,13 +386,47 @@ where } } +/// Num of children = 0 +impl CircuitLogicWires for ReceiptLeafWires { + type CircuitBuilderParams = (); + + type Inputs = ReceiptLeafCircuit; + + const NUM_PUBLIC_INPUTS: usize = PublicInputs::::TOTAL_LEN; + + fn circuit_logic( + builder: &mut CircuitBuilder, + _verified_proofs: [&plonky2::plonk::proof::ProofWithPublicInputsTarget; 0], + _builder_parameters: Self::CircuitBuilderParams, + ) -> Self { + ReceiptLeafCircuit::build(builder) + } + + fn assign_input( + &self, + inputs: Self::Inputs, + pw: &mut PartialWitness, + ) -> anyhow::Result<()> { + inputs.assign(pw, self); + Ok(()) + } +} + #[cfg(test)] mod tests { use super::*; + use crate::receipt_extraction::compute_receipt_leaf_metadata_digest; + use mp2_common::{ + utils::{keccak256, Packer}, + C, + }; + use mp2_test::{ + circuit::{run_circuit, UserCircuit}, + mpt_sequential::generate_receipt_proofs, + }; #[derive(Clone, Debug)] struct TestReceiptLeafCircuit { c: ReceiptLeafCircuit, - exp_value: Vec, } impl UserCircuit for TestReceiptLeafCircuit @@ -420,91 +434,38 @@ mod tests { [(); PAD_LEN(NODE_LEN)]:, { // Leaf wires + expected extracted value - type Wires = ( - ReceiptLeafWires, - Array, - ); + type Wires = ReceiptLeafWires; fn build(b: &mut CircuitBuilder) -> Self::Wires { - let exp_value = Array::::new(b); - - let leaf_wires = ReceiptLeafCircuit::::build(b); - leaf_wires.value.enforce_equal(b, &exp_value); - - (leaf_wires, exp_value) + ReceiptLeafCircuit::::build(b) } fn prove(&self, pw: &mut PartialWitness, wires: &Self::Wires) { - self.c.assign(pw, &wires.0); - wires - .1 - .assign_bytes(pw, &self.exp_value.clone().try_into().unwrap()); + self.c.assign(pw, &wires); } } #[test] fn test_leaf_circuit() { - const NODE_LEN: usize = 80; - - let simple_slot = 2_u8; - let slot = StorageSlot::Simple(simple_slot as usize); - let contract_address = Address::from_str(TEST_CONTRACT_ADDRESS).unwrap(); - let chain_id = 10; - let id 
= identifier_single_var_column(simple_slot, &contract_address, chain_id, vec![]); - - let (mut trie, _) = generate_random_storage_mpt::<3, MAPPING_LEAF_VALUE_LEN>(); - let value = random_vector(MAPPING_LEAF_VALUE_LEN); - let encoded_value: Vec = rlp::encode(&value).to_vec(); - // assert we added one byte of RLP header - assert_eq!(encoded_value.len(), MAPPING_LEAF_VALUE_LEN + 1); - println!("encoded value {:?}", encoded_value); - trie.insert(&slot.mpt_key(), &encoded_value).unwrap(); - trie.root_hash().unwrap(); - - let proof = trie.get_proof(&slot.mpt_key_vec()).unwrap(); - let node = proof.last().unwrap().clone(); - - let c = LeafSingleCircuit:: { - node: node.clone(), - slot: SimpleSlot::new(simple_slot), - id, - }; - let test_circuit = TestLeafSingleCircuit { - c, - exp_value: value.clone(), - }; + const NODE_LEN: usize = 512; + + let receipt_proof_infos = generate_receipt_proofs(); + let info = receipt_proof_infos.first().unwrap().clone(); + let c = ReceiptLeafCircuit:: { info: info.clone() }; + let test_circuit = TestReceiptLeafCircuit { c }; let proof = run_circuit::(test_circuit); let pi = PublicInputs::new(&proof.public_inputs); - + let node = info.mpt_proof.last().unwrap().clone(); + // Check the output hash { let exp_hash = keccak256(&node).pack(Endianness::Little); assert_eq!(pi.root_hash(), exp_hash); } - { - let (key, ptr) = pi.mpt_key_info(); - - let exp_key = slot.mpt_key_vec(); - let exp_key: Vec<_> = bytes_to_nibbles(&exp_key) - .into_iter() - .map(F::from_canonical_u8) - .collect(); - assert_eq!(key, exp_key); - - let leaf_key: Vec> = rlp::decode_list(&node); - let nib = Nibbles::from_compact(&leaf_key[0]); - let exp_ptr = F::from_canonical_usize(MAX_KEY_NIBBLE_LEN - 1 - nib.nibbles().len()); - assert_eq!(exp_ptr, ptr); - } - // Check values digest - { - let exp_digest = compute_leaf_single_values_digest(id, &value); - assert_eq!(pi.values_digest(), exp_digest.to_weierstrass()); - } + // Check metadata digest { - let exp_digest = 
compute_leaf_single_metadata_digest(id, simple_slot); + let exp_digest = compute_receipt_leaf_metadata_digest(&info.event_log_info); assert_eq!(pi.metadata_digest(), exp_digest.to_weierstrass()); } - assert_eq!(pi.n(), F::ONE); } -} \ No newline at end of file +} diff --git a/mp2-v1/src/receipt_extraction/mod.rs b/mp2-v1/src/receipt_extraction/mod.rs index 6c3803e08..4950aef20 100644 --- a/mp2-v1/src/receipt_extraction/mod.rs +++ b/mp2-v1/src/receipt_extraction/mod.rs @@ -1,2 +1,31 @@ pub mod leaf; pub mod public_inputs; + +use mp2_common::{ + digest::Digest, eth::EventLogInfo, group_hashing::map_to_curve_point, types::GFp, +}; +use plonky2::field::types::Field; + +/// Calculate `metadata_digest = D(key_id || value_id || slot)` for receipt leaf. +pub fn compute_receipt_leaf_metadata_digest(event: &EventLogInfo) -> Digest { + let topics_flat = event + .topics + .iter() + .chain(event.data.iter()) + .flat_map(|t| [t.column_id, t.rel_byte_offset, t.len]) + .collect::>(); + + let mut out = Vec::new(); + out.push(event.size); + out.extend_from_slice(&event.address.0.map(|byte| byte as usize)); + out.push(event.add_rel_offset); + out.extend_from_slice(&event.event_signature.map(|byte| byte as usize)); + out.push(event.sig_rel_offset); + out.extend_from_slice(&topics_flat); + + let data = out + .into_iter() + .map(GFp::from_canonical_usize) + .collect::>(); + map_to_curve_point(&data) +} diff --git a/mp2-v1/src/receipt_extraction/public_inputs.rs b/mp2-v1/src/receipt_extraction/public_inputs.rs index 901fc0b29..7a44ed175 100644 --- a/mp2-v1/src/receipt_extraction/public_inputs.rs +++ b/mp2-v1/src/receipt_extraction/public_inputs.rs @@ -1,14 +1,22 @@ //! 
Public inputs for Receipt Extraction circuits use mp2_common::{ + array::Array, keccak::{OutputHash, PACKED_HASH_LEN}, mpt_sequential::ReceiptKeyWire, public_inputs::{PublicInputCommon, PublicInputRange}, - types::{CBuilder, CURVE_TARGET_LEN}, + types::{CBuilder, GFp, GFp5, CURVE_TARGET_LEN}, + utils::{convert_point_to_curve_target, convert_slice_to_curve_point, FromTargets}, }; -use plonky2::hash::hash_types::{HashOutTarget, NUM_HASH_OUT_ELTS}; -use plonky2_ecgfp5::gadgets::curve::{CircuitBuilderEcGFp5, CurveTarget}; +use plonky2::{ + field::{extension::FieldExtension, types::Field}, + iop::target::Target, +}; +use plonky2_ecgfp5::{ + curve::curve::WeierstrassPoint, + gadgets::curve::{CircuitBuilderEcGFp5, CurveTarget}, +}; /// The maximum length of a transaction index in a block in nibbles. /// Theoretically a block can have up to 1428 transactions in Ethereum, which takes 3 bytes to represent. @@ -23,7 +31,7 @@ const T_RANGE: PublicInputRange = K_RANGE.end..K_RANGE.end + 1; /// - `DV : Digest[F]` : value digest of all rows to extract const DV_RANGE: PublicInputRange = T_RANGE.end..T_RANGE.end + CURVE_TARGET_LEN; /// - `DM : Digest[F]` : metadata digest to extract -const DM_RANGE: PublicInputRange = DV_RANGE.end..DV_RANGE.end + NUM_HASH_OUT_ELTS; +const DM_RANGE: PublicInputRange = DV_RANGE.end..DV_RANGE.end + CURVE_TARGET_LEN; /// Public inputs for contract extraction #[derive(Clone, Debug)] @@ -35,7 +43,7 @@ pub struct PublicInputArgs<'a> { /// Digest of the values pub(crate) dv: CurveTarget, /// The poseidon hash of the metadata - pub(crate) dm: HashOutTarget, + pub(crate) dm: CurveTarget, } impl<'a> PublicInputCommon for PublicInputArgs<'a> { @@ -48,12 +56,7 @@ impl<'a> PublicInputCommon for PublicInputArgs<'a> { impl<'a> PublicInputArgs<'a> { /// Create a new public inputs. 
- pub fn new( - h: &'a OutputHash, - k: &'a ReceiptKeyWire, - dv: CurveTarget, - dm: HashOutTarget, - ) -> Self { + pub fn new(h: &'a OutputHash, k: &'a ReceiptKeyWire, dv: CurveTarget, dm: CurveTarget) -> Self { Self { h, k, dv, dm } } } @@ -63,14 +66,105 @@ impl<'a> PublicInputArgs<'a> { self.h.register_as_public_input(cb); self.k.register_as_input(cb); cb.register_curve_public_input(self.dv); - cb.register_public_inputs(&self.dm.elements); + cb.register_curve_public_input(self.dm); } pub fn digest_value(&self) -> CurveTarget { self.dv } - pub fn digest_metadata(&self) -> HashOutTarget { + pub fn digest_metadata(&self) -> CurveTarget { self.dm } } + +/// Public inputs wrapper of any proof generated in this module +#[derive(Clone, Debug)] +pub struct PublicInputs<'a, T> { + pub(crate) proof_inputs: &'a [T], +} + +impl PublicInputs<'_, Target> { + /// Get the merkle hash of the subtree this proof has processed. + pub fn root_hash_target(&self) -> OutputHash { + OutputHash::from_targets(self.root_hash_info()) + } + + /// Get the MPT key defined over the public inputs. + pub fn mpt_key(&self) -> ReceiptKeyWire { + let (key, ptr) = self.mpt_key_info(); + ReceiptKeyWire { + key: Array { + arr: std::array::from_fn(|i| key[i]), + }, + pointer: ptr, + } + } + + /// Get the values digest defined over the public inputs. + pub fn values_digest_target(&self) -> CurveTarget { + convert_point_to_curve_target(self.values_digest_info()) + } + + /// Get the metadata digest defined over the public inputs. + pub fn metadata_digest_target(&self) -> CurveTarget { + convert_point_to_curve_target(self.metadata_digest_info()) + } +} + +impl PublicInputs<'_, GFp> { + /// Get the merkle hash of the subtree this proof has processed. + pub fn root_hash(&self) -> Vec { + let hash = self.root_hash_info(); + hash.iter().map(|t| t.0 as u32).collect() + } + + /// Get the values digest defined over the public inputs. 
+ pub fn values_digest(&self) -> WeierstrassPoint { + let (x, y, is_inf) = self.values_digest_info(); + + WeierstrassPoint { + x: GFp5::from_basefield_array(std::array::from_fn::(|i| x[i])), + y: GFp5::from_basefield_array(std::array::from_fn::(|i| y[i])), + is_inf: is_inf.is_nonzero(), + } + } + + /// Get the metadata digest defined over the public inputs. + pub fn metadata_digest(&self) -> WeierstrassPoint { + let (x, y, is_inf) = self.metadata_digest_info(); + + WeierstrassPoint { + x: GFp5::from_basefield_array(std::array::from_fn::(|i| x[i])), + y: GFp5::from_basefield_array(std::array::from_fn::(|i| y[i])), + is_inf: is_inf.is_nonzero(), + } + } +} + +impl<'a, T: Copy> PublicInputs<'a, T> { + pub(crate) const TOTAL_LEN: usize = DM_RANGE.end; + + pub fn new(proof_inputs: &'a [T]) -> Self { + Self { proof_inputs } + } + + pub fn root_hash_info(&self) -> &[T] { + &self.proof_inputs[H_RANGE] + } + + pub fn mpt_key_info(&self) -> (&[T], T) { + let key = &self.proof_inputs[K_RANGE]; + let ptr = self.proof_inputs[T_RANGE.start]; + + (key, ptr) + } + + pub fn values_digest_info(&self) -> ([T; 5], [T; 5], T) { + convert_slice_to_curve_point(&self.proof_inputs[DV_RANGE]) + } + + pub fn metadata_digest_info(&self) -> ([T; 5], [T; 5], T) { + convert_slice_to_curve_point(&self.proof_inputs[DM_RANGE]) + } +} diff --git a/mp2-v1/src/values_extraction/api.rs b/mp2-v1/src/values_extraction/api.rs index 40646b685..f53e35e5e 100644 --- a/mp2-v1/src/values_extraction/api.rs +++ b/mp2-v1/src/values_extraction/api.rs @@ -278,7 +278,7 @@ macro_rules! impl_branch_circuits { } /// generates a proof from the inputs stored in `branch`. Depending on the size of the node, /// and the number of children proofs, it selects the right specialized circuit to generate the proof. 
- fn generate_proof( + pub fn generate_proof( &self, set: &RecursiveCircuits, branch_node: InputNode, From 7f9702c7a42b940354be54db18c116f855f75443 Mon Sep 17 00:00:00 2001 From: Zack Youell Date: Fri, 8 Nov 2024 15:57:51 +0000 Subject: [PATCH 07/47] Change Receipt query test --- mp2-common/src/eth.rs | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/mp2-common/src/eth.rs b/mp2-common/src/eth.rs index 54864d74d..4fbc15120 100644 --- a/mp2-common/src/eth.rs +++ b/mp2-common/src/eth.rs @@ -751,7 +751,7 @@ impl BlockUtil { // recompute the receipts trie by first converting all receipts form RPC type to consensus type // since in Alloy these are two different types and RLP functions are only implemented for // consensus ones. - fn check(&mut self) -> Result<()> { + pub fn check(&mut self) -> Result<()> { let computed = self.receipts_trie.root_hash()?; let tx_computed = self.transactions_trie.root_hash()?; let expected = self.block.header.receipts_root; @@ -944,8 +944,8 @@ mod test { use alloy::{ node_bindings::Anvil, - primitives::{Bytes, Log, U256}, - providers::{ext::AnvilApi, Provider, ProviderBuilder, WalletProvider}, + primitives::{Bytes, Log}, + providers::{Provider, ProviderBuilder, WalletProvider}, rlp::Decodable, sol, }; @@ -1131,17 +1131,18 @@ mod test { // Fire off a few transactions to emit some events let address = rpc.default_signer_address(); - rpc.anvil_set_nonce(address, U256::from(0)).await.unwrap(); + let current_nonce = rpc.get_transaction_count(address).await?; + let tx_reqs = (0..10) .map(|i| match i % 2 { 0 => contract .testEmit() .into_transaction_request() - .nonce(i as u64), + .nonce(current_nonce + i as u64), 1 => contract .twoEmits() .into_transaction_request() - .nonce(i as u64), + .nonce(current_nonce + i as u64), _ => unreachable!(), }) .collect::>(); From 9b81835d951e0627587adf623dca7cab1533be71 Mon Sep 17 00:00:00 2001 From: Zack Youell Date: Mon, 11 Nov 2024 11:55:02 +0000 Subject: [PATCH 08/47] Address 
review comments --- Cargo.toml | 1 + mp2-common/Cargo.toml | 1 - mp2-common/src/array.rs | 24 ++++++- mp2-common/src/eth.rs | 51 ++++++++------- mp2-common/src/mpt_sequential/key.rs | 2 +- mp2-test/src/circuit.rs | 2 +- mp2-test/src/mpt_sequential.rs | 43 ++++++------- mp2-v1/src/block_extraction/mod.rs | 2 +- mp2-v1/src/receipt_extraction/mod.rs | 4 +- .../src/receipt_extraction/public_inputs.rs | 8 +-- mp2-v1/src/values_extraction/api.rs | 2 +- rustc-ice-2024-11-04T12_36_50-74186.txt | 63 ------------------- rustc-ice-2024-11-04T12_37_01-74253.txt | 62 ------------------ rustc-ice-2024-11-04T12_37_13-74307.txt | 62 ------------------ 14 files changed, 81 insertions(+), 246 deletions(-) delete mode 100644 rustc-ice-2024-11-04T12_36_50-74186.txt delete mode 100644 rustc-ice-2024-11-04T12_37_01-74253.txt delete mode 100644 rustc-ice-2024-11-04T12_37_13-74307.txt diff --git a/Cargo.toml b/Cargo.toml index 9436c46a4..952415d9a 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -31,6 +31,7 @@ alloy = { version = "0.6", default-features = false, features = [ "transports", "postgres", ] } + anyhow = "1.0" base64 = "0.22" bb8 = "0.8.5" diff --git a/mp2-common/Cargo.toml b/mp2-common/Cargo.toml index 2ca2673a0..084b5b3a2 100644 --- a/mp2-common/Cargo.toml +++ b/mp2-common/Cargo.toml @@ -32,7 +32,6 @@ hex.workspace = true rand.workspace = true rstest.workspace = true tokio.workspace = true - mp2_test = { path = "../mp2-test" } [features] diff --git a/mp2-common/src/array.rs b/mp2-common/src/array.rs index 27f99d6a5..2650f0a31 100644 --- a/mp2-common/src/array.rs +++ b/mp2-common/src/array.rs @@ -611,6 +611,19 @@ where /// This function allows you to search a larger [`Array`] by representing it as a number of /// smaller [`Array`]s with size [`RANDOM_ACCESS_SIZE`], padding the final smaller array where required. 
+ /// For example if we have an array of length `512` and we wish to find the value at index `324` the following + /// occurs: + /// 1) Split the original [`Array`] into `512 / 64 = 8` chunks `[A_0, ... , A_7]` + /// 2) Express `324` in base 64 (Little Endian) `[4, 5]` + /// 3) For each `i \in [0, 7]` use a [`RandomAccessGate`] to lookup the `4`th element, `v_i,3` of `A_i` + /// and create a new list of length `8` that consists of `[v_0,3, v_1,3, ... v_7,3]` + /// 4) Now use another [`RandomAccessGate`] to select the `5`th element of this new list (`v_4,3` as we have zero-indexed both times) + /// + /// For comparison using [`Self::value_at`] on an [`Array`] with length `512` results in 129 rows, using this method + /// on the same [`Array`] results in 15 rows. + /// + /// As an aside, if the [`Array`] length is not divisible by `64` then we pad with zero values, since the size of the + /// [`Array`] is a compile time constant this will not affect circuit preprocessing. pub fn random_access_large_array, const D: usize>( &self, b: &mut CircuitBuilder, @@ -665,9 +678,12 @@ where T::from_target(b.random_access(high_bits, first_search)) } - /// Returns [`self[at..at+SUB_SIZE]`]. - /// This is more expensive than [`Self::extract_array`] due to using [`Self::random_access_large_array`] + /// Returns [`Self[at..at+SUB_SIZE]`]. + /// This is more expensive than [`Self::extract_array`] for [`Array`]s that are shorter than 64 elements long due to using [`Self::random_access_large_array`] /// instead of [`Self::value_at`]. This function enforces that the values extracted are within the array. + /// + /// For comparison using [`Self::extract_array`] on an [`Array`] of size `512` results in 5179 rows, using this method instead + /// results in 508 rows.
pub fn extract_array_large< F: RichField + Extendable, const D: usize, @@ -692,7 +708,6 @@ where let i_target = b.constant(F::from_canonical_usize(i)); let i_plus_n_target = b.add(at, i_target); - self.random_access_large_array(b, i_plus_n_target) }), } @@ -932,6 +947,7 @@ mod test { let index = c.add_virtual_target(); let extracted = array.random_access_large_array(c, index); c.connect(exp_value, extracted); + (array, index, exp_value) } fn prove(&self, pw: &mut PartialWitness, wires: &Self::Wires) { @@ -942,6 +958,7 @@ mod test { pw.set_target(wires.2, F::from_canonical_u8(self.exp)); } } + let mut rng = thread_rng(); let mut arr = [0u8; SIZE]; rng.fill(&mut arr[..]); @@ -1035,6 +1052,7 @@ mod test { .assign(pw, &create_array(|i| F::from_canonical_u8(self.exp[i]))); } } + let mut rng = thread_rng(); let mut arr = [0u8; SIZE]; rng.fill(&mut arr[..]); diff --git a/mp2-common/src/eth.rs b/mp2-common/src/eth.rs index 4fbc15120..e2949e92e 100644 --- a/mp2-common/src/eth.rs +++ b/mp2-common/src/eth.rs @@ -793,9 +793,7 @@ mod tryethers { use eth_trie::{EthTrie, MemoryDB, Trie}; use ethers::{ providers::{Http, Middleware, Provider}, - types::{ - Block, BlockId, Bytes, EIP1186ProofResponse, Transaction, TransactionReceipt, H256, U64, - }, + types::{BlockId, Bytes, Transaction, TransactionReceipt, U64}, }; use rlp::{Encodable, RlpStream}; @@ -943,12 +941,15 @@ mod test { use std::str::FromStr; use alloy::{ + network::TransactionBuilder, node_bindings::Anvil, - primitives::{Bytes, Log}, - providers::{Provider, ProviderBuilder, WalletProvider}, + primitives::{Bytes, Log, U256}, + providers::{ext::AnvilApi, Provider, ProviderBuilder}, rlp::Decodable, sol, }; + use alloy_multicall::Multicall; + use eth_trie::Nibbles; use ethereum_types::U64; use ethers::{ @@ -1096,8 +1097,7 @@ mod test { #[tokio::test] async fn test_receipt_query() -> Result<()> { let rpc = ProviderBuilder::new() - .with_recommended_fillers() - .on_anvil_with_wallet_and_config(|anvil| Anvil::block_time(anvil, 
1)); + .on_anvil_with_config(|anvil| Anvil::fork(anvil, get_sepolia_url())); // Make a contract that emits events so we can pick up on them sol! { @@ -1128,30 +1128,37 @@ mod test { // Deploy the contract using anvil let contract = EventEmitter::deploy(rpc.clone()).await?; - // Fire off a few transactions to emit some events - - let address = rpc.default_signer_address(); - let current_nonce = rpc.get_transaction_count(address).await?; - + // (0..10).for_each(|j| { + // match i % 2 { + // 0 => multicall.add_call(), + // 1 => contract.twoEmits().into_transaction_request(), + // _ => unreachable!(), + // } + // }); let tx_reqs = (0..10) .map(|i| match i % 2 { - 0 => contract - .testEmit() - .into_transaction_request() - .nonce(current_nonce + i as u64), - 1 => contract - .twoEmits() - .into_transaction_request() - .nonce(current_nonce + i as u64), + 0 => contract.testEmit().into_transaction_request(), + 1 => contract.twoEmits().into_transaction_request(), _ => unreachable!(), }) .collect::>(); let mut join_set = JoinSet::new(); + tx_reqs.into_iter().for_each(|tx_req| { let rpc_clone = rpc.clone(); join_set.spawn(async move { rpc_clone - .send_transaction(tx_req) + .anvil_auto_impersonate_account(true) + .await + .unwrap(); + let sender_address = Address::random(); + let balance = U256::from(1e18 as u64); + rpc_clone + .anvil_set_balance(sender_address, balance) + .await + .unwrap(); + rpc_clone + .send_transaction(tx_req.with_from(sender_address)) .await .unwrap() .watch() @@ -1167,7 +1174,7 @@ mod test { } let block_number = transactions.first().unwrap().block_number.unwrap(); - + println!("block number: {block_number}"); // We want to get the event signature so we can make a ReceiptQuery let all_events = EventEmitter::abi::events(); diff --git a/mp2-common/src/mpt_sequential/key.rs b/mp2-common/src/mpt_sequential/key.rs index f98b57aac..2a14780d7 100644 --- a/mp2-common/src/mpt_sequential/key.rs +++ b/mp2-common/src/mpt_sequential/key.rs @@ -19,7 +19,7 @@ pub type 
MPTKeyWire = MPTKeyWireGeneric; pub type ReceiptKeyWire = MPTKeyWireGeneric; -pub const MAX_TX_KEY_NIBBLE_LEN: usize = 6; +pub const MAX_TX_KEY_NIBBLE_LEN: usize = 4; /// Calculate the pointer from the MPT key. pub fn mpt_key_ptr(mpt_key: &[u8]) -> usize { diff --git a/mp2-test/src/circuit.rs b/mp2-test/src/circuit.rs index f810dac93..bed5a98c9 100644 --- a/mp2-test/src/circuit.rs +++ b/mp2-test/src/circuit.rs @@ -85,7 +85,7 @@ pub fn setup_circuit< }; println!("[+] Circuit data built in {:?}s", now.elapsed().as_secs()); - + println!("FRI config: {:?}", circuit_data.common.fri_params); (wires, circuit_data, vcd) } diff --git a/mp2-test/src/mpt_sequential.rs b/mp2-test/src/mpt_sequential.rs index 570170235..3ab1346e1 100644 --- a/mp2-test/src/mpt_sequential.rs +++ b/mp2-test/src/mpt_sequential.rs @@ -1,8 +1,9 @@ use alloy::{ eips::BlockNumberOrTag, + network::TransactionBuilder, node_bindings::Anvil, - primitives::U256, - providers::{ext::AnvilApi, Provider, ProviderBuilder, WalletProvider}, + primitives::{Address, U256}, + providers::{ext::AnvilApi, Provider, ProviderBuilder}, sol, }; use eth_trie::{EthTrie, MemoryDB, Trie}; @@ -111,9 +112,7 @@ pub fn generate_receipt_proofs() -> Vec { rt.block_on(async { // Spin up a local node. 
- let rpc = ProviderBuilder::new() - .with_recommended_fillers() - .on_anvil_with_wallet_and_config(|a| Anvil::block_time(a, 1)); + let rpc = ProviderBuilder::new().on_anvil_with_config(|anvil| Anvil::block_time(anvil, 1)); // Deploy the contract using anvil let event_contract = EventEmitter::deploy(rpc.clone()).await.unwrap(); @@ -121,26 +120,12 @@ pub fn generate_receipt_proofs() -> Vec { // Deploy the contract using anvil let other_contract = OtherEmitter::deploy(rpc.clone()).await.unwrap(); - let address = rpc.default_signer_address(); - rpc.anvil_set_nonce(address, U256::from(0)).await.unwrap(); let tx_reqs = (0..25) .map(|i| match i % 4 { - 0 => event_contract - .testEmit() - .into_transaction_request() - .nonce(i as u64), - 1 => event_contract - .twoEmits() - .into_transaction_request() - .nonce(i as u64), - 2 => other_contract - .otherEmit() - .into_transaction_request() - .nonce(i as u64), - 3 => other_contract - .twoEmits() - .into_transaction_request() - .nonce(i as u64), + 0 => event_contract.testEmit().into_transaction_request(), + 1 => event_contract.twoEmits().into_transaction_request(), + 2 => other_contract.otherEmit().into_transaction_request(), + 3 => other_contract.twoEmits().into_transaction_request(), _ => unreachable!(), }) .collect::>(); @@ -148,8 +133,18 @@ pub fn generate_receipt_proofs() -> Vec { tx_reqs.into_iter().for_each(|tx_req| { let rpc_clone = rpc.clone(); join_set.spawn(async move { + let sender_address = Address::random(); + let funding = U256::from(1e18 as u64); rpc_clone - .send_transaction(tx_req) + .anvil_set_balance(sender_address, funding) + .await + .unwrap(); + rpc_clone + .anvil_auto_impersonate_account(true) + .await + .unwrap(); + rpc_clone + .send_transaction(tx_req.with_from(sender_address)) .await .unwrap() .watch() diff --git a/mp2-v1/src/block_extraction/mod.rs b/mp2-v1/src/block_extraction/mod.rs index 261cf95d1..9515ea5ef 100644 --- a/mp2-v1/src/block_extraction/mod.rs +++ b/mp2-v1/src/block_extraction/mod.rs 
@@ -121,7 +121,7 @@ mod test { ); assert_eq!( U256::from_fields(pi.block_number_raw()), - U256::from(block.header.number), + U256::from(block.header.number) ); assert_eq!( pi.state_root_raw(), diff --git a/mp2-v1/src/receipt_extraction/mod.rs b/mp2-v1/src/receipt_extraction/mod.rs index 4950aef20..a21f7fc41 100644 --- a/mp2-v1/src/receipt_extraction/mod.rs +++ b/mp2-v1/src/receipt_extraction/mod.rs @@ -6,7 +6,9 @@ use mp2_common::{ }; use plonky2::field::types::Field; -/// Calculate `metadata_digest = D(key_id || value_id || slot)` for receipt leaf. +/// Calculate `metadata_digest = D(address || signature || topics)` for receipt leaf. +/// Topics is an array of 5 values (some are dummies), each being `column_id`, `rel_byte_offset` (from the start of the log) +/// and `len`. pub fn compute_receipt_leaf_metadata_digest(event: &EventLogInfo) -> Digest { let topics_flat = event .topics diff --git a/mp2-v1/src/receipt_extraction/public_inputs.rs b/mp2-v1/src/receipt_extraction/public_inputs.rs index 7a44ed175..e4fc8d5b9 100644 --- a/mp2-v1/src/receipt_extraction/public_inputs.rs +++ b/mp2-v1/src/receipt_extraction/public_inputs.rs @@ -19,14 +19,14 @@ use plonky2_ecgfp5::{ }; /// The maximum length of a transaction index in a block in nibbles. -/// Theoretically a block can have up to 1428 transactions in Ethereum, which takes 3 bytes to represent. -const MAX_INDEX_NIBBLES: usize = 6; +/// Theoretically a block can have up to 1428 transactions in Ethereum, which takes 2 bytes to represent. 
+const MAX_INDEX_NIBBLES: usize = 4; // Contract extraction public Inputs: /// - `H : [8]F` : packed node hash const H_RANGE: PublicInputRange = 0..PACKED_HASH_LEN; -/// - `K : [6]F` : Length of the transaction index in nibbles +/// - `K : [4]F` : Length of the transaction index in nibbles const K_RANGE: PublicInputRange = H_RANGE.end..H_RANGE.end + MAX_INDEX_NIBBLES; -/// `T : F` pointer in the MPT indicating portion of the key already traversed (from 6 → 0) +/// `T : F` pointer in the MPT indicating portion of the key already traversed (from 4 → 0) const T_RANGE: PublicInputRange = K_RANGE.end..K_RANGE.end + 1; /// - `DV : Digest[F]` : value digest of all rows to extract const DV_RANGE: PublicInputRange = T_RANGE.end..T_RANGE.end + CURVE_TARGET_LEN; diff --git a/mp2-v1/src/values_extraction/api.rs b/mp2-v1/src/values_extraction/api.rs index f53e35e5e..40646b685 100644 --- a/mp2-v1/src/values_extraction/api.rs +++ b/mp2-v1/src/values_extraction/api.rs @@ -278,7 +278,7 @@ macro_rules! impl_branch_circuits { } /// generates a proof from the inputs stored in `branch`. Depending on the size of the node, /// and the number of children proofs, it selects the right specialized circuit to generate the proof. 
- pub fn generate_proof( + fn generate_proof( &self, set: &RecursiveCircuits, branch_node: InputNode, diff --git a/rustc-ice-2024-11-04T12_36_50-74186.txt b/rustc-ice-2024-11-04T12_36_50-74186.txt deleted file mode 100644 index d48781bb7..000000000 --- a/rustc-ice-2024-11-04T12_36_50-74186.txt +++ /dev/null @@ -1,63 +0,0 @@ -thread 'rustc' panicked at /rustc/3f1be1ec7ec3d8e80beb381ee82164a0aa3ca777/compiler/rustc_type_ir/src/binder.rs:777:9: -const parameter `KEY_LEN_BYTES/#3` (KEY_LEN_BYTES/#3/3) out of range when instantiating args=[DEPTH/#0, NODE_LEN/#1, KEY_LEN/#2] -stack backtrace: - 0: 0x11209ec0c - std::backtrace::Backtrace::create::hd2b9e24a71fd24ea - 1: 0x10ff1b468 - as core[78ac8d9058276e2b]::ops::function::Fn<(&dyn for<'a, 'b> core[78ac8d9058276e2b]::ops::function::Fn<(&'a std[25544cbdc54c9068]::panic::PanicHookInfo<'b>,), Output = ()> + core[78ac8d9058276e2b]::marker::Sync + core[78ac8d9058276e2b]::marker::Send, &std[25544cbdc54c9068]::panic::PanicHookInfo)>>::call - 2: 0x1120b9608 - std::panicking::rust_panic_with_hook::hbaa3501f6245c05a - 3: 0x1120b9260 - std::panicking::begin_panic_handler::{{closure}}::hd341aa107154c508 - 4: 0x1120b6e28 - std::sys::backtrace::__rust_end_short_backtrace::hca058610990f2143 - 5: 0x1120b8f24 - _rust_begin_unwind - 6: 0x1147a7ee4 - core::panicking::panic_fmt::h81353f1686d3b9a2 - 7: 0x1148ddc1c - >::const_param_out_of_range - 8: 0x110de5ebc - as rustc_type_ir[47614f3ecd88d1ff]::fold::FallibleTypeFolder>::try_fold_const - 9: 0x110db651c - rustc_middle[71f41ea3d2538dcd]::ty::util::fold_list::, &rustc_middle[71f41ea3d2538dcd]::ty::list::RawList<(), rustc_middle[71f41ea3d2538dcd]::ty::generic_args::GenericArg>, rustc_middle[71f41ea3d2538dcd]::ty::generic_args::GenericArg, <&rustc_middle[71f41ea3d2538dcd]::ty::list::RawList<(), rustc_middle[71f41ea3d2538dcd]::ty::generic_args::GenericArg> as rustc_type_ir[47614f3ecd88d1ff]::fold::TypeFoldable>::try_fold_with>::{closure#0}> - 10: 0x110daa120 - >::super_fold_with::> - 11: 
0x110cf9f18 - >::super_fold_with::> - 12: 0x110d70d94 - <&rustc_middle[71f41ea3d2538dcd]::ty::list::RawList<(), rustc_middle[71f41ea3d2538dcd]::ty::generic_args::GenericArg> as rustc_type_ir[47614f3ecd88d1ff]::fold::TypeFoldable>::try_fold_with::> - 13: 0x110cf7c2c - as rustc_type_ir[47614f3ecd88d1ff]::fold::TypeFoldable>::try_fold_with::> - 14: 0x110cf73b8 - as rustc_type_ir[47614f3ecd88d1ff]::fold::TypeFoldable>::try_fold_with::> - 15: 0x110df372c - >::try_fold_with::> - 16: 0x110dc5a1c - ::instantiate_into - 17: 0x111cc9848 - ::nominal_obligations - 18: 0x111cc8710 - >::visit_const - 19: 0x111cc7b58 - >::visit_ty - 20: 0x111cc5db0 - rustc_trait_selection[55a89e4d0d7ea7c6]::traits::wf::obligations - 21: 0x111e3b15c - ::process_obligation - 22: 0x111e1c724 - >::process_obligations:: - 23: 0x111e383c4 - as rustc_infer[3d6a6834044a20c4]::traits::engine::TraitEngine>::select_where_possible - 24: 0x111c66608 - >::assumed_wf_types_and_report_errors - 25: 0x110376c6c - rustc_hir_analysis[6576f1f28a8b13c4]::check::wfcheck::check_well_formed - 26: 0x11160ad34 - rustc_query_impl[30466c14bdba48]::plumbing::__rust_begin_short_backtrace::> - 27: 0x1117112e0 - >::call_once - 28: 0x1115abf1c - rustc_query_system[972cd5053bb6237d]::query::plumbing::try_execute_query::>, false, false, false>, rustc_query_impl[30466c14bdba48]::plumbing::QueryCtxt, true> - 29: 0x111788630 - rustc_query_impl[30466c14bdba48]::query_impl::check_well_formed::get_query_incr::__rust_end_short_backtrace - 30: 0x11036a5ec - rustc_middle[71f41ea3d2538dcd]::query::plumbing::query_ensure_error_guaranteed::>, ()> - 31: 0x11037d898 - rustc_hir_analysis[6576f1f28a8b13c4]::check::wfcheck::check_mod_type_wf - 32: 0x11160ad10 - rustc_query_impl[30466c14bdba48]::plumbing::__rust_begin_short_backtrace::> - 33: 0x111711048 - >::call_once - 34: 0x11156cf28 - rustc_query_system[972cd5053bb6237d]::query::plumbing::try_execute_query::>, false, false, false>, rustc_query_impl[30466c14bdba48]::plumbing::QueryCtxt, true> - 
35: 0x111775ecc - rustc_query_impl[30466c14bdba48]::query_impl::check_mod_type_wf::get_query_incr::__rust_end_short_backtrace - 36: 0x11036534c - ::run::<(), rustc_data_structures[3bb601c435a2842f]::sync::parallel::enabled::par_for_each_in<&rustc_hir[c448669f75bf36d2]::hir_id::OwnerId, &[rustc_hir[c448669f75bf36d2]::hir_id::OwnerId], ::par_for_each_module::{closure#0}>::{closure#0}::{closure#1}::{closure#0}> - 37: 0x11041513c - rustc_hir_analysis[6576f1f28a8b13c4]::check_crate - 38: 0x1108bb918 - rustc_interface[6b7e568f89869ca2]::passes::analysis - 39: 0x11160e944 - rustc_query_impl[30466c14bdba48]::plumbing::__rust_begin_short_backtrace::> - 40: 0x1116b2cf0 - >::call_once - 41: 0x11152ae34 - rustc_query_system[972cd5053bb6237d]::query::plumbing::try_execute_query::>, false, false, false>, rustc_query_impl[30466c14bdba48]::plumbing::QueryCtxt, true> - 42: 0x1117636ec - rustc_query_impl[30466c14bdba48]::query_impl::analysis::get_query_incr::__rust_end_short_backtrace - 43: 0x10ff66ee0 - ::enter::> - 44: 0x10ff34448 - ::enter::, rustc_span[8c398afceecb6ede]::ErrorGuaranteed>> - 45: 0x10ff81978 - rustc_span[8c398afceecb6ede]::create_session_globals_then::, rustc_interface[6b7e568f89869ca2]::util::run_in_thread_with_globals, rustc_driver_impl[763c4ce7974ba5fb]::run_compiler::{closure#0}>::{closure#1}, core[78ac8d9058276e2b]::result::Result<(), rustc_span[8c398afceecb6ede]::ErrorGuaranteed>>::{closure#0}, core[78ac8d9058276e2b]::result::Result<(), rustc_span[8c398afceecb6ede]::ErrorGuaranteed>>::{closure#0}::{closure#0}::{closure#0}> - 46: 0x10ff7e0b8 - std[25544cbdc54c9068]::sys::backtrace::__rust_begin_short_backtrace::, rustc_driver_impl[763c4ce7974ba5fb]::run_compiler::{closure#0}>::{closure#1}, core[78ac8d9058276e2b]::result::Result<(), rustc_span[8c398afceecb6ede]::ErrorGuaranteed>>::{closure#0}, core[78ac8d9058276e2b]::result::Result<(), rustc_span[8c398afceecb6ede]::ErrorGuaranteed>>::{closure#0}::{closure#0}, core[78ac8d9058276e2b]::result::Result<(), 
rustc_span[8c398afceecb6ede]::ErrorGuaranteed>> - 47: 0x10ff7edb8 - <::spawn_unchecked_, rustc_driver_impl[763c4ce7974ba5fb]::run_compiler::{closure#0}>::{closure#1}, core[78ac8d9058276e2b]::result::Result<(), rustc_span[8c398afceecb6ede]::ErrorGuaranteed>>::{closure#0}, core[78ac8d9058276e2b]::result::Result<(), rustc_span[8c398afceecb6ede]::ErrorGuaranteed>>::{closure#0}::{closure#0}, core[78ac8d9058276e2b]::result::Result<(), rustc_span[8c398afceecb6ede]::ErrorGuaranteed>>::{closure#1} as core[78ac8d9058276e2b]::ops::function::FnOnce<()>>::call_once::{shim:vtable#0} - 48: 0x1120c3a78 - std::sys::pal::unix::thread::Thread::new::thread_start::h9a782c2ee1570786 - 49: 0x18b24ef94 - __pthread_joiner_wake - - -rustc version: 1.84.0-nightly (3f1be1ec7 2024-10-28) -platform: aarch64-apple-darwin - -query stack during panic: -#0 [check_well_formed] checking that `mpt_sequential::` is well-formed -#1 [check_mod_type_wf] checking that types are well-formed in module `mpt_sequential` -#2 [analysis] running analysis passes on this crate -end of query stack diff --git a/rustc-ice-2024-11-04T12_37_01-74253.txt b/rustc-ice-2024-11-04T12_37_01-74253.txt deleted file mode 100644 index 6bcecf0f7..000000000 --- a/rustc-ice-2024-11-04T12_37_01-74253.txt +++ /dev/null @@ -1,62 +0,0 @@ -thread 'rustc' panicked at /rustc/3f1be1ec7ec3d8e80beb381ee82164a0aa3ca777/compiler/rustc_type_ir/src/binder.rs:777:9: -const parameter `KEY_LEN_BYTES/#3` (KEY_LEN_BYTES/#3/3) out of range when instantiating args=[DEPTH/#0, NODE_LEN/#1, KEY_LEN/#2] -stack backtrace: - 0: 0x110a2ec0c - std::backtrace::Backtrace::create::hd2b9e24a71fd24ea - 1: 0x10e8ab468 - as core[78ac8d9058276e2b]::ops::function::Fn<(&dyn for<'a, 'b> core[78ac8d9058276e2b]::ops::function::Fn<(&'a std[25544cbdc54c9068]::panic::PanicHookInfo<'b>,), Output = ()> + core[78ac8d9058276e2b]::marker::Sync + core[78ac8d9058276e2b]::marker::Send, &std[25544cbdc54c9068]::panic::PanicHookInfo)>>::call - 2: 0x110a49608 - 
std::panicking::rust_panic_with_hook::hbaa3501f6245c05a - 3: 0x110a49260 - std::panicking::begin_panic_handler::{{closure}}::hd341aa107154c508 - 4: 0x110a46e28 - std::sys::backtrace::__rust_end_short_backtrace::hca058610990f2143 - 5: 0x110a48f24 - _rust_begin_unwind - 6: 0x113137ee4 - core::panicking::panic_fmt::h81353f1686d3b9a2 - 7: 0x11326dc1c - >::const_param_out_of_range - 8: 0x10f775ebc - as rustc_type_ir[47614f3ecd88d1ff]::fold::FallibleTypeFolder>::try_fold_const - 9: 0x10f74651c - rustc_middle[71f41ea3d2538dcd]::ty::util::fold_list::, &rustc_middle[71f41ea3d2538dcd]::ty::list::RawList<(), rustc_middle[71f41ea3d2538dcd]::ty::generic_args::GenericArg>, rustc_middle[71f41ea3d2538dcd]::ty::generic_args::GenericArg, <&rustc_middle[71f41ea3d2538dcd]::ty::list::RawList<(), rustc_middle[71f41ea3d2538dcd]::ty::generic_args::GenericArg> as rustc_type_ir[47614f3ecd88d1ff]::fold::TypeFoldable>::try_fold_with>::{closure#0}> - 10: 0x10f73a120 - >::super_fold_with::> - 11: 0x10f689f18 - >::super_fold_with::> - 12: 0x10f687ca0 - as rustc_type_ir[47614f3ecd88d1ff]::fold::TypeFoldable>::try_fold_with::> - 13: 0x10f6873b8 - as rustc_type_ir[47614f3ecd88d1ff]::fold::TypeFoldable>::try_fold_with::> - 14: 0x10f78372c - >::try_fold_with::> - 15: 0x10f755a1c - ::instantiate_into - 16: 0x110659848 - ::nominal_obligations - 17: 0x110658710 - >::visit_const - 18: 0x110657b58 - >::visit_ty - 19: 0x110655db0 - rustc_trait_selection[55a89e4d0d7ea7c6]::traits::wf::obligations - 20: 0x1107cb15c - ::process_obligation - 21: 0x1107ac724 - >::process_obligations:: - 22: 0x1107c83c4 - as rustc_infer[3d6a6834044a20c4]::traits::engine::TraitEngine>::select_where_possible - 23: 0x1105f6608 - >::assumed_wf_types_and_report_errors - 24: 0x10ed06c6c - rustc_hir_analysis[6576f1f28a8b13c4]::check::wfcheck::check_well_formed - 25: 0x10ff9ad34 - rustc_query_impl[30466c14bdba48]::plumbing::__rust_begin_short_backtrace::> - 26: 0x1100a12e0 - >::call_once - 27: 0x10ff3bf1c - 
rustc_query_system[972cd5053bb6237d]::query::plumbing::try_execute_query::>, false, false, false>, rustc_query_impl[30466c14bdba48]::plumbing::QueryCtxt, true> - 28: 0x110118630 - rustc_query_impl[30466c14bdba48]::query_impl::check_well_formed::get_query_incr::__rust_end_short_backtrace - 29: 0x10ecfa5ec - rustc_middle[71f41ea3d2538dcd]::query::plumbing::query_ensure_error_guaranteed::>, ()> - 30: 0x10ed0d898 - rustc_hir_analysis[6576f1f28a8b13c4]::check::wfcheck::check_mod_type_wf - 31: 0x10ff9ad10 - rustc_query_impl[30466c14bdba48]::plumbing::__rust_begin_short_backtrace::> - 32: 0x1100a1048 - >::call_once - 33: 0x10fefcf28 - rustc_query_system[972cd5053bb6237d]::query::plumbing::try_execute_query::>, false, false, false>, rustc_query_impl[30466c14bdba48]::plumbing::QueryCtxt, true> - 34: 0x110105ecc - rustc_query_impl[30466c14bdba48]::query_impl::check_mod_type_wf::get_query_incr::__rust_end_short_backtrace - 35: 0x10ecf534c - ::run::<(), rustc_data_structures[3bb601c435a2842f]::sync::parallel::enabled::par_for_each_in<&rustc_hir[c448669f75bf36d2]::hir_id::OwnerId, &[rustc_hir[c448669f75bf36d2]::hir_id::OwnerId], ::par_for_each_module::{closure#0}>::{closure#0}::{closure#1}::{closure#0}> - 36: 0x10eda513c - rustc_hir_analysis[6576f1f28a8b13c4]::check_crate - 37: 0x10f24b918 - rustc_interface[6b7e568f89869ca2]::passes::analysis - 38: 0x10ff9e944 - rustc_query_impl[30466c14bdba48]::plumbing::__rust_begin_short_backtrace::> - 39: 0x110042cf0 - >::call_once - 40: 0x10febae34 - rustc_query_system[972cd5053bb6237d]::query::plumbing::try_execute_query::>, false, false, false>, rustc_query_impl[30466c14bdba48]::plumbing::QueryCtxt, true> - 41: 0x1100f36ec - rustc_query_impl[30466c14bdba48]::query_impl::analysis::get_query_incr::__rust_end_short_backtrace - 42: 0x10e8f6ee0 - ::enter::> - 43: 0x10e8c4448 - ::enter::, rustc_span[8c398afceecb6ede]::ErrorGuaranteed>> - 44: 0x10e911978 - rustc_span[8c398afceecb6ede]::create_session_globals_then::, 
rustc_interface[6b7e568f89869ca2]::util::run_in_thread_with_globals, rustc_driver_impl[763c4ce7974ba5fb]::run_compiler::{closure#0}>::{closure#1}, core[78ac8d9058276e2b]::result::Result<(), rustc_span[8c398afceecb6ede]::ErrorGuaranteed>>::{closure#0}, core[78ac8d9058276e2b]::result::Result<(), rustc_span[8c398afceecb6ede]::ErrorGuaranteed>>::{closure#0}::{closure#0}::{closure#0}> - 45: 0x10e90e0b8 - std[25544cbdc54c9068]::sys::backtrace::__rust_begin_short_backtrace::, rustc_driver_impl[763c4ce7974ba5fb]::run_compiler::{closure#0}>::{closure#1}, core[78ac8d9058276e2b]::result::Result<(), rustc_span[8c398afceecb6ede]::ErrorGuaranteed>>::{closure#0}, core[78ac8d9058276e2b]::result::Result<(), rustc_span[8c398afceecb6ede]::ErrorGuaranteed>>::{closure#0}::{closure#0}, core[78ac8d9058276e2b]::result::Result<(), rustc_span[8c398afceecb6ede]::ErrorGuaranteed>> - 46: 0x10e90edb8 - <::spawn_unchecked_, rustc_driver_impl[763c4ce7974ba5fb]::run_compiler::{closure#0}>::{closure#1}, core[78ac8d9058276e2b]::result::Result<(), rustc_span[8c398afceecb6ede]::ErrorGuaranteed>>::{closure#0}, core[78ac8d9058276e2b]::result::Result<(), rustc_span[8c398afceecb6ede]::ErrorGuaranteed>>::{closure#0}::{closure#0}, core[78ac8d9058276e2b]::result::Result<(), rustc_span[8c398afceecb6ede]::ErrorGuaranteed>>::{closure#1} as core[78ac8d9058276e2b]::ops::function::FnOnce<()>>::call_once::{shim:vtable#0} - 47: 0x110a53a78 - std::sys::pal::unix::thread::Thread::new::thread_start::h9a782c2ee1570786 - 48: 0x18b24ef94 - __pthread_joiner_wake - - -rustc version: 1.84.0-nightly (3f1be1ec7 2024-10-28) -platform: aarch64-apple-darwin - -query stack during panic: -#0 [check_well_formed] checking that `mpt_sequential::` is well-formed -#1 [check_mod_type_wf] checking that types are well-formed in module `mpt_sequential` -#2 [analysis] running analysis passes on this crate -end of query stack diff --git a/rustc-ice-2024-11-04T12_37_13-74307.txt b/rustc-ice-2024-11-04T12_37_13-74307.txt deleted file mode 
100644 index 6eb26635b..000000000 --- a/rustc-ice-2024-11-04T12_37_13-74307.txt +++ /dev/null @@ -1,62 +0,0 @@ -thread 'rustc' panicked at /rustc/3f1be1ec7ec3d8e80beb381ee82164a0aa3ca777/compiler/rustc_type_ir/src/binder.rs:777:9: -const parameter `KEY_LEN_BYTES/#3` (KEY_LEN_BYTES/#3/3) out of range when instantiating args=[DEPTH/#0, NODE_LEN/#1, KEY_LEN/#2] -stack backtrace: - 0: 0x10e1cec0c - std::backtrace::Backtrace::create::hd2b9e24a71fd24ea - 1: 0x10c04b468 - as core[78ac8d9058276e2b]::ops::function::Fn<(&dyn for<'a, 'b> core[78ac8d9058276e2b]::ops::function::Fn<(&'a std[25544cbdc54c9068]::panic::PanicHookInfo<'b>,), Output = ()> + core[78ac8d9058276e2b]::marker::Sync + core[78ac8d9058276e2b]::marker::Send, &std[25544cbdc54c9068]::panic::PanicHookInfo)>>::call - 2: 0x10e1e9608 - std::panicking::rust_panic_with_hook::hbaa3501f6245c05a - 3: 0x10e1e9260 - std::panicking::begin_panic_handler::{{closure}}::hd341aa107154c508 - 4: 0x10e1e6e28 - std::sys::backtrace::__rust_end_short_backtrace::hca058610990f2143 - 5: 0x10e1e8f24 - _rust_begin_unwind - 6: 0x1108d7ee4 - core::panicking::panic_fmt::h81353f1686d3b9a2 - 7: 0x110a0dc1c - >::const_param_out_of_range - 8: 0x10cf15ebc - as rustc_type_ir[47614f3ecd88d1ff]::fold::FallibleTypeFolder>::try_fold_const - 9: 0x10cee651c - rustc_middle[71f41ea3d2538dcd]::ty::util::fold_list::, &rustc_middle[71f41ea3d2538dcd]::ty::list::RawList<(), rustc_middle[71f41ea3d2538dcd]::ty::generic_args::GenericArg>, rustc_middle[71f41ea3d2538dcd]::ty::generic_args::GenericArg, <&rustc_middle[71f41ea3d2538dcd]::ty::list::RawList<(), rustc_middle[71f41ea3d2538dcd]::ty::generic_args::GenericArg> as rustc_type_ir[47614f3ecd88d1ff]::fold::TypeFoldable>::try_fold_with>::{closure#0}> - 10: 0x10ceda120 - >::super_fold_with::> - 11: 0x10ce29f18 - >::super_fold_with::> - 12: 0x10ce27ca0 - as rustc_type_ir[47614f3ecd88d1ff]::fold::TypeFoldable>::try_fold_with::> - 13: 0x10ce273b8 - as 
rustc_type_ir[47614f3ecd88d1ff]::fold::TypeFoldable>::try_fold_with::> - 14: 0x10cf2372c - >::try_fold_with::> - 15: 0x10cef5a1c - ::instantiate_into - 16: 0x10ddf9848 - ::nominal_obligations - 17: 0x10ddf8710 - >::visit_const - 18: 0x10ddf7b58 - >::visit_ty - 19: 0x10ddf5db0 - rustc_trait_selection[55a89e4d0d7ea7c6]::traits::wf::obligations - 20: 0x10df6b15c - ::process_obligation - 21: 0x10df4c724 - >::process_obligations:: - 22: 0x10df683c4 - as rustc_infer[3d6a6834044a20c4]::traits::engine::TraitEngine>::select_where_possible - 23: 0x10dd96608 - >::assumed_wf_types_and_report_errors - 24: 0x10c4a6c6c - rustc_hir_analysis[6576f1f28a8b13c4]::check::wfcheck::check_well_formed - 25: 0x10d73ad34 - rustc_query_impl[30466c14bdba48]::plumbing::__rust_begin_short_backtrace::> - 26: 0x10d8412e0 - >::call_once - 27: 0x10d6dbf1c - rustc_query_system[972cd5053bb6237d]::query::plumbing::try_execute_query::>, false, false, false>, rustc_query_impl[30466c14bdba48]::plumbing::QueryCtxt, true> - 28: 0x10d8b8630 - rustc_query_impl[30466c14bdba48]::query_impl::check_well_formed::get_query_incr::__rust_end_short_backtrace - 29: 0x10c49a5ec - rustc_middle[71f41ea3d2538dcd]::query::plumbing::query_ensure_error_guaranteed::>, ()> - 30: 0x10c4ad898 - rustc_hir_analysis[6576f1f28a8b13c4]::check::wfcheck::check_mod_type_wf - 31: 0x10d73ad10 - rustc_query_impl[30466c14bdba48]::plumbing::__rust_begin_short_backtrace::> - 32: 0x10d841048 - >::call_once - 33: 0x10d69cf28 - rustc_query_system[972cd5053bb6237d]::query::plumbing::try_execute_query::>, false, false, false>, rustc_query_impl[30466c14bdba48]::plumbing::QueryCtxt, true> - 34: 0x10d8a5ecc - rustc_query_impl[30466c14bdba48]::query_impl::check_mod_type_wf::get_query_incr::__rust_end_short_backtrace - 35: 0x10c49534c - ::run::<(), rustc_data_structures[3bb601c435a2842f]::sync::parallel::enabled::par_for_each_in<&rustc_hir[c448669f75bf36d2]::hir_id::OwnerId, &[rustc_hir[c448669f75bf36d2]::hir_id::OwnerId], 
::par_for_each_module::{closure#0}>::{closure#0}::{closure#1}::{closure#0}> - 36: 0x10c54513c - rustc_hir_analysis[6576f1f28a8b13c4]::check_crate - 37: 0x10c9eb918 - rustc_interface[6b7e568f89869ca2]::passes::analysis - 38: 0x10d73e944 - rustc_query_impl[30466c14bdba48]::plumbing::__rust_begin_short_backtrace::> - 39: 0x10d7e2cf0 - >::call_once - 40: 0x10d65ae34 - rustc_query_system[972cd5053bb6237d]::query::plumbing::try_execute_query::>, false, false, false>, rustc_query_impl[30466c14bdba48]::plumbing::QueryCtxt, true> - 41: 0x10d8936ec - rustc_query_impl[30466c14bdba48]::query_impl::analysis::get_query_incr::__rust_end_short_backtrace - 42: 0x10c096ee0 - ::enter::> - 43: 0x10c064448 - ::enter::, rustc_span[8c398afceecb6ede]::ErrorGuaranteed>> - 44: 0x10c0b1978 - rustc_span[8c398afceecb6ede]::create_session_globals_then::, rustc_interface[6b7e568f89869ca2]::util::run_in_thread_with_globals, rustc_driver_impl[763c4ce7974ba5fb]::run_compiler::{closure#0}>::{closure#1}, core[78ac8d9058276e2b]::result::Result<(), rustc_span[8c398afceecb6ede]::ErrorGuaranteed>>::{closure#0}, core[78ac8d9058276e2b]::result::Result<(), rustc_span[8c398afceecb6ede]::ErrorGuaranteed>>::{closure#0}::{closure#0}::{closure#0}> - 45: 0x10c0ae0b8 - std[25544cbdc54c9068]::sys::backtrace::__rust_begin_short_backtrace::, rustc_driver_impl[763c4ce7974ba5fb]::run_compiler::{closure#0}>::{closure#1}, core[78ac8d9058276e2b]::result::Result<(), rustc_span[8c398afceecb6ede]::ErrorGuaranteed>>::{closure#0}, core[78ac8d9058276e2b]::result::Result<(), rustc_span[8c398afceecb6ede]::ErrorGuaranteed>>::{closure#0}::{closure#0}, core[78ac8d9058276e2b]::result::Result<(), rustc_span[8c398afceecb6ede]::ErrorGuaranteed>> - 46: 0x10c0aedb8 - <::spawn_unchecked_, rustc_driver_impl[763c4ce7974ba5fb]::run_compiler::{closure#0}>::{closure#1}, core[78ac8d9058276e2b]::result::Result<(), rustc_span[8c398afceecb6ede]::ErrorGuaranteed>>::{closure#0}, core[78ac8d9058276e2b]::result::Result<(), 
rustc_span[8c398afceecb6ede]::ErrorGuaranteed>>::{closure#0}::{closure#0}, core[78ac8d9058276e2b]::result::Result<(), rustc_span[8c398afceecb6ede]::ErrorGuaranteed>>::{closure#1} as core[78ac8d9058276e2b]::ops::function::FnOnce<()>>::call_once::{shim:vtable#0} - 47: 0x10e1f3a78 - std::sys::pal::unix::thread::Thread::new::thread_start::h9a782c2ee1570786 - 48: 0x18b24ef94 - __pthread_joiner_wake - - -rustc version: 1.84.0-nightly (3f1be1ec7 2024-10-28) -platform: aarch64-apple-darwin - -query stack during panic: -#0 [check_well_formed] checking that `mpt_sequential::` is well-formed -#1 [check_mod_type_wf] checking that types are well-formed in module `mpt_sequential` -#2 [analysis] running analysis passes on this crate -end of query stack From f1abff807f9775a725d450daddb02fa9948cfa20 Mon Sep 17 00:00:00 2001 From: Zack Youell Date: Wed, 27 Nov 2024 09:26:31 +0000 Subject: [PATCH 09/47] Value digest computation corrected --- mp2-common/src/eth.rs | 356 ++++++++++-------- mp2-common/src/mpt_sequential/mod.rs | 2 +- mp2-common/src/rlp.rs | 2 +- mp2-test/src/mpt_sequential.rs | 2 +- mp2-v1/src/receipt_extraction/leaf.rs | 109 ++++-- mp2-v1/src/receipt_extraction/mod.rs | 60 ++- .../src/receipt_extraction/public_inputs.rs | 4 +- mp2-v1/tests/common/block_extraction.rs | 2 +- 8 files changed, 338 insertions(+), 199 deletions(-) diff --git a/mp2-common/src/eth.rs b/mp2-common/src/eth.rs index e2949e92e..927bffb0d 100644 --- a/mp2-common/src/eth.rs +++ b/mp2-common/src/eth.rs @@ -6,7 +6,7 @@ use alloy::{ network::{eip2718::Encodable2718, TransactionResponse}, primitives::{Address, B256}, providers::{Provider, RootProvider}, - rlp::Encodable as AlloyEncodable, + rlp::{Decodable, Encodable as AlloyEncodable}, rpc::types::{ Block, BlockTransactions, EIP1186AccountProofResponse, Filter, ReceiptEnvelope, Transaction, }, @@ -131,6 +131,8 @@ pub struct ReceiptQuery { pub struct ReceiptProofInfo { /// The MPT proof that this Receipt is in the tree pub mpt_proof: Vec>, + /// The 
root of the Receipt Trie this receipt belongs to + pub mpt_root: H256, /// The index of this transaction in the block pub tx_index: u64, /// The size of the index in bytes @@ -228,7 +230,7 @@ impl TryFrom<&Log> for EventLogInfo { .take(remaining_topics) .for_each(|(j, info)| { *info = LogDataInfo { - column_id: j, + column_id: j + 1, rel_byte_offset: current_topic_offset, len: 32, }; @@ -261,7 +263,7 @@ impl TryFrom<&Log> for EventLogInfo { let chunk_header = chunk_rlp.payload_info()?; if chunk_header.value_len <= 32 { data[j] = LogDataInfo { - column_id: 3 + j, + column_id: remaining_topics + 1 + j, rel_byte_offset: current_topic_offset + additional_offset + chunk_header.header_len, @@ -531,6 +533,23 @@ impl ProofQuery { } } +impl ReceiptProofInfo { + pub fn to_receipt(&self) -> Result { + let memdb = Arc::new(MemoryDB::new(true)); + let tx_trie = EthTrie::new(Arc::clone(&memdb)); + + let mpt_key = self.tx_index.rlp_bytes(); + + let valid = tx_trie + .verify_proof(self.mpt_root, &mpt_key, self.mpt_proof.clone())? + .ok_or(anyhow!("No proof found when verifying"))?; + + let rlp_receipt = rlp::Rlp::new(&valid[1..]); + ReceiptWithBloom::decode(&mut rlp_receipt.as_raw()) + .map_err(|e| anyhow!("Could not decode receipt got: {}", e)) + } +} + impl ReceiptQuery { pub fn new(contract: Address, event: Event) -> Self { Self { contract, event } @@ -567,7 +586,7 @@ impl ReceiptQuery { // Construct the Receipt Trie for this block so we can retrieve MPT proofs. 
let mut block_util = BlockUtil::fetch(provider, block).await?; - + let mpt_root = block_util.receipts_trie.root_hash()?; let proofs = tx_indices .into_iter() .map(|index| { @@ -632,6 +651,7 @@ impl ReceiptQuery { Ok(ReceiptProofInfo { mpt_proof: proof, + mpt_root, tx_index: index, index_size, status_offset, @@ -707,7 +727,7 @@ impl BlockUtil { let mut transactions_trie = EthTrie::new(memdb.clone()); let consensus_receipts = receipts .into_iter() - .zip(all_tx.into_iter()) + .zip(all_tx.iter()) .map(|(receipt, transaction)| { let tx_index = receipt.transaction_index.unwrap().rlp_bytes(); @@ -783,157 +803,6 @@ fn from_rpc_logs_to_consensus( } } -// for compatibility check with alloy -#[cfg(test)] -mod tryethers { - - use std::sync::Arc; - - use anyhow::Result; - use eth_trie::{EthTrie, MemoryDB, Trie}; - use ethers::{ - providers::{Http, Middleware, Provider}, - types::{BlockId, Bytes, Transaction, TransactionReceipt, U64}, - }; - use rlp::{Encodable, RlpStream}; - - /// A wrapper around a transaction and its receipt. The receipt is used to filter - /// bad transactions, so we only compute over valid transactions. 
- pub struct TxAndReceipt(Transaction, TransactionReceipt); - - impl TxAndReceipt { - pub fn tx(&self) -> &Transaction { - &self.0 - } - pub fn receipt(&self) -> &TransactionReceipt { - &self.1 - } - pub fn tx_rlp(&self) -> Bytes { - self.0.rlp() - } - // TODO: this should be upstreamed to ethers-rs - pub fn receipt_rlp(&self) -> Bytes { - let tx_type = self.tx().transaction_type; - let mut rlp = RlpStream::new(); - rlp.begin_unbounded_list(); - match &self.1.status { - Some(s) if s.as_u32() == 1 => rlp.append(s), - _ => rlp.append_empty_data(), - }; - rlp.append(&self.1.cumulative_gas_used) - .append(&self.1.logs_bloom) - .append_list(&self.1.logs); - - rlp.finalize_unbounded_list(); - let rlp_bytes: Bytes = rlp.out().freeze().into(); - let mut encoded = vec![]; - match tx_type { - // EIP-2930 (0x01) - Some(x) if x == U64::from(0x1) => { - encoded.extend_from_slice(&[0x1]); - encoded.extend_from_slice(rlp_bytes.as_ref()); - encoded.into() - } - // EIP-1559 (0x02) - Some(x) if x == U64::from(0x2) => { - encoded.extend_from_slice(&[0x2]); - encoded.extend_from_slice(rlp_bytes.as_ref()); - encoded.into() - } - _ => rlp_bytes, - } - } - } - /// Structure containing the block header and its transactions / receipts. Amongst other things, - /// it is used to create a proof of inclusion for any transaction inside this block. - pub struct BlockData { - pub block: ethers::types::Block, - pub txs: Vec, - // TODO: add generics later - this may be re-used amongst different workers - pub tx_trie: EthTrie, - pub receipts_trie: EthTrie, - } - - impl BlockData { - pub async fn fetch + Send + Sync>( - blockid: T, - url: String, - ) -> Result { - let provider = - Provider::::try_from(url).expect("could not instantiate HTTP Provider"); - Self::fetch_from(&provider, blockid).await - } - pub async fn fetch_from + Send + Sync>( - provider: &Provider, - blockid: T, - ) -> Result { - let block = provider - .get_block_with_txs(blockid) - .await? 
- .expect("should have been a block"); - let receipts = provider.get_block_receipts(block.number.unwrap()).await?; - - let tx_with_receipt = block - .transactions - .clone() - .into_iter() - .map(|tx| { - let tx_hash = tx.hash(); - let r = receipts - .iter() - .find(|r| r.transaction_hash == tx_hash) - .expect("RPC sending invalid data"); - // TODO remove cloning - TxAndReceipt(tx, r.clone()) - }) - .collect::>(); - - // check transaction root - let memdb = Arc::new(MemoryDB::new(true)); - let mut tx_trie = EthTrie::new(Arc::clone(&memdb)); - for tr in tx_with_receipt.iter() { - tx_trie - .insert(&tr.receipt().transaction_index.rlp_bytes(), &tr.tx().rlp()) - .expect("can't insert tx"); - } - - // check receipt root - let memdb = Arc::new(MemoryDB::new(true)); - let mut receipts_trie = EthTrie::new(Arc::clone(&memdb)); - for tr in tx_with_receipt.iter() { - if tr.tx().transaction_index.unwrap() == U64::from(0) { - println!( - "Ethers: Index {} -> {:?}", - tr.tx().transaction_index.unwrap(), - tr.receipt_rlp().to_vec() - ); - } - receipts_trie - .insert( - &tr.receipt().transaction_index.rlp_bytes(), - // TODO: make getter value for rlp encoding - &tr.receipt_rlp(), - ) - .expect("can't insert tx"); - } - let computed = tx_trie.root_hash().expect("root hash problem"); - let expected = block.transactions_root; - assert_eq!(expected, computed); - - let computed = receipts_trie.root_hash().expect("root hash problem"); - let expected = block.receipts_root; - assert_eq!(expected, computed); - - Ok(BlockData { - block, - tx_trie, - receipts_trie, - txs: tx_with_receipt, - }) - } - } -} - #[cfg(test)] mod test { #[cfg(feature = "ci")] @@ -948,7 +817,6 @@ mod test { rlp::Decodable, sol, }; - use alloy_multicall::Multicall; use eth_trie::Nibbles; use ethereum_types::U64; @@ -978,11 +846,12 @@ mod test { // check if we compute the RLP correctly now block.check()?; let mut be = tryethers::BlockData::fetch(bn, url).await?; + be.check()?; let er = be.receipts_trie.root_hash()?; 
let ar = block.receipts_trie.root_hash()?; assert_eq!(er, ar); // dissect one receipt entry in the trie - let tx_receipt = block.txs.first().clone().unwrap(); + let tx_receipt = block.txs.first().unwrap(); // https://sepolia.etherscan.io/tx/0x9bef12fafd3962b0e0d66b738445d6ea2c1f3daabe10c889bd1916acc75d698b#eventlog println!( "Looking at tx hash on sepolia: {}", @@ -1051,7 +920,7 @@ mod test { // final is tokenid - not in topic let expected_data = "000000000000000000000000000000000000000000115eec47f6cf7e35000000"; let log_data: Vec = log_state.val_at(2).context("can't decode log data")?; - let found_data = hex::encode(&left_pad32( + let found_data = hex::encode(left_pad32( &log_data.into_iter().take(32).collect::>(), )); assert_eq!(expected_data, found_data); @@ -1128,13 +997,6 @@ mod test { // Deploy the contract using anvil let contract = EventEmitter::deploy(rpc.clone()).await?; - // (0..10).for_each(|j| { - // match i % 2 { - // 0 => multicall.add_call(), - // 1 => contract.twoEmits().into_transaction_request(), - // _ => unreachable!(), - // } - // }); let tx_reqs = (0..10) .map(|i| match i % 2 { 0 => contract.testEmit().into_transaction_request(), @@ -1190,7 +1052,7 @@ mod test { .ok_or(anyhow!("Could not get block test"))?; let receipt_hash = block.header().receipts_root; let proofs = receipt_query - .query_receipt_proofs(&rpc.root(), BlockNumberOrTag::Number(block_number)) + .query_receipt_proofs(rpc.root(), BlockNumberOrTag::Number(block_number)) .await?; // Now for each transaction we fetch the block, then get the MPT Trie proof that the receipt is included and verify it @@ -1529,4 +1391,164 @@ mod test { rlp.append(inner); } } + // for compatibility check with alloy + mod tryethers { + + use std::sync::Arc; + + use anyhow::Result; + use eth_trie::{EthTrie, MemoryDB, Trie}; + use ethers::{ + providers::{Http, Middleware, Provider}, + types::{BlockId, Bytes, Transaction, TransactionReceipt, U64}, + }; + use rlp::{Encodable, RlpStream}; + + /// A wrapper 
around a transaction and its receipt. The receipt is used to filter + /// bad transactions, so we only compute over valid transactions. + pub struct TxAndReceipt(Transaction, TransactionReceipt); + + impl TxAndReceipt { + pub fn tx(&self) -> &Transaction { + &self.0 + } + pub fn receipt(&self) -> &TransactionReceipt { + &self.1 + } + pub fn tx_rlp(&self) -> Bytes { + self.0.rlp() + } + // TODO: this should be upstreamed to ethers-rs + pub fn receipt_rlp(&self) -> Bytes { + let tx_type = self.tx().transaction_type; + let mut rlp = RlpStream::new(); + rlp.begin_unbounded_list(); + match &self.1.status { + Some(s) if s.as_u32() == 1 => rlp.append(s), + _ => rlp.append_empty_data(), + }; + rlp.append(&self.1.cumulative_gas_used) + .append(&self.1.logs_bloom) + .append_list(&self.1.logs); + + rlp.finalize_unbounded_list(); + let rlp_bytes: Bytes = rlp.out().freeze().into(); + let mut encoded = vec![]; + match tx_type { + // EIP-2930 (0x01) + Some(x) if x == U64::from(0x1) => { + encoded.extend_from_slice(&[0x1]); + encoded.extend_from_slice(rlp_bytes.as_ref()); + encoded.into() + } + // EIP-1559 (0x02) + Some(x) if x == U64::from(0x2) => { + encoded.extend_from_slice(&[0x2]); + encoded.extend_from_slice(rlp_bytes.as_ref()); + encoded.into() + } + _ => rlp_bytes, + } + } + } + /// Structure containing the block header and its transactions / receipts. Amongst other things, + /// it is used to create a proof of inclusion for any transaction inside this block. 
+ pub struct BlockData { + pub block: ethers::types::Block, + // TODO: add generics later - this may be re-used amongst different workers + pub tx_trie: EthTrie, + pub receipts_trie: EthTrie, + } + + impl BlockData { + pub async fn fetch + Send + Sync>( + blockid: T, + url: String, + ) -> Result { + let provider = + Provider::::try_from(url).expect("could not instantiate HTTP Provider"); + Self::fetch_from(&provider, blockid).await + } + pub async fn fetch_from + Send + Sync>( + provider: &Provider, + blockid: T, + ) -> Result { + let block = provider + .get_block_with_txs(blockid) + .await? + .expect("should have been a block"); + let receipts = provider.get_block_receipts(block.number.unwrap()).await?; + + let tx_with_receipt = block + .transactions + .clone() + .into_iter() + .map(|tx| { + let tx_hash = tx.hash(); + let r = receipts + .iter() + .find(|r| r.transaction_hash == tx_hash) + .expect("RPC sending invalid data"); + // TODO remove cloning + TxAndReceipt(tx, r.clone()) + }) + .collect::>(); + + // check transaction root + let memdb = Arc::new(MemoryDB::new(true)); + let mut tx_trie = EthTrie::new(Arc::clone(&memdb)); + for tr in tx_with_receipt.iter() { + tx_trie + .insert(&tr.receipt().transaction_index.rlp_bytes(), &tr.tx_rlp()) + .expect("can't insert tx"); + } + + // check receipt root + let memdb = Arc::new(MemoryDB::new(true)); + let mut receipts_trie = EthTrie::new(Arc::clone(&memdb)); + for tr in tx_with_receipt.iter() { + if tr.tx().transaction_index.unwrap() == U64::from(0) { + println!( + "Ethers: Index {} -> {:?}", + tr.tx().transaction_index.unwrap(), + tr.receipt_rlp().to_vec() + ); + } + receipts_trie + .insert( + &tr.receipt().transaction_index.rlp_bytes(), + // TODO: make getter value for rlp encoding + &tr.receipt_rlp(), + ) + .expect("can't insert tx"); + } + let computed = tx_trie.root_hash().expect("root hash problem"); + let expected = block.transactions_root; + assert_eq!(expected, computed); + + let computed = 
receipts_trie.root_hash().expect("root hash problem"); + let expected = block.receipts_root; + assert_eq!(expected, computed); + + Ok(BlockData { + block, + tx_trie, + receipts_trie, + }) + } + + // recompute the receipts trie by first converting all receipts form RPC type to consensus type + // since in Alloy these are two different types and RLP functions are only implemented for + // consensus ones. + pub fn check(&mut self) -> Result<()> { + let computed = self.receipts_trie.root_hash()?; + let tx_computed = self.tx_trie.root_hash()?; + let expected = self.block.receipts_root; + let tx_expected = self.block.transactions_root; + assert_eq!(expected.0, computed.0); + assert_eq!(tx_expected.0, tx_computed.0); + Ok(()) + } + } + } } diff --git a/mp2-common/src/mpt_sequential/mod.rs b/mp2-common/src/mpt_sequential/mod.rs index 4606402de..522c61d67 100644 --- a/mp2-common/src/mpt_sequential/mod.rs +++ b/mp2-common/src/mpt_sequential/mod.rs @@ -361,7 +361,7 @@ where /// * The key where to lookup the next nibble and thus the hash stored at /// nibble position in the branch node. /// * RLP headers of the current node. -/// And it returns: +/// And it returns: /// * New key with the pointer moved. /// * The child hash / value of the node. /// * A boolean that must be true if the given node is a leaf or an extension. diff --git a/mp2-common/src/rlp.rs b/mp2-common/src/rlp.rs index 3c50eb8cc..01d6824ab 100644 --- a/mp2-common/src/rlp.rs +++ b/mp2-common/src/rlp.rs @@ -16,7 +16,7 @@ const MAX_LEN_BYTES: usize = 2; /// Maximum size a key can have inside a MPT node. /// 33 bytes because key is compacted encoded, so it can add up to 1 byte more. -const MAX_ENC_KEY_LEN: usize = 33; +pub const MAX_ENC_KEY_LEN: usize = 33; /// Simply the maximum number of nibbles a key can have. 
pub const MAX_KEY_NIBBLE_LEN: usize = 64; diff --git a/mp2-test/src/mpt_sequential.rs b/mp2-test/src/mpt_sequential.rs index 3ab1346e1..70080429a 100644 --- a/mp2-test/src/mpt_sequential.rs +++ b/mp2-test/src/mpt_sequential.rs @@ -168,7 +168,7 @@ pub fn generate_receipt_proofs() -> Vec { let receipt_query = ReceiptQuery::new(*event_contract.address(), events[0].clone()); receipt_query - .query_receipt_proofs(&rpc.root(), BlockNumberOrTag::Number(block_number)) + .query_receipt_proofs(rpc.root(), BlockNumberOrTag::Number(block_number)) .await .unwrap() }) diff --git a/mp2-v1/src/receipt_extraction/leaf.rs b/mp2-v1/src/receipt_extraction/leaf.rs index 8fca8a1c5..429f46bd9 100644 --- a/mp2-v1/src/receipt_extraction/leaf.rs +++ b/mp2-v1/src/receipt_extraction/leaf.rs @@ -12,7 +12,7 @@ use mp2_common::{ mpt_sequential::{MPTReceiptLeafNode, ReceiptKeyWire, MAX_TX_KEY_NIBBLE_LEN, PAD_LEN}, public_inputs::PublicInputCommon, types::{CBuilder, GFp}, - utils::{Endianness, PackerTarget}, + utils::{less_than, less_than_or_equal_to, Endianness, PackerTarget}, D, F, }; use plonky2::{ @@ -129,17 +129,66 @@ impl EventWires { &self, b: &mut CBuilder, value: &VectorWire, - status_offset: Target, relevant_logs_offsets: &VectorWire, ) -> CurveTarget { let t = b._true(); + let one = b.one(); + let two = b.two(); let zero = b.zero(); let curve_zero = b.curve_zero(); let mut points = Vec::new(); - // Enforce status is true. - let status = value.arr.random_access_large_array(b, status_offset); - b.connect(status, t.target); + // Extract the gas used in the transaction, since the position of this can vary because it is after the key + // we have to prove we extracted from the correct location. 
+ let header_len_len = b.add_const( + value.arr[0], + F::from_canonical_u64(1) - F::from_canonical_u64(247), + ); + // let key_header = value.arr.random_access_large_array(b, header_len_len); + // let key_header_len = b.add_const(key_header, F::ONE - F::from_canonical_u64(128)); + + // This is the start of the string that is the rlp encoded receipt (a string since the first element is transaction type). + // From here we subtract 183 to get the length of the length, then the encoded gas used is at length of length + 1 (for tx type) + (1 + list length) + // + 1 (for status) + 1 to get the header for the gas used string. + let string_offset = b.add(one, header_len_len); + let string_header = value.arr.random_access_large_array(b, string_offset); + let string_len_len = b.add_const(string_header, -F::from_canonical_u64(183)); + + let list_offset = b.add_many([string_offset, string_len_len, two]); + let list_header = value.arr.random_access_large_array(b, list_offset); + + let gas_used_offset_lo = b.add_const( + list_header, + F::from_canonical_u64(2) - F::from_canonical_u64(247), + ); + let gas_used_offset = b.add(gas_used_offset_lo, list_offset); + + let gas_used_header = value.arr.random_access_large_array(b, gas_used_offset); + let gas_used_len = b.add_const(gas_used_header, -F::from_canonical_u64(128)); + + let initial_gas_index = b.add(gas_used_offset, one); + let final_gas_index = b.add(gas_used_offset, gas_used_len); + + let combiner = b.constant(F::from_canonical_u64(1 << 8)); + + let gas_used = (0..3u64).fold(zero, |acc, i| { + let access_index = b.add_const(initial_gas_index, F::from_canonical_u64(i)); + let array_value = value.arr.random_access_large_array(b, access_index); + + // If we have extracted a value from an index in the desired range (so lte final_gas_index) we want to add it. 
+ // If access_index was strictly less than final_gas_index we need to multiply by 1 << 8 after (since the encoding is big endian) + let valid = less_than_or_equal_to(b, access_index, final_gas_index, 12); + let need_scalar = less_than(b, access_index, final_gas_index, 12); + + let to_add = b.select(valid, array_value, zero); + + let scalar = b.select(need_scalar, combiner, one); + let tmp = b.add(acc, to_add); + b.mul(tmp, scalar) + }); + + // Map the gas used to a curve point for the value digest, gas used is the first column so use one as its column id. + let gas_digest = b.map_to_curve_point(&[zero, gas_used]); for log_offset in relevant_logs_offsets.arr.arr { // Extract the address bytes @@ -179,13 +228,17 @@ impl EventWires { // For each column we use the `column_id` field to tell if its a dummy or not, zero indicates a dummy. let dummy_column = b.is_equal(log_column.column_id, zero); - let selector = b.and(dummy_column, dummy); - let selected_point = b.select_curve_point(selector, curve_zero, data_digest); + let selected_point = b.select_curve_point(dummy_column, curve_zero, data_digest); + let selected_point = b.select_curve_point(dummy, curve_zero, selected_point); + points.push(selected_point); } - } + let gas_select = b.select_curve_point(dummy, curve_zero, gas_digest); + points.push(gas_select); + } + println!("points length: {}", points.len()); b.add_curve_point(&points) } } @@ -218,13 +271,9 @@ where let root = wires.root; // For each relevant log in the transaction we have to verify it lines up with the event we are monitoring for - let mut dv = event_wires.verify_logs_and_extract_values::( - b, - &node, - status_offset, - &relevant_logs_offset, - ); - + let mut dv = + event_wires.verify_logs_and_extract_values::(b, &node, &relevant_logs_offset); + println!("dv target: {:?}", dv); let value_id = b.map_to_curve_point(&[index]); dv = b.add_curve_point(&[value_id, dv]); @@ -356,17 +405,16 @@ where wires .address - .assign(pw, &address.0.map(|byte| 
GFp::from_canonical_u8(byte))); + .assign(pw, &address.0.map(GFp::from_canonical_u8)); pw.set_target( wires.add_rel_offset, F::from_canonical_usize(add_rel_offset), ); - wires.event_signature.assign( - pw, - &event_signature.map(|byte| GFp::from_canonical_u8(byte)), - ); + wires + .event_signature + .assign(pw, &event_signature.map(GFp::from_canonical_u8)); pw.set_target( wires.sig_rel_offset, @@ -376,12 +424,12 @@ where wires .topics .iter() - .zip(topics.into_iter()) + .zip(topics) .for_each(|(topic_wire, topic)| topic_wire.assign(pw, topic)); wires .data .iter() - .zip(data.into_iter()) + .zip(data) .for_each(|(data_wire, data)| data_wire.assign(pw, data)); } } @@ -415,7 +463,9 @@ impl CircuitLogicWires for ReceiptLeafWires, wires: &Self::Wires) { - self.c.assign(pw, &wires); + self.c.assign(pw, wires); } } #[test] @@ -450,18 +500,27 @@ mod tests { let receipt_proof_infos = generate_receipt_proofs(); let info = receipt_proof_infos.first().unwrap().clone(); + let c = ReceiptLeafCircuit:: { info: info.clone() }; let test_circuit = TestReceiptLeafCircuit { c }; + let node = info.mpt_proof.last().unwrap().clone(); + let proof = run_circuit::(test_circuit); let pi = PublicInputs::new(&proof.public_inputs); - let node = info.mpt_proof.last().unwrap().clone(); + // Check the output hash { let exp_hash = keccak256(&node).pack(Endianness::Little); assert_eq!(pi.root_hash(), exp_hash); } + // Check value digest + { + let exp_digest = compute_receipt_leaf_value_digest(&info); + assert_eq!(pi.values_digest(), exp_digest.to_weierstrass()); + } + // Check metadata digest { let exp_digest = compute_receipt_leaf_metadata_digest(&info.event_log_info); diff --git a/mp2-v1/src/receipt_extraction/mod.rs b/mp2-v1/src/receipt_extraction/mod.rs index a21f7fc41..004a9cfea 100644 --- a/mp2-v1/src/receipt_extraction/mod.rs +++ b/mp2-v1/src/receipt_extraction/mod.rs @@ -1,8 +1,14 @@ pub mod leaf; pub mod public_inputs; +use alloy::{consensus::TxReceipt, primitives::IntoLogData}; + use 
mp2_common::{ - digest::Digest, eth::EventLogInfo, group_hashing::map_to_curve_point, types::GFp, + digest::Digest, + eth::{EventLogInfo, ReceiptProofInfo}, + group_hashing::map_to_curve_point, + types::GFp, + utils::{Packer, ToFields}, }; use plonky2::field::types::Field; @@ -31,3 +37,55 @@ pub fn compute_receipt_leaf_metadata_digest(event: &EventLogInfo) -> Digest { .collect::>(); map_to_curve_point(&data) } + +/// Calculate `value_digest` for receipt leaf. +pub fn compute_receipt_leaf_value_digest(receipt_proof_info: &ReceiptProofInfo) -> Digest { + let receipt = receipt_proof_info.to_receipt().unwrap(); + let gas_used = receipt.cumulative_gas_used(); + + // Only use events that we are indexing + let address = receipt_proof_info.event_log_info.address; + let sig = receipt_proof_info.event_log_info.event_signature; + + let index_digest = map_to_curve_point(&[GFp::from_canonical_u64(receipt_proof_info.tx_index)]); + + let gas_digest = map_to_curve_point(&[GFp::ZERO, GFp::from_noncanonical_u128(gas_used)]); + + receipt + .logs() + .iter() + .cloned() + .filter_map(|log| { + let log_address = log.address; + let log_data = log.to_log_data(); + let (topics, data) = log_data.split(); + + if log_address == address && topics[0].0 == sig { + let topics_field = topics + .iter() + .skip(1) + .map(|fixed| fixed.0.pack(mp2_common::utils::Endianness::Big).to_fields()) + .collect::>(); + let data_fixed_bytes = data + .chunks(32) + .map(|chunk| chunk.pack(mp2_common::utils::Endianness::Big).to_fields()) + .take(2) + .collect::>(); + + Some( + topics_field + .iter() + .chain(data_fixed_bytes.iter()) + .enumerate() + .fold(gas_digest, |acc, (i, fixed)| { + let mut values = vec![GFp::from_canonical_usize(i) + GFp::ONE]; + values.extend_from_slice(fixed); + acc + map_to_curve_point(&values) + }), + ) + } else { + None + } + }) + .fold(index_digest, |acc, p| acc + p) +} diff --git a/mp2-v1/src/receipt_extraction/public_inputs.rs b/mp2-v1/src/receipt_extraction/public_inputs.rs index 
e4fc8d5b9..2916c32bb 100644 --- a/mp2-v1/src/receipt_extraction/public_inputs.rs +++ b/mp2-v1/src/receipt_extraction/public_inputs.rs @@ -46,7 +46,7 @@ pub struct PublicInputArgs<'a> { pub(crate) dm: CurveTarget, } -impl<'a> PublicInputCommon for PublicInputArgs<'a> { +impl PublicInputCommon for PublicInputArgs<'_> { const RANGES: &'static [PublicInputRange] = &[H_RANGE, K_RANGE, T_RANGE, DV_RANGE, DM_RANGE]; fn register_args(&self, cb: &mut CBuilder) { @@ -61,7 +61,7 @@ impl<'a> PublicInputArgs<'a> { } } -impl<'a> PublicInputArgs<'a> { +impl PublicInputArgs<'_> { pub fn generic_register_args(&self, cb: &mut CBuilder) { self.h.register_as_public_input(cb); self.k.register_as_input(cb); diff --git a/mp2-v1/tests/common/block_extraction.rs b/mp2-v1/tests/common/block_extraction.rs index 933823e56..51b50c5c1 100644 --- a/mp2-v1/tests/common/block_extraction.rs +++ b/mp2-v1/tests/common/block_extraction.rs @@ -1,7 +1,7 @@ use alloy::primitives::U256; use anyhow::Result; use mp2_common::{ - eth::{left_pad_generic, BlockUtil, Rlpable}, + eth::Rlpable, proof::deserialize_proof, utils::{Endianness, Packer, ToFields}, C, D, F, From d0715759a7c9d7debb14bf8ae5323ebc35821bf3 Mon Sep 17 00:00:00 2001 From: Zack Youell Date: Wed, 27 Nov 2024 12:13:39 +0000 Subject: [PATCH 10/47] Moved receipt value extraction location --- mp2-common/src/eth.rs | 4 +- mp2-v1/src/api.rs | 12 +- mp2-v1/src/block_extraction/circuit.rs | 38 +++- mp2-v1/src/block_extraction/mod.rs | 18 +- mp2-v1/src/final_extraction/api.rs | 28 ++- mp2-v1/src/final_extraction/mod.rs | 1 + .../src/final_extraction/receipt_circuit.rs | 213 ++++++++++++++++++ mp2-v1/src/lib.rs | 1 - mp2-v1/src/receipt_extraction/mod.rs | 91 -------- .../src/receipt_extraction/public_inputs.rs | 170 -------------- mp2-v1/src/values_extraction/api.rs | 26 ++- .../leaf_receipt.rs} | 76 ++++--- mp2-v1/src/values_extraction/mod.rs | 89 +++++++- mp2-v1/tests/common/context.rs | 17 +- mp2-v1/tests/integrated_tests.rs | 4 +- 15 files changed, 
459 insertions(+), 329 deletions(-) create mode 100644 mp2-v1/src/final_extraction/receipt_circuit.rs delete mode 100644 mp2-v1/src/receipt_extraction/mod.rs delete mode 100644 mp2-v1/src/receipt_extraction/public_inputs.rs rename mp2-v1/src/{receipt_extraction/leaf.rs => values_extraction/leaf_receipt.rs} (88%) diff --git a/mp2-common/src/eth.rs b/mp2-common/src/eth.rs index 927bffb0d..c23ee5ed4 100644 --- a/mp2-common/src/eth.rs +++ b/mp2-common/src/eth.rs @@ -230,7 +230,7 @@ impl TryFrom<&Log> for EventLogInfo { .take(remaining_topics) .for_each(|(j, info)| { *info = LogDataInfo { - column_id: j + 1, + column_id: j + 2, rel_byte_offset: current_topic_offset, len: 32, }; @@ -263,7 +263,7 @@ impl TryFrom<&Log> for EventLogInfo { let chunk_header = chunk_rlp.payload_info()?; if chunk_header.value_len <= 32 { data[j] = LogDataInfo { - column_id: remaining_topics + 1 + j, + column_id: remaining_topics + 2 + j, rel_byte_offset: current_topic_offset + additional_offset + chunk_header.header_len, diff --git a/mp2-v1/src/api.rs b/mp2-v1/src/api.rs index e0863eaf3..17a68c3e1 100644 --- a/mp2-v1/src/api.rs +++ b/mp2-v1/src/api.rs @@ -101,10 +101,9 @@ impl /// Instantiate the circuits employed for the pre-processing stage of LPN, /// returning their corresponding parameters -pub fn build_circuits_params( -) -> PublicParameters { - sanity_check(); - +pub fn build_circuits_params( + extraction_type: block_extraction::ExtractionType, +) -> PublicParameters { log::info!("Building contract_extraction parameters..."); let contract_extraction = contract_extraction::build_circuits_params(); log::info!("Building length_extraction parameters..."); @@ -112,7 +111,7 @@ pub fn build_circuits_params( length_circuit_set, ) } + final_extraction::CircuitInput::Receipt(input) => params + .final_extraction + .generate_receipt_proof(input, value_circuit_set), } } CircuitInput::CellsTree(input) => verifiable_db::api::generate_proof( diff --git a/mp2-v1/src/block_extraction/circuit.rs 
b/mp2-v1/src/block_extraction/circuit.rs index ceb6df077..4ba2c643d 100644 --- a/mp2-v1/src/block_extraction/circuit.rs +++ b/mp2-v1/src/block_extraction/circuit.rs @@ -22,6 +22,12 @@ const HEADER_PARENT_HASH_OFFSET: usize = 4; /// State root offset in RLP encoded header. const HEADER_STATE_ROOT_OFFSET: usize = 91; +/// Transaction root offset in RLP encoded header. +const HEADER_TRANSACTION_ROOT_OFFSET: usize = 124; + +/// Receipt root offset in RLP encoded header. +const HEADER_RECEIPT_ROOT_OFFSET: usize = 157; + /// Block number offset in RLP encoded header. const HEADER_BLOCK_NUMBER_OFFSET: usize = 449; /// We define u64 as the maximum block mnumber ever to be reached @@ -50,6 +56,25 @@ pub struct BlockCircuit { pub rlp_headers: Vec, } +/// Enum that represents the extraction type, storage, receipt or transaction +#[derive(Debug, Clone, Serialize, Deserialize, Copy)] +pub enum ExtractionType { + Storage, + Receipt, + Transaction, +} + +impl ExtractionType { + /// This function returns the offset of the relevant root for that type of extraction + pub fn offset(&self) -> usize { + match self { + ExtractionType::Storage => HEADER_STATE_ROOT_OFFSET, + ExtractionType::Receipt => HEADER_RECEIPT_ROOT_OFFSET, + ExtractionType::Transaction => HEADER_TRANSACTION_ROOT_OFFSET, + } + } +} + impl BlockCircuit { /// Creates a new instance of the circuit. pub fn new(rlp_headers: Vec) -> Result { @@ -61,7 +86,7 @@ impl BlockCircuit { } /// Build the circuit, assigning the public inputs and returning the internal wires. 
- pub fn build(cb: &mut CBuilder) -> BlockWires { + pub fn build(cb: &mut CBuilder, extraction_type: ExtractionType) -> BlockWires { // already right padded to right size for keccak let rlp_headers = VectorWire::new(cb); @@ -69,15 +94,16 @@ impl BlockCircuit { rlp_headers.assert_bytes(cb); // extract the previous block hash from the RLP header - let prev_bh = Array::::from_array(create_array(|i| { + let prev_bh: Array = Array::::from_array(create_array(|i| { rlp_headers.arr.arr[HEADER_PARENT_HASH_OFFSET + i] })); let packed_prev_bh = prev_bh.pack(cb, Endianness::Little).downcast_to_targets(); // extract the state root of the block - let state_root = Array::::from_array(create_array(|i| { - rlp_headers.arr.arr[HEADER_STATE_ROOT_OFFSET + i] - })); + let state_root: Array = + Array::::from_array(create_array(|i| { + rlp_headers.arr.arr[extraction_type.offset() + i] + })); let state_root_packed = state_root.pack(cb, Endianness::Little); // compute the block hash @@ -200,7 +226,7 @@ mod test { type Wires = BlockWires; fn build(cb: &mut CBuilder) -> Self::Wires { - Self::build(cb) + Self::build(cb, super::ExtractionType::Storage) } fn prove(&self, pw: &mut PartialWitness, wires: &Self::Wires) { diff --git a/mp2-v1/src/block_extraction/mod.rs b/mp2-v1/src/block_extraction/mod.rs index 9515ea5ef..af268f2b9 100644 --- a/mp2-v1/src/block_extraction/mod.rs +++ b/mp2-v1/src/block_extraction/mod.rs @@ -15,6 +15,7 @@ use mp2_common::{ }; use serde::{Deserialize, Serialize}; +pub use circuit::ExtractionType; pub use public_inputs::PublicInputs; pub struct CircuitInput(Vec); impl CircuitInput { @@ -31,15 +32,15 @@ pub struct PublicParameters { } /// Returns the parameters necessary to prove block extraction circuits -pub fn build_circuits_params() -> PublicParameters { - PublicParameters::build() +pub fn build_circuits_params(extraction_type: ExtractionType) -> PublicParameters { + PublicParameters::build(extraction_type) } impl PublicParameters { - pub fn build() -> Self { + pub 
fn build(extraction_type: ExtractionType) -> Self { let config = default_config(); let mut cb = CircuitBuilder::new(config); - let wires = circuit::BlockCircuit::build(&mut cb); + let wires = circuit::BlockCircuit::build(&mut cb, extraction_type); let cd = cb.build(); Self { circuit_data: cd, @@ -76,10 +77,13 @@ mod test { }; use mp2_test::eth::get_sepolia_url; - use crate::block_extraction::{public_inputs::PublicInputs, PublicParameters}; + use crate::block_extraction::{ + circuit::ExtractionType, public_inputs::PublicInputs, PublicParameters, + }; + #[tokio::test] - async fn test_api() -> Result<()> { - let params = PublicParameters::build(); + async fn test_api_storage() -> Result<()> { + let params = PublicParameters::build(ExtractionType::Storage); let url = get_sepolia_url(); let provider = ProviderBuilder::new().on_http(url.parse().unwrap()); let block_number = BlockNumberOrTag::Latest; diff --git a/mp2-v1/src/final_extraction/api.rs b/mp2-v1/src/final_extraction/api.rs index 064ded1f4..45f88831a 100644 --- a/mp2-v1/src/final_extraction/api.rs +++ b/mp2-v1/src/final_extraction/api.rs @@ -11,6 +11,7 @@ use super::{ base_circuit::BaseCircuitInput, lengthed_circuit::LengthedRecursiveWires, merge_circuit::{MergeTable, MergeTableRecursiveWires}, + receipt_circuit::{ReceiptCircuitInput, ReceiptCircuitProofInputs, ReceiptRecursiveWires}, simple_circuit::SimpleCircuitRecursiveWires, BaseCircuitProofInputs, LengthedCircuit, MergeCircuit, PublicInputs, SimpleCircuit, }; @@ -20,6 +21,7 @@ pub enum CircuitInput { Simple(SimpleCircuitInput), Lengthed(LengthedCircuitInput), MergeTable(MergeCircuitInput), + Receipt(ReceiptCircuitInput), } #[derive(Clone, Debug)] pub struct FinalExtractionBuilderParams { @@ -51,6 +53,7 @@ pub struct PublicParameters { simple: CircuitWithUniversalVerifier, lengthed: CircuitWithUniversalVerifier, merge: CircuitWithUniversalVerifier, + receipt: CircuitWithUniversalVerifier, circuit_set: RecursiveCircuits, } @@ -76,12 +79,14 @@ impl 
PublicParameters { ); let simple = builder.build_circuit(builder_params.clone()); let lengthed = builder.build_circuit(builder_params.clone()); - let merge = builder.build_circuit(builder_params); + let merge = builder.build_circuit(builder_params.clone()); + let receipt = builder.build_circuit(builder_params); let circuits = vec![ prepare_recursive_circuit_for_circuit_set(&simple), prepare_recursive_circuit_for_circuit_set(&lengthed), prepare_recursive_circuit_for_circuit_set(&merge), + prepare_recursive_circuit_for_circuit_set(&receipt), ]; let circuit_set = RecursiveCircuits::new(circuits); @@ -90,6 +95,7 @@ impl PublicParameters { simple, lengthed, merge, + receipt, circuit_set, } } @@ -155,6 +161,19 @@ impl PublicParameters { ProofWithVK::serialize(&(proof, self.lengthed.circuit_data().verifier_only.clone()).into()) } + pub(crate) fn generate_receipt_proof( + &self, + input: ReceiptCircuitInput, + value_circuit_set: &RecursiveCircuits, + ) -> Result> { + let receipt_input = + ReceiptCircuitProofInputs::new_from_proofs(input, value_circuit_set.clone()); + let proof = self + .circuit_set + .generate_proof(&self.receipt, [], [], receipt_input)?; + ProofWithVK::serialize(&(proof, self.receipt.circuit_data().verifier_only.clone()).into()) + } + pub(crate) fn get_circuit_set(&self) -> &RecursiveCircuits { &self.circuit_set } @@ -219,6 +238,13 @@ impl CircuitInput { let length_proof = ProofWithVK::deserialize(&length_proof)?; Ok(Self::Lengthed(LengthedCircuitInput { base, length_proof })) } + + pub fn new_receipt_input(block_proof: Vec, value_proof: Vec) -> Result { + Ok(Self::Receipt(ReceiptCircuitInput::new( + block_proof, + value_proof, + )?)) + } } #[cfg(test)] diff --git a/mp2-v1/src/final_extraction/mod.rs b/mp2-v1/src/final_extraction/mod.rs index cb6e1c6a4..3d78f3af6 100644 --- a/mp2-v1/src/final_extraction/mod.rs +++ b/mp2-v1/src/final_extraction/mod.rs @@ -3,6 +3,7 @@ mod base_circuit; mod lengthed_circuit; mod merge_circuit; mod public_inputs; +mod 
receipt_circuit; mod simple_circuit; pub use api::{CircuitInput, PublicParameters}; diff --git a/mp2-v1/src/final_extraction/receipt_circuit.rs b/mp2-v1/src/final_extraction/receipt_circuit.rs new file mode 100644 index 000000000..ef536ef83 --- /dev/null +++ b/mp2-v1/src/final_extraction/receipt_circuit.rs @@ -0,0 +1,213 @@ +use mp2_common::{ + default_config, + keccak::{OutputHash, PACKED_HASH_LEN}, + proof::{deserialize_proof, verify_proof_fixed_circuit, ProofWithVK}, + serialization::{deserialize, serialize}, + u256::UInt256Target, + utils::FromTargets, + C, D, F, +}; +use plonky2::{ + field::{goldilocks_field::GoldilocksField, types::Field}, + iop::{ + target::Target, + witness::{PartialWitness, WitnessWrite}, + }, + plonk::{ + circuit_builder::CircuitBuilder, + proof::{ProofWithPublicInputs, ProofWithPublicInputsTarget}, + }, +}; +use plonky2_ecgfp5::gadgets::curve::CurveTarget; +use recursion_framework::{ + circuit_builder::CircuitLogicWires, + framework::{ + RecursiveCircuits, RecursiveCircuitsVerifierGagdet, RecursiveCircuitsVerifierTarget, + }, +}; +use serde::{Deserialize, Serialize}; + +use crate::{block_extraction, values_extraction}; + +use super::api::{FinalExtractionBuilderParams, NUM_IO}; + +use anyhow::Result; + +/// This circuit is more like a gadget. This contains the logic of the common part +/// between all the final extraction circuits. It should not be used on its own. 
+#[derive(Debug, Clone, Copy)] +pub struct ReceiptExtractionCircuit; + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ReceiptExtractionWires { + #[serde(serialize_with = "serialize", deserialize_with = "deserialize")] + pub(crate) dm: CurveTarget, + #[serde(serialize_with = "serialize", deserialize_with = "deserialize")] + pub(crate) dv: CurveTarget, + pub(crate) bh: [Target; PACKED_HASH_LEN], + pub(crate) prev_bh: [Target; PACKED_HASH_LEN], + pub(crate) bn: UInt256Target, +} + +impl ReceiptExtractionCircuit { + pub(crate) fn build( + b: &mut CircuitBuilder, + block_pi: &[Target], + value_pi: &[Target], + ) -> ReceiptExtractionWires { + // TODO: homogeinize the public inputs structs + let block_pi = + block_extraction::public_inputs::PublicInputs::::from_slice(block_pi); + let value_pi = values_extraction::PublicInputs::::new(value_pi); + + let minus_one = b.constant(GoldilocksField::NEG_ONE); + + // enforce the MPT key extraction reached the root + b.connect(value_pi.mpt_key().pointer, minus_one); + + // enforce block_pi.state_root == contract_pi.state_root + block_pi + .state_root() + .enforce_equal(b, &OutputHash::from_targets(value_pi.root_hash_info())); + ReceiptExtractionWires { + dm: value_pi.metadata_digest_target(), + dv: value_pi.values_digest_target(), + bh: block_pi.block_hash_raw().try_into().unwrap(), // safe to unwrap as we give as input the slice of the expected length + prev_bh: block_pi.prev_block_hash_raw().try_into().unwrap(), // safe to unwrap as we give as input the slice of the expected length + bn: block_pi.block_number(), + } + } +} + +/// The wires that are needed for the recursive framework, that concerns verifying the input +/// proofs +#[derive(Serialize, Deserialize, Clone, Debug)] +pub(crate) struct ReceiptRecursiveWires { + /// Wires containing the block and value proof + verification: ReceiptCircuitProofWires, + /// Wires information to check that the value corresponds to the block + consistency: 
ReceiptExtractionWires, +} + +impl CircuitLogicWires for ReceiptRecursiveWires { + type CircuitBuilderParams = FinalExtractionBuilderParams; + + type Inputs = ReceiptCircuitProofInputs; + + const NUM_PUBLIC_INPUTS: usize = NUM_IO; + + fn circuit_logic( + builder: &mut CircuitBuilder, + _verified_proofs: [&plonky2::plonk::proof::ProofWithPublicInputsTarget; 0], + builder_parameters: Self::CircuitBuilderParams, + ) -> Self { + // value proof for table a and value proof for table b = 2 + let verification = ReceiptCircuitProofInputs::build(builder, &builder_parameters); + let consistency = ReceiptExtractionCircuit::build( + builder, + verification.get_block_public_inputs(), + verification.get_value_public_inputs(), + ); + Self { + verification, + consistency, + } + } + + fn assign_input(&self, inputs: Self::Inputs, pw: &mut PartialWitness) -> anyhow::Result<()> { + inputs.assign_proof_targets(pw, &self.verification)?; + Ok(()) + } +} + +/// This parameter struct is not intended to be built on its own +/// but rather as a sub-component of the two final extraction parameters set. +/// This parameter contains the common logic of verifying a block and +/// value proof automatically from the right verification keys / circuit set. 
+#[derive(Serialize, Deserialize, Debug, Clone)] +pub(crate) struct ReceiptCircuitProofWires { + /// single circuit proof extracting block hash, block number, previous hash + /// and receipt root + #[serde(serialize_with = "serialize", deserialize_with = "deserialize")] + block_proof: ProofWithPublicInputsTarget, + /// circuit set extracting the values from receipt trie of the block + value_proof: RecursiveCircuitsVerifierTarget, +} + +pub(crate) const VALUE_SET_NUM_IO: usize = values_extraction::PublicInputs::::TOTAL_LEN; + +#[derive(Clone, Debug)] +pub struct ReceiptCircuitInput { + block_proof: ProofWithPublicInputs, + value_proof: ProofWithVK, +} + +impl ReceiptCircuitInput { + pub(super) fn new(block_proof: Vec, value_proof: Vec) -> Result { + Ok(Self { + block_proof: deserialize_proof(&block_proof)?, + value_proof: ProofWithVK::deserialize(&value_proof)?, + }) + } +} +#[derive(Clone, Debug)] +pub(crate) struct ReceiptCircuitProofInputs { + proofs: ReceiptCircuitInput, + value_circuit_set: RecursiveCircuits, +} + +impl ReceiptCircuitProofInputs { + pub(crate) fn new_from_proofs( + proofs: ReceiptCircuitInput, + value_circuit_set: RecursiveCircuits, + ) -> Self { + Self { + proofs, + value_circuit_set, + } + } + + pub(crate) fn build( + cb: &mut CircuitBuilder, + params: &FinalExtractionBuilderParams, + ) -> ReceiptCircuitProofWires { + let config = default_config(); + let value_proof_wires = RecursiveCircuitsVerifierGagdet::::new( + config.clone(), + ¶ms.value_circuit_set, + ) + .verify_proof_in_circuit_set(cb); + + let block_proof_wires = verify_proof_fixed_circuit(cb, ¶ms.block_vk); + ReceiptCircuitProofWires { + block_proof: block_proof_wires, + value_proof: value_proof_wires, + } + } + + pub(crate) fn assign_proof_targets( + &self, + pw: &mut PartialWitness, + wires: &ReceiptCircuitProofWires, + ) -> anyhow::Result<()> { + pw.set_proof_with_pis_target(&wires.block_proof, &self.proofs.block_proof); + + let (proof, vd) = (&self.proofs.value_proof).into(); + 
wires + .value_proof + .set_target(pw, &self.value_circuit_set, proof, vd)?; + + Ok(()) + } +} + +impl ReceiptCircuitProofWires { + pub(crate) fn get_block_public_inputs(&self) -> &[Target] { + self.block_proof.public_inputs.as_slice() + } + + pub(crate) fn get_value_public_inputs(&self) -> &[Target] { + self.value_proof + .get_public_input_targets::() + } +} diff --git a/mp2-v1/src/lib.rs b/mp2-v1/src/lib.rs index e1eb5132e..e1defbc81 100644 --- a/mp2-v1/src/lib.rs +++ b/mp2-v1/src/lib.rs @@ -26,7 +26,6 @@ pub mod final_extraction; pub mod indexing; pub mod length_extraction; pub mod query; -pub mod receipt_extraction; pub mod values_extraction; #[cfg(test)] diff --git a/mp2-v1/src/receipt_extraction/mod.rs b/mp2-v1/src/receipt_extraction/mod.rs deleted file mode 100644 index 004a9cfea..000000000 --- a/mp2-v1/src/receipt_extraction/mod.rs +++ /dev/null @@ -1,91 +0,0 @@ -pub mod leaf; -pub mod public_inputs; - -use alloy::{consensus::TxReceipt, primitives::IntoLogData}; - -use mp2_common::{ - digest::Digest, - eth::{EventLogInfo, ReceiptProofInfo}, - group_hashing::map_to_curve_point, - types::GFp, - utils::{Packer, ToFields}, -}; -use plonky2::field::types::Field; - -/// Calculate `metadata_digest = D(address || signature || topics)` for receipt leaf. -/// Topics is an array of 5 values (some are dummies), each being `column_id`, `rel_byte_offset` (from the start of the log) -/// and `len`. 
-pub fn compute_receipt_leaf_metadata_digest(event: &EventLogInfo) -> Digest { - let topics_flat = event - .topics - .iter() - .chain(event.data.iter()) - .flat_map(|t| [t.column_id, t.rel_byte_offset, t.len]) - .collect::>(); - - let mut out = Vec::new(); - out.push(event.size); - out.extend_from_slice(&event.address.0.map(|byte| byte as usize)); - out.push(event.add_rel_offset); - out.extend_from_slice(&event.event_signature.map(|byte| byte as usize)); - out.push(event.sig_rel_offset); - out.extend_from_slice(&topics_flat); - - let data = out - .into_iter() - .map(GFp::from_canonical_usize) - .collect::>(); - map_to_curve_point(&data) -} - -/// Calculate `value_digest` for receipt leaf. -pub fn compute_receipt_leaf_value_digest(receipt_proof_info: &ReceiptProofInfo) -> Digest { - let receipt = receipt_proof_info.to_receipt().unwrap(); - let gas_used = receipt.cumulative_gas_used(); - - // Only use events that we are indexing - let address = receipt_proof_info.event_log_info.address; - let sig = receipt_proof_info.event_log_info.event_signature; - - let index_digest = map_to_curve_point(&[GFp::from_canonical_u64(receipt_proof_info.tx_index)]); - - let gas_digest = map_to_curve_point(&[GFp::ZERO, GFp::from_noncanonical_u128(gas_used)]); - - receipt - .logs() - .iter() - .cloned() - .filter_map(|log| { - let log_address = log.address; - let log_data = log.to_log_data(); - let (topics, data) = log_data.split(); - - if log_address == address && topics[0].0 == sig { - let topics_field = topics - .iter() - .skip(1) - .map(|fixed| fixed.0.pack(mp2_common::utils::Endianness::Big).to_fields()) - .collect::>(); - let data_fixed_bytes = data - .chunks(32) - .map(|chunk| chunk.pack(mp2_common::utils::Endianness::Big).to_fields()) - .take(2) - .collect::>(); - - Some( - topics_field - .iter() - .chain(data_fixed_bytes.iter()) - .enumerate() - .fold(gas_digest, |acc, (i, fixed)| { - let mut values = vec![GFp::from_canonical_usize(i) + GFp::ONE]; - 
values.extend_from_slice(fixed); - acc + map_to_curve_point(&values) - }), - ) - } else { - None - } - }) - .fold(index_digest, |acc, p| acc + p) -} diff --git a/mp2-v1/src/receipt_extraction/public_inputs.rs b/mp2-v1/src/receipt_extraction/public_inputs.rs deleted file mode 100644 index 2916c32bb..000000000 --- a/mp2-v1/src/receipt_extraction/public_inputs.rs +++ /dev/null @@ -1,170 +0,0 @@ -//! Public inputs for Receipt Extraction circuits - -use mp2_common::{ - array::Array, - keccak::{OutputHash, PACKED_HASH_LEN}, - mpt_sequential::ReceiptKeyWire, - public_inputs::{PublicInputCommon, PublicInputRange}, - types::{CBuilder, GFp, GFp5, CURVE_TARGET_LEN}, - utils::{convert_point_to_curve_target, convert_slice_to_curve_point, FromTargets}, -}; - -use plonky2::{ - field::{extension::FieldExtension, types::Field}, - iop::target::Target, -}; -use plonky2_ecgfp5::{ - curve::curve::WeierstrassPoint, - gadgets::curve::{CircuitBuilderEcGFp5, CurveTarget}, -}; - -/// The maximum length of a transaction index in a block in nibbles. -/// Theoretically a block can have up to 1428 transactions in Ethereum, which takes 2 bytes to represent. 
-const MAX_INDEX_NIBBLES: usize = 4; -// Contract extraction public Inputs: -/// - `H : [8]F` : packed node hash -const H_RANGE: PublicInputRange = 0..PACKED_HASH_LEN; -/// - `K : [4]F` : Length of the transaction index in nibbles -const K_RANGE: PublicInputRange = H_RANGE.end..H_RANGE.end + MAX_INDEX_NIBBLES; -/// `T : F` pointer in the MPT indicating portion of the key already traversed (from 4 → 0) -const T_RANGE: PublicInputRange = K_RANGE.end..K_RANGE.end + 1; -/// - `DV : Digest[F]` : value digest of all rows to extract -const DV_RANGE: PublicInputRange = T_RANGE.end..T_RANGE.end + CURVE_TARGET_LEN; -/// - `DM : Digest[F]` : metadata digest to extract -const DM_RANGE: PublicInputRange = DV_RANGE.end..DV_RANGE.end + CURVE_TARGET_LEN; - -/// Public inputs for contract extraction -#[derive(Clone, Debug)] -pub struct PublicInputArgs<'a> { - /// The hash of the node - pub(crate) h: &'a OutputHash, - /// The MPT key - pub(crate) k: &'a ReceiptKeyWire, - /// Digest of the values - pub(crate) dv: CurveTarget, - /// The poseidon hash of the metadata - pub(crate) dm: CurveTarget, -} - -impl PublicInputCommon for PublicInputArgs<'_> { - const RANGES: &'static [PublicInputRange] = &[H_RANGE, K_RANGE, T_RANGE, DV_RANGE, DM_RANGE]; - - fn register_args(&self, cb: &mut CBuilder) { - self.generic_register_args(cb) - } -} - -impl<'a> PublicInputArgs<'a> { - /// Create a new public inputs. 
- pub fn new(h: &'a OutputHash, k: &'a ReceiptKeyWire, dv: CurveTarget, dm: CurveTarget) -> Self { - Self { h, k, dv, dm } - } -} - -impl PublicInputArgs<'_> { - pub fn generic_register_args(&self, cb: &mut CBuilder) { - self.h.register_as_public_input(cb); - self.k.register_as_input(cb); - cb.register_curve_public_input(self.dv); - cb.register_curve_public_input(self.dm); - } - - pub fn digest_value(&self) -> CurveTarget { - self.dv - } - - pub fn digest_metadata(&self) -> CurveTarget { - self.dm - } -} - -/// Public inputs wrapper of any proof generated in this module -#[derive(Clone, Debug)] -pub struct PublicInputs<'a, T> { - pub(crate) proof_inputs: &'a [T], -} - -impl PublicInputs<'_, Target> { - /// Get the merkle hash of the subtree this proof has processed. - pub fn root_hash_target(&self) -> OutputHash { - OutputHash::from_targets(self.root_hash_info()) - } - - /// Get the MPT key defined over the public inputs. - pub fn mpt_key(&self) -> ReceiptKeyWire { - let (key, ptr) = self.mpt_key_info(); - ReceiptKeyWire { - key: Array { - arr: std::array::from_fn(|i| key[i]), - }, - pointer: ptr, - } - } - - /// Get the values digest defined over the public inputs. - pub fn values_digest_target(&self) -> CurveTarget { - convert_point_to_curve_target(self.values_digest_info()) - } - - /// Get the metadata digest defined over the public inputs. - pub fn metadata_digest_target(&self) -> CurveTarget { - convert_point_to_curve_target(self.metadata_digest_info()) - } -} - -impl PublicInputs<'_, GFp> { - /// Get the merkle hash of the subtree this proof has processed. - pub fn root_hash(&self) -> Vec { - let hash = self.root_hash_info(); - hash.iter().map(|t| t.0 as u32).collect() - } - - /// Get the values digest defined over the public inputs. 
- pub fn values_digest(&self) -> WeierstrassPoint { - let (x, y, is_inf) = self.values_digest_info(); - - WeierstrassPoint { - x: GFp5::from_basefield_array(std::array::from_fn::(|i| x[i])), - y: GFp5::from_basefield_array(std::array::from_fn::(|i| y[i])), - is_inf: is_inf.is_nonzero(), - } - } - - /// Get the metadata digest defined over the public inputs. - pub fn metadata_digest(&self) -> WeierstrassPoint { - let (x, y, is_inf) = self.metadata_digest_info(); - - WeierstrassPoint { - x: GFp5::from_basefield_array(std::array::from_fn::(|i| x[i])), - y: GFp5::from_basefield_array(std::array::from_fn::(|i| y[i])), - is_inf: is_inf.is_nonzero(), - } - } -} - -impl<'a, T: Copy> PublicInputs<'a, T> { - pub(crate) const TOTAL_LEN: usize = DM_RANGE.end; - - pub fn new(proof_inputs: &'a [T]) -> Self { - Self { proof_inputs } - } - - pub fn root_hash_info(&self) -> &[T] { - &self.proof_inputs[H_RANGE] - } - - pub fn mpt_key_info(&self) -> (&[T], T) { - let key = &self.proof_inputs[K_RANGE]; - let ptr = self.proof_inputs[T_RANGE.start]; - - (key, ptr) - } - - pub fn values_digest_info(&self) -> ([T; 5], [T; 5], T) { - convert_slice_to_curve_point(&self.proof_inputs[DV_RANGE]) - } - - pub fn metadata_digest_info(&self) -> ([T; 5], [T; 5], T) { - convert_slice_to_curve_point(&self.proof_inputs[DM_RANGE]) - } -} diff --git a/mp2-v1/src/values_extraction/api.rs b/mp2-v1/src/values_extraction/api.rs index 40646b685..d5888bb5c 100644 --- a/mp2-v1/src/values_extraction/api.rs +++ b/mp2-v1/src/values_extraction/api.rs @@ -6,15 +6,17 @@ use super::{ gadgets::{column_gadget::filter_table_column_identifiers, metadata_gadget::ColumnsMetadata}, leaf_mapping::{LeafMappingCircuit, LeafMappingWires}, leaf_mapping_of_mappings::{LeafMappingOfMappingsCircuit, LeafMappingOfMappingsWires}, + leaf_receipt::{ReceiptLeafCircuit, ReceiptLeafWires}, leaf_single::{LeafSingleCircuit, LeafSingleWires}, public_inputs::PublicInputs, ColumnId, ColumnInfo, MappingKey, }; -use crate::{api::InputNode, 
MAX_BRANCH_NODE_LEN}; +use crate::{api::InputNode, MAX_BRANCH_NODE_LEN, MAX_LEAF_NODE_LEN, MAX_RECEIPT_LEAF_NODE_LEN}; use anyhow::{bail, ensure, Result}; use log::debug; use mp2_common::{ default_config, + eth::ReceiptProofInfo, mpt_sequential::PAD_LEN, poseidon::H, proof::{ProofInputSerialized, ProofWithVK}, @@ -55,7 +57,7 @@ pub enum CircuitInput< LeafSingle(LeafSingleCircuit), LeafMapping(LeafMappingCircuit), LeafMappingOfMappings(LeafMappingOfMappingsCircuit), - Extension(ExtensionInput), + LeafReceipt(ReceiptLeafCircuit), Branch(BranchInput), } @@ -136,6 +138,11 @@ where }) } + /// Create a circuit input for proving a leaf MPT node of a transaction receipt. + pub fn new_receipt_leaf(info: ReceiptProofInfo) -> Self { + CircuitInput::LeafReceipt(ReceiptLeafCircuit { info }) + } + /// Create a circuit input for proving an extension MPT node. pub fn new_extension(node: Vec, child_proof: Vec) -> Self { CircuitInput::Extension(ExtensionInput { @@ -186,6 +193,7 @@ pub struct PublicParameters< 0, LeafMappingOfMappingsWires, >, + leaf_receipt: CircuitWithUniversalVerifier, extension: CircuitWithUniversalVerifier, #[cfg(not(test))] branches: BranchCircuits, @@ -375,8 +383,8 @@ impl_branch_circuits!(BranchCircuits, 2, 9, 16); impl_branch_circuits!(TestBranchCircuits, 1, 4, 9); /// Number of circuits in the set -/// 3 branch circuits + 1 extension + 1 leaf single + 1 leaf mapping + 1 leaf mapping of mappings -const MAPPING_CIRCUIT_SET_SIZE: usize = 7; +/// 3 branch circuits + 1 extension + 1 leaf single + 1 leaf mapping + 1 leaf mapping of mappings + 1 leaf receipt +const MAPPING_CIRCUIT_SET_SIZE: usize = 8; impl PublicParameters @@ -415,6 +423,10 @@ where LeafMappingOfMappingsWires >(()); + debug!("Building leaf receipt circuit"); + let leaf_receipt = + circuit_builder.build_circuit::>(()); + debug!("Building extension circuit"); let extension = circuit_builder.build_circuit::(()); @@ -428,6 +440,7 @@ where leaf_single.get_verifier_data().circuit_digest, 
leaf_mapping.get_verifier_data().circuit_digest, leaf_mapping_of_mappings.get_verifier_data().circuit_digest, + leaf_receipt.get_verifier_data().circuit_digest, extension.get_verifier_data().circuit_digest, ]; circuits_set.extend(branches.circuit_set()); @@ -437,6 +450,7 @@ where leaf_single, leaf_mapping, leaf_mapping_of_mappings, + leaf_receipt, extension, branches, #[cfg(not(test))] @@ -461,6 +475,10 @@ where CircuitInput::LeafMappingOfMappings(leaf) => set .generate_proof(&self.leaf_mapping_of_mappings, [], [], leaf) .map(|p| (p, self.leaf_mapping_of_mappings.get_verifier_data().clone()).into()), + CircuitInput::LeafReceipt(leaf) => set + .generate_proof(&self.leaf_receipt, [], [], leaf) + .map(|p| (p, self.leaf_receipt.get_verifier_data().clone()).into()), + CircuitInput::Extension(ext) => { let mut child_proofs = ext.get_child_proofs()?; let (child_proof, child_vk) = child_proofs diff --git a/mp2-v1/src/receipt_extraction/leaf.rs b/mp2-v1/src/values_extraction/leaf_receipt.rs similarity index 88% rename from mp2-v1/src/receipt_extraction/leaf.rs rename to mp2-v1/src/values_extraction/leaf_receipt.rs index 429f46bd9..3e8926773 100644 --- a/mp2-v1/src/receipt_extraction/leaf.rs +++ b/mp2-v1/src/values_extraction/leaf_receipt.rs @@ -2,15 +2,16 @@ use crate::MAX_RECEIPT_LEAF_NODE_LEN; -use super::public_inputs::{PublicInputArgs, PublicInputs}; +use super::public_inputs::{PublicInputs, PublicInputsArgs}; use mp2_common::{ array::{Array, Vector, VectorWire}, eth::{EventLogInfo, LogDataInfo, ReceiptProofInfo}, group_hashing::CircuitBuilderGroupHashing, keccak::{InputData, KeccakCircuit, KeccakWires}, - mpt_sequential::{MPTReceiptLeafNode, ReceiptKeyWire, MAX_TX_KEY_NIBBLE_LEN, PAD_LEN}, + mpt_sequential::{MPTKeyWire, MPTReceiptLeafNode, PAD_LEN}, public_inputs::PublicInputCommon, + rlp::MAX_KEY_NIBBLE_LEN, types::{CBuilder, GFp}, utils::{less_than, less_than_or_equal_to, Endianness, PackerTarget}, D, F, @@ -29,7 +30,7 @@ use 
plonky2_ecgfp5::gadgets::curve::{CircuitBuilderEcGFp5, CurveTarget}; use recursion_framework::circuit_builder::CircuitLogicWires; use rlp::Encodable; use serde::{Deserialize, Serialize}; -use std::array::from_fn; +use std::{array::from_fn, iter}; /// Maximum number of logs per transaction we can process const MAX_LOGS_PER_TX: usize = 2; @@ -51,7 +52,7 @@ where /// The offsets of the relevant logs inside the node pub relevant_logs_offset: VectorWire, /// The key in the MPT Trie - pub mpt_key: ReceiptKeyWire, + pub mpt_key: MPTKeyWire, } /// Contains all the information for an [`Event`] in rlp form @@ -85,7 +86,7 @@ pub struct LogColumn { impl LogColumn { /// Convert to an array for metadata digest - pub fn to_array(&self) -> [Target; 3] { + pub fn to_array(self) -> [Target; 3] { [self.column_id, self.rel_byte_offset, self.len] } @@ -130,7 +131,7 @@ impl EventWires { b: &mut CBuilder, value: &VectorWire, relevant_logs_offsets: &VectorWire, - ) -> CurveTarget { + ) -> (Target, CurveTarget) { let t = b._true(); let one = b.one(); let two = b.two(); @@ -144,13 +145,16 @@ impl EventWires { value.arr[0], F::from_canonical_u64(1) - F::from_canonical_u64(247), ); - // let key_header = value.arr.random_access_large_array(b, header_len_len); - // let key_header_len = b.add_const(key_header, F::ONE - F::from_canonical_u64(128)); + let key_header = value.arr.random_access_large_array(b, header_len_len); + let less_than_val = b.constant(F::from_canonical_u8(128)); + let single_value = less_than(b, key_header, less_than_val, 8); + let key_len_maybe = b.add_const(key_header, F::ONE - F::from_canonical_u64(128)); + let key_len = b.select(single_value, one, key_len_maybe); // This is the start of the string that is the rlp encoded receipt (a string since the first element is transaction type). 
// From here we subtract 183 to get the length of the length, then the encoded gas used is at length of length + 1 (for tx type) + (1 + list length) // + 1 (for status) + 1 to get the header for the gas used string. - let string_offset = b.add(one, header_len_len); + let string_offset = b.add(key_len, header_len_len); let string_header = value.arr.random_access_large_array(b, string_offset); let string_len_len = b.add_const(string_header, -F::from_canonical_u64(183)); @@ -190,7 +194,9 @@ impl EventWires { // Map the gas used to a curve point for the value digest, gas used is the first column so use one as its column id. let gas_digest = b.map_to_curve_point(&[zero, gas_used]); - for log_offset in relevant_logs_offsets.arr.arr { + // We also keep track of the number of real logs we process as each log forms a row in our table + let mut n = zero; + for (index, log_offset) in relevant_logs_offsets.arr.arr.into_iter().enumerate() { // Extract the address bytes let address_start = b.add(log_offset, self.add_rel_offset); @@ -234,12 +240,22 @@ impl EventWires { points.push(selected_point); } - + // If this is a real row we record the gas used in the transaction let gas_select = b.select_curve_point(dummy, curve_zero, gas_digest); points.push(gas_select); + + // We also keep track of which log this is in the receipt to avoid having identical rows in the table in the case + // that the event we are tracking can be emitted multiple times in the same transaction but has no topics or data. 
+ let log_number = b.constant(F::from_canonical_usize(index + 1)); + let log_no_digest = b.map_to_curve_point(&[one, log_number]); + let log_no_select = b.select_curve_point(dummy, curve_zero, log_no_digest); + points.push(log_no_select); + + let increment = b.select(dummy, zero, one); + n = b.add(n, increment); } - println!("points length: {}", points.len()); - b.add_curve_point(&points) + + (n, b.add_curve_point(&points)) } } @@ -262,7 +278,7 @@ where let status_offset = b.add_virtual_target(); let relevant_logs_offset = VectorWire::::new(b); - let mpt_key = ReceiptKeyWire::new(b); + let mpt_key = MPTKeyWire::new(b); // Build the node wires. let wires = MPTReceiptLeafNode::build_and_advance_key::<_, D, NODE_LEN>(b, &mpt_key); @@ -271,9 +287,9 @@ where let root = wires.root; // For each relevant log in the transaction we have to verify it lines up with the event we are monitoring for - let mut dv = + let (n, mut dv) = event_wires.verify_logs_and_extract_values::(b, &node, &relevant_logs_offset); - println!("dv target: {:?}", dv); + let value_id = b.map_to_curve_point(&[index]); dv = b.add_curve_point(&[value_id, dv]); @@ -281,11 +297,12 @@ where let dm = b.map_to_curve_point(&event_wires.to_vec()); // Register the public inputs - PublicInputArgs { + PublicInputsArgs { h: &root.output_array, k: &wires.key, dv, dm, + n, } .register_args(b); @@ -372,20 +389,14 @@ where wires.relevant_logs_offset.assign(pw, &relevant_logs_vector); let key_encoded = self.info.tx_index.rlp_bytes(); - let nibbles = key_encoded + let key_nibbles: [u8; MAX_KEY_NIBBLE_LEN] = key_encoded .iter() .flat_map(|byte| [byte / 16, byte % 16]) - .collect::>(); - - let mut key_nibbles = [0u8; MAX_TX_KEY_NIBBLE_LEN]; - key_nibbles - .iter_mut() - .enumerate() - .for_each(|(index, nibble)| { - if index < nibbles.len() { - *nibble = nibbles[index] - } - }); + .chain(iter::repeat(0u8)) + .take(64) + .collect::>() + .try_into() + .expect("Couldn't create mpt key with correct length"); 
wires.mpt_key.assign(pw, &key_nibbles, self.info.index_size); } @@ -462,10 +473,11 @@ impl CircuitLogicWires for ReceiptLeafWires Digest { + let topics_flat = event + .topics + .iter() + .chain(event.data.iter()) + .flat_map(|t| [t.column_id, t.rel_byte_offset, t.len]) + .collect::>(); + + let mut out = Vec::new(); + out.push(event.size); + out.extend_from_slice(&event.address.0.map(|byte| byte as usize)); + out.push(event.add_rel_offset); + out.extend_from_slice(&event.event_signature.map(|byte| byte as usize)); + out.push(event.sig_rel_offset); + out.extend_from_slice(&topics_flat); + + let data = out + .into_iter() + .map(GFp::from_canonical_usize) + .collect::>(); + map_to_curve_point(&data) +} + +/// Calculate `value_digest` for receipt leaf. +pub fn compute_receipt_leaf_value_digest(receipt_proof_info: &ReceiptProofInfo) -> Digest { + let receipt = receipt_proof_info.to_receipt().unwrap(); + let gas_used = receipt.cumulative_gas_used(); + + // Only use events that we are indexing + let address = receipt_proof_info.event_log_info.address; + let sig = receipt_proof_info.event_log_info.event_signature; + + let index_digest = map_to_curve_point(&[GFp::from_canonical_u64(receipt_proof_info.tx_index)]); + + let gas_digest = map_to_curve_point(&[GFp::ZERO, GFp::from_noncanonical_u128(gas_used)]); + let mut n = 0; + receipt + .logs() + .iter() + .cloned() + .filter_map(|log| { + let log_address = log.address; + let log_data = log.to_log_data(); + let (topics, data) = log_data.split(); + + if log_address == address && topics[0].0 == sig { + n += 1; + let topics_field = topics + .iter() + .skip(1) + .map(|fixed| fixed.0.pack(mp2_common::utils::Endianness::Big).to_fields()) + .collect::>(); + let data_fixed_bytes = data + .chunks(32) + .map(|chunk| chunk.pack(mp2_common::utils::Endianness::Big).to_fields()) + .take(2) + .collect::>(); + let log_no_digest = map_to_curve_point(&[GFp::ONE, GFp::from_canonical_usize(n)]); + let initial_digest = gas_digest + log_no_digest; + 
Some( + topics_field + .iter() + .chain(data_fixed_bytes.iter()) + .enumerate() + .fold(initial_digest, |acc, (i, fixed)| { + let mut values = vec![GFp::from_canonical_usize(i + 2)]; + values.extend_from_slice(fixed); + acc + map_to_curve_point(&values) + }), + ) + } else { + None + } + }) + .fold(index_digest, |acc, p| acc + p) +} diff --git a/mp2-v1/tests/common/context.rs b/mp2-v1/tests/common/context.rs index 149ba80bb..305a4fd20 100644 --- a/mp2-v1/tests/common/context.rs +++ b/mp2-v1/tests/common/context.rs @@ -12,7 +12,10 @@ use anyhow::{Context, Result}; use envconfig::Envconfig; use log::info; use mp2_common::eth::ProofQuery; -use mp2_v1::api::build_circuits_params; +use mp2_v1::{ + api::{build_circuits_params, PublicParameters}, + block_extraction::ExtractionType, +}; use std::{ fs::File, io::{BufReader, BufWriter}, @@ -94,14 +97,14 @@ pub async fn new_local_chain(storage: ProofKV) -> TestContext { } pub enum ParamsType { - Indexing, + Indexing(ExtractionType), Query, } impl ParamsType { pub fn full_path(&self, mut pre: PathBuf) -> PathBuf { match self { - ParamsType::Indexing => pre.push("index.params"), + ParamsType::Indexing(_) => pre.push("index.params"), ParamsType::Query => pre.push("query.params"), }; pre @@ -117,7 +120,7 @@ impl ParamsType { .context("while parsing MP2 parameters")?; ctx.query_params = Some(params); } - ParamsType::Indexing => { + ParamsType::Indexing(_) => { info!("parsing the indexing mp2-v1 parameters"); let params = bincode::deserialize_from(BufReader::new( File::open(&path).with_context(|| format!("while opening {path:?}"))?, @@ -149,9 +152,9 @@ impl ParamsType { ctx.query_params = Some(params); Ok(()) } - ParamsType::Indexing => { + ParamsType::Indexing(et) => { info!("building the mp2 indexing parameters"); - let mp2 = build_circuits_params(); + let mp2 = build_circuits_params(*et); ctx.params = Some(mp2); info!("writing the mp2-v1 indexing parameters"); Ok(()) @@ -174,7 +177,7 @@ impl ParamsType { )?; Ok(()) } - 
ParamsType::Indexing => { + ParamsType::Indexing(_) => { bincode::serialize_into( BufWriter::new( File::create(&path).with_context(|| format!("while creating {path:?}"))?, diff --git a/mp2-v1/tests/integrated_tests.rs b/mp2-v1/tests/integrated_tests.rs index 058499e05..f4891e233 100644 --- a/mp2-v1/tests/integrated_tests.rs +++ b/mp2-v1/tests/integrated_tests.rs @@ -33,6 +33,7 @@ use common::{ }; use envconfig::Envconfig; use log::info; +use mp2_v1::block_extraction::ExtractionType; use parsil::{ assembler::DynamicCircuitPis, parse_and_validate, @@ -83,7 +84,8 @@ async fn integrated_indexing() -> Result<()> { let mut ctx = context::new_local_chain(storage).await; info!("Initial Anvil block: {}", ctx.block_number().await); info!("Building indexing params"); - ctx.build_params(ParamsType::Indexing).unwrap(); + ctx.build_params(ParamsType::Indexing(ExtractionType::Storage)) + .unwrap(); info!("Params built"); // NOTE: to comment to avoid very long tests... From a3a596f7bcd56f32e3903bbfb606e9a0cc0c5b03 Mon Sep 17 00:00:00 2001 From: Zack Youell Date: Mon, 2 Dec 2024 13:16:00 +0000 Subject: [PATCH 11/47] Added unit tests for receipt leaf api --- mp2-common/src/eth.rs | 112 +++++++++++------ mp2-test/src/mpt_sequential.rs | 73 +++++------ mp2-v1/src/block_extraction/circuit.rs | 161 ++++++++++++++++--------- mp2-v1/src/values_extraction/api.rs | 75 +++++++++++- 4 files changed, 293 insertions(+), 128 deletions(-) diff --git a/mp2-common/src/eth.rs b/mp2-common/src/eth.rs index c23ee5ed4..adf935355 100644 --- a/mp2-common/src/eth.rs +++ b/mp2-common/src/eth.rs @@ -825,7 +825,6 @@ mod test { types::BlockNumber, }; use hashbrown::HashMap; - use tokio::task::JoinSet; use crate::{ mpt_sequential::utils::nibbles_to_bytes, @@ -965,9 +964,6 @@ mod test { #[tokio::test] async fn test_receipt_query() -> Result<()> { - let rpc = ProviderBuilder::new() - .on_anvil_with_config(|anvil| Anvil::fork(anvil, get_sepolia_url())); - // Make a contract that emits events so we can pick up on 
them sol! { #[allow(missing_docs)] @@ -994,44 +990,84 @@ mod test { } } } + + sol! { + #[allow(missing_docs)] + // solc v0.8.26; solc Counter.sol --via-ir --optimize --bin + #[sol(rpc, abi, bytecode="6080604052348015600e575f80fd5b506102288061001c5f395ff3fe608060405234801561000f575f80fd5b506004361061004a575f3560e01c8063488814e01461004e5780637229db15146100585780638381f58a14610062578063d09de08a14610080575b5f80fd5b61005661008a565b005b6100606100f8565b005b61006a610130565b6040516100779190610165565b60405180910390f35b610088610135565b005b5f547fbe3cbcfa5d4a62a595b4a15f51de63c11797bbef2ff687873efb0bb2852ee20f60405160405180910390a26100c0610135565b5f547fbe3cbcfa5d4a62a595b4a15f51de63c11797bbef2ff687873efb0bb2852ee20f60405160405180910390a26100f6610135565b565b5f547fbe3cbcfa5d4a62a595b4a15f51de63c11797bbef2ff687873efb0bb2852ee20f60405160405180910390a261012e610135565b565b5f5481565b5f80815480929190610146906101ab565b9190505550565b5f819050919050565b61015f8161014d565b82525050565b5f6020820190506101785f830184610156565b92915050565b7f4e487b71000000000000000000000000000000000000000000000000000000005f52601160045260245ffd5b5f6101b58261014d565b91507fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff82036101e7576101e661017e565b5b60018201905091905056fea26469706673582212203b7602644bfff2df89c2fe9498cd533326876859a0df7b96ac10be1fdc09c3a064736f6c634300081a0033")] + + contract OtherEmitter { + uint256 public number; + event otherEvent(uint256 indexed num); + + function otherEmit() public { + emit otherEvent(number); + increment(); + } + + function twoEmits() public { + emit otherEvent(number); + increment(); + emit otherEvent(number); + increment(); + } + + function increment() public { + number++; + } + } + } + + // Spin up a local node. 
+ + let rpc = ProviderBuilder::new() + .with_recommended_fillers() + .on_anvil_with_config(|anvil| Anvil::arg(anvil, "--no-mining")); + + // Turn on auto mining to deploy the contracts + rpc.anvil_set_auto_mine(true).await.unwrap(); + // Deploy the contract using anvil - let contract = EventEmitter::deploy(rpc.clone()).await?; + let event_contract = EventEmitter::deploy(rpc.root()).await.unwrap(); - let tx_reqs = (0..10) - .map(|i| match i % 2 { - 0 => contract.testEmit().into_transaction_request(), - 1 => contract.twoEmits().into_transaction_request(), + // Deploy the contract using anvil + let other_contract = OtherEmitter::deploy(rpc.root()).await.unwrap(); + + // Disable auto mining so we can ensure that all the transaction appear in the same block + rpc.anvil_set_auto_mine(false).await.unwrap(); + + let mut pending_tx_builders = vec![]; + for i in 0..25 { + let tx_req = match i % 4 { + 0 => event_contract.testEmit().into_transaction_request(), + 1 => event_contract.twoEmits().into_transaction_request(), + 2 => other_contract.otherEmit().into_transaction_request(), + 3 => other_contract.twoEmits().into_transaction_request(), _ => unreachable!(), - }) - .collect::>(); - let mut join_set = JoinSet::new(); - - tx_reqs.into_iter().for_each(|tx_req| { - let rpc_clone = rpc.clone(); - join_set.spawn(async move { - rpc_clone - .anvil_auto_impersonate_account(true) - .await - .unwrap(); - let sender_address = Address::random(); - let balance = U256::from(1e18 as u64); - rpc_clone - .anvil_set_balance(sender_address, balance) - .await - .unwrap(); - rpc_clone - .send_transaction(tx_req.with_from(sender_address)) - .await - .unwrap() - .watch() - .await - .unwrap() - }); - }); + }; + + let sender_address = Address::random(); + let funding = U256::from(1e18 as u64); + rpc.anvil_set_balance(sender_address, funding) + .await + .unwrap(); + rpc.anvil_auto_impersonate_account(true).await.unwrap(); + let new_req = tx_req.with_from(sender_address); + let tx_req_final = rpc + 
.fill(new_req) + .await + .unwrap() + .as_builder() + .unwrap() + .clone(); + pending_tx_builders.push(rpc.send_transaction(tx_req_final).await.unwrap()); + } + + rpc.anvil_mine(Some(U256::from(1u8)), None).await.unwrap(); - let hashes = join_set.join_all().await; let mut transactions = Vec::new(); - for hash in hashes.into_iter() { + for pending in pending_tx_builders.into_iter() { + let hash = pending.watch().await.unwrap(); transactions.push(rpc.get_transaction_by_hash(hash).await.unwrap().unwrap()); } @@ -1041,7 +1077,7 @@ mod test { let all_events = EventEmitter::abi::events(); let events = all_events.get("testEvent").unwrap(); - let receipt_query = ReceiptQuery::new(*contract.address(), events[0].clone()); + let receipt_query = ReceiptQuery::new(*event_contract.address(), events[0].clone()); let block = rpc .get_block( diff --git a/mp2-test/src/mpt_sequential.rs b/mp2-test/src/mpt_sequential.rs index 70080429a..6f5fa8719 100644 --- a/mp2-test/src/mpt_sequential.rs +++ b/mp2-test/src/mpt_sequential.rs @@ -11,7 +11,6 @@ use eth_trie::{EthTrie, MemoryDB, Trie}; use mp2_common::eth::{ReceiptProofInfo, ReceiptQuery}; use rand::{thread_rng, Rng}; use std::sync::Arc; -use tokio::task::JoinSet; /// Simply the maximum number of nibbles a key can have. const MAX_KEY_NIBBLE_LEN: usize = 64; @@ -112,50 +111,56 @@ pub fn generate_receipt_proofs() -> Vec { rt.block_on(async { // Spin up a local node. 
- let rpc = ProviderBuilder::new().on_anvil_with_config(|anvil| Anvil::block_time(anvil, 1)); + let rpc = ProviderBuilder::new() + .with_recommended_fillers() + .on_anvil_with_config(|anvil| Anvil::arg(anvil, "--no-mining")); + + // Turn on auto mining to deploy the contracts + rpc.anvil_set_auto_mine(true).await.unwrap(); // Deploy the contract using anvil - let event_contract = EventEmitter::deploy(rpc.clone()).await.unwrap(); + let event_contract = EventEmitter::deploy(rpc.root()).await.unwrap(); // Deploy the contract using anvil - let other_contract = OtherEmitter::deploy(rpc.clone()).await.unwrap(); + let other_contract = OtherEmitter::deploy(rpc.root()).await.unwrap(); + + // Disable auto mining so we can ensure that all the transaction appear in the same block + rpc.anvil_set_auto_mine(false).await.unwrap(); - let tx_reqs = (0..25) - .map(|i| match i % 4 { + // Send a bunch of transactions, some of which are related to the event we are testing for. + let mut pending_tx_builders = vec![]; + for i in 0..25 { + let tx_req = match i % 4 { 0 => event_contract.testEmit().into_transaction_request(), 1 => event_contract.twoEmits().into_transaction_request(), 2 => other_contract.otherEmit().into_transaction_request(), 3 => other_contract.twoEmits().into_transaction_request(), _ => unreachable!(), - }) - .collect::>(); - let mut join_set = JoinSet::new(); - tx_reqs.into_iter().for_each(|tx_req| { - let rpc_clone = rpc.clone(); - join_set.spawn(async move { - let sender_address = Address::random(); - let funding = U256::from(1e18 as u64); - rpc_clone - .anvil_set_balance(sender_address, funding) - .await - .unwrap(); - rpc_clone - .anvil_auto_impersonate_account(true) - .await - .unwrap(); - rpc_clone - .send_transaction(tx_req.with_from(sender_address)) - .await - .unwrap() - .watch() - .await - .unwrap() - }); - }); - - let hashes = join_set.join_all().await; + }; + + let sender_address = Address::random(); + let funding = U256::from(1e18 as u64); + 
rpc.anvil_set_balance(sender_address, funding) + .await + .unwrap(); + rpc.anvil_auto_impersonate_account(true).await.unwrap(); + let new_req = tx_req.with_from(sender_address); + let tx_req_final = rpc + .fill(new_req) + .await + .unwrap() + .as_builder() + .unwrap() + .clone(); + pending_tx_builders.push(rpc.send_transaction(tx_req_final).await.unwrap()); + } + + // Mine a block, it should include all the transactions created above. + rpc.anvil_mine(Some(U256::from(1u8)), None).await.unwrap(); + let mut transactions = Vec::new(); - for hash in hashes.into_iter() { + for pending in pending_tx_builders.into_iter() { + let hash = pending.watch().await.unwrap(); transactions.push(rpc.get_transaction_by_hash(hash).await.unwrap().unwrap()); } diff --git a/mp2-v1/src/block_extraction/circuit.rs b/mp2-v1/src/block_extraction/circuit.rs index 4ba2c643d..f9d51c8f3 100644 --- a/mp2-v1/src/block_extraction/circuit.rs +++ b/mp2-v1/src/block_extraction/circuit.rs @@ -172,65 +172,116 @@ mod test { use super::{public_inputs::PublicInputs, BlockCircuit, BlockWires}; use anyhow::Result; - pub type SepoliaBlockCircuit = BlockCircuit; - #[tokio::test] async fn prove_and_verify_block_extraction_circuit() -> Result<()> { - let url = get_sepolia_url(); - let provider = ProviderBuilder::new().on_http(url.parse().unwrap()); - let block_number = BlockNumberOrTag::Latest; - let block = provider - .get_block_by_number(block_number, true.into()) - .await - .unwrap() - .unwrap(); - - let rlp_headers = block.rlp(); - - let prev_block_hash = block - .header - .parent_hash - .0 - .pack(Endianness::Little) - .to_fields(); - let block_hash = block.block_hash().pack(Endianness::Little).to_fields(); - let state_root = block - .header - .state_root - .0 - .pack(Endianness::Little) - .to_fields(); - let block_number_buff = block.header.number.to_be_bytes(); - const NUM_LIMBS: usize = u256::NUM_LIMBS; - let block_number = - left_pad_generic::(&block_number_buff.pack(Endianness::Big)) - .to_fields(); - 
- let setup = setup_circuit::<_, D, C, SepoliaBlockCircuit>(); - let circuit = SepoliaBlockCircuit::new(rlp_headers).unwrap(); - let proof = prove_circuit(&setup, &circuit); - let pi = PublicInputs::::from_slice(&proof.public_inputs); - - assert_eq!(pi.prev_block_hash_raw(), &prev_block_hash); - assert_eq!(pi.block_hash_raw(), &block_hash); - assert_eq!( - pi.block_hash_raw(), - block.header.hash.0.pack(Endianness::Little).to_fields() - ); - assert_eq!(pi.state_root_raw(), &state_root); - assert_eq!(pi.block_number_raw(), &block_number); - Ok(()) + prove_and_verify_storage_block_extraction_circuit().await?; + prove_and_verify_receipt_block_extraction_circuit().await } - impl UserCircuit for BlockCircuit { - type Wires = BlockWires; - - fn build(cb: &mut CBuilder) -> Self::Wires { - Self::build(cb, super::ExtractionType::Storage) - } - - fn prove(&self, pw: &mut PartialWitness, wires: &Self::Wires) { - self.assign(pw, wires); + /// Macro used to produce testing functions for the various types of extraction we do. + macro_rules! 
impl_test_block_circuit { + ($(($fn_name:ident, $extraction:expr)), *) => { + $( + pub async fn $fn_name() -> Result<()> { + #[derive(Clone, Debug)] + pub struct TestCircuit { + inner: BlockCircuit, + } + + impl TestCircuit { + pub fn new(rlp_headers: Vec) -> Result { + crate::block_extraction::circuit::ensure!( + rlp_headers.len() <= crate::block_extraction::circuit::MAX_BLOCK_LEN, + "block rlp headers too long" + ); + Ok(Self {inner: BlockCircuit { rlp_headers }}) + } + } + + impl UserCircuit for TestCircuit { + type Wires = BlockWires; + + fn build(cb: &mut CBuilder) -> Self::Wires { + BlockCircuit::build(cb, $extraction) + } + + fn prove(&self, pw: &mut PartialWitness, wires: &Self::Wires) { + self.inner.assign(pw, wires); + } + } + let url = get_sepolia_url(); + let provider = ProviderBuilder::new().on_http(url.parse().unwrap()); + let block_number = BlockNumberOrTag::Latest; + let block = provider + .get_block_by_number(block_number, true.into()) + .await + .unwrap() + .unwrap(); + + let rlp_headers = block.rlp(); + + let prev_block_hash = block + .header + .parent_hash + .0 + .pack(Endianness::Little) + .to_fields(); + let block_hash = block.block_hash().pack(Endianness::Little).to_fields(); + let root = match $extraction { + super::ExtractionType::Storage => {block + .header + .state_root + .0 + .pack(Endianness::Little) + .to_fields()}, + super::ExtractionType::Receipt => {block + .header + .receipts_root + .0 + .pack(Endianness::Little) + .to_fields()}, + super::ExtractionType::Transaction => {block + .header + .transactions_root + .0 + .pack(Endianness::Little) + .to_fields()}, + + }; + let block_number_buff = block.header.number.to_be_bytes(); + const NUM_LIMBS: usize = u256::NUM_LIMBS; + let block_number = + left_pad_generic::(&block_number_buff.pack(Endianness::Big)) + .to_fields(); + + let setup = setup_circuit::<_, D, C, TestCircuit>(); + let circuit = TestCircuit::new(rlp_headers).unwrap(); + let proof = prove_circuit(&setup, &circuit); + let pi = 
PublicInputs::::from_slice(&proof.public_inputs); + + assert_eq!(pi.prev_block_hash_raw(), &prev_block_hash); + assert_eq!(pi.block_hash_raw(), &block_hash); + assert_eq!( + pi.block_hash_raw(), + block.header.hash.0.pack(Endianness::Little).to_fields() + ); + + assert_eq!(pi.state_root_raw(), &root); + assert_eq!(pi.block_number_raw(), &block_number); + Ok(()) + } + )* } } + + impl_test_block_circuit!( + ( + prove_and_verify_storage_block_extraction_circuit, + super::ExtractionType::Storage + ), + ( + prove_and_verify_receipt_block_extraction_circuit, + super::ExtractionType::Receipt + ) + ); } diff --git a/mp2-v1/src/values_extraction/api.rs b/mp2-v1/src/values_extraction/api.rs index d5888bb5c..c1cc80179 100644 --- a/mp2-v1/src/values_extraction/api.rs +++ b/mp2-v1/src/values_extraction/api.rs @@ -540,7 +540,10 @@ mod tests { mpt_sequential::utils::bytes_to_nibbles, types::MAPPING_LEAF_VALUE_LEN, }; - use mp2_test::{mpt_sequential::generate_random_storage_mpt, utils::random_vector}; + use mp2_test::{ + mpt_sequential::{generate_random_storage_mpt, generate_receipt_proofs}, + utils::random_vector, + }; use plonky2::field::types::Field; use plonky2_ecgfp5::curve::curve::Point; use rand::{thread_rng, Rng}; @@ -897,7 +900,77 @@ mod tests { let input = CircuitInput::new_branch(node, leaf_proofs); generate_proof(params, input).unwrap() } + #[test] + fn test_receipt_api() { + let receipt_proof_infos = generate_receipt_proofs(); + + // We check that we have enough receipts and then take the second and third info + // (the MPT proof for the first node is different). + // Then check that the node above both is a branch. 
+ assert!(receipt_proof_infos.len() > 3); + let second_info = &receipt_proof_infos[1]; + let third_info = &receipt_proof_infos[2]; + + let proof_length_1 = second_info.mpt_proof.len(); + let proof_length_2 = third_info.mpt_proof.len(); + + let list_one = rlp::decode_list::>(&second_info.mpt_proof[proof_length_1 - 2]); + let list_two = rlp::decode_list::>(&third_info.mpt_proof[proof_length_2 - 2]); + + assert!(list_one == list_two); + assert!(list_one.len() == 17); + + println!("Generating params..."); + let params = build_circuits_params(); + + println!("Proving leaf 1..."); + let leaf_input_1 = CircuitInput::new_receipt_leaf(second_info.clone()); + let now = std::time::Instant::now(); + let leaf_proof1 = generate_proof(¶ms, leaf_input_1).unwrap(); + { + let lp = ProofWithVK::deserialize(&leaf_proof1).unwrap(); + let pub1 = PublicInputs::new(&lp.proof.public_inputs); + let (_, ptr) = pub1.mpt_key_info(); + println!("pointer: {}", ptr); + } + println!( + "Proof for leaf 1 generated in {} ms", + now.elapsed().as_millis() + ); + + println!("Proving leaf 2..."); + let leaf_input_2 = CircuitInput::new_receipt_leaf(third_info.clone()); + let now = std::time::Instant::now(); + let leaf_proof2 = generate_proof(¶ms, leaf_input_2).unwrap(); + println!( + "Proof for leaf 2 generated in {} ms", + now.elapsed().as_millis() + ); + + // The branch case for receipts is identical to that of a mapping so we use the same api. 
+ println!("Proving branch..."); + let branch_input = CircuitInput::new_mapping_variable_branch( + second_info.mpt_proof[proof_length_1 - 2].clone(), + vec![leaf_proof1, leaf_proof2], + ); + + let now = std::time::Instant::now(); + generate_proof(¶ms, branch_input).unwrap(); + println!( + "Proof for branch node generated in {} ms", + now.elapsed().as_millis() + ); + } + fn test_circuits(is_simple_aggregation: bool, num_children: usize) { + let contract_address = Address::from_str(TEST_CONTRACT_ADDRESS).unwrap(); + let chain_id = 10; + let id = identifier_single_var_column(TEST_SLOT, &contract_address, chain_id, vec![]); + let key_id = + identifier_for_mapping_key_column(TEST_SLOT, &contract_address, chain_id, vec![]); + let value_id = + identifier_for_mapping_value_column(TEST_SLOT, &contract_address, chain_id, vec![]); + } /// Generate a leaf proof. fn prove_leaf(params: &PublicParameters, node: Vec, test_slot: StorageSlotInfo) -> Vec { // RLP(RLP(compact(partial_key_in_nibble)), RLP(value)) From 931ab2df93089d9180b60a5985fb96535320a31b Mon Sep 17 00:00:00 2001 From: Zack Youell Date: Mon, 2 Dec 2024 13:52:24 +0000 Subject: [PATCH 12/47] Rebased onto feat/receipt-trie --- mp2-common/src/eth.rs | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/mp2-common/src/eth.rs b/mp2-common/src/eth.rs index adf935355..f315e6091 100644 --- a/mp2-common/src/eth.rs +++ b/mp2-common/src/eth.rs @@ -3,8 +3,9 @@ use alloy::{ consensus::{ReceiptEnvelope as CRE, ReceiptWithBloom, TxEnvelope}, eips::BlockNumberOrTag, - network::{eip2718::Encodable2718, TransactionResponse}, - primitives::{Address, B256}, + json_abi::Event, + network::{eip2718::Encodable2718, BlockResponse}, + primitives::{Address, Log, LogData, B256}, providers::{Provider, RootProvider}, rlp::{Decodable, Encodable as AlloyEncodable}, rpc::types::{ @@ -740,10 +741,7 @@ impl BlockUtil { _ => panic!("aie"), }; - let transaction_primitive = match TxEnvelope::try_from(transaction.clone()) { - 
Ok(t) => t, - _ => panic!("Couldn't get transaction envelope"), - }; + let transaction_primitive = TxEnvelope::from(transaction.clone()); let body_rlp = receipt_primitive.encoded_2718(); @@ -810,7 +808,7 @@ mod test { use std::str::FromStr; use alloy::{ - network::TransactionBuilder, + network::{TransactionBuilder, TransactionResponse}, node_bindings::Anvil, primitives::{Bytes, Log, U256}, providers::{ext::AnvilApi, Provider, ProviderBuilder}, From b72c9f12fb6f3644b0b6a38f165879cdf6f8778c Mon Sep 17 00:00:00 2001 From: Zack Youell Date: Mon, 2 Dec 2024 15:06:33 +0000 Subject: [PATCH 13/47] Reworked block extraction to extract all three roots --- mp2-v1/src/api.rs | 6 +- mp2-v1/src/block_extraction/circuit.rs | 203 +++++++++---------- mp2-v1/src/block_extraction/mod.rs | 14 +- mp2-v1/src/block_extraction/public_inputs.rs | 62 +++++- mp2-v1/src/final_extraction/base_circuit.rs | 4 + mp2-v1/tests/common/context.rs | 17 +- mp2-v1/tests/integrated_tests.rs | 5 +- 7 files changed, 176 insertions(+), 135 deletions(-) diff --git a/mp2-v1/src/api.rs b/mp2-v1/src/api.rs index 17a68c3e1..4af635a3f 100644 --- a/mp2-v1/src/api.rs +++ b/mp2-v1/src/api.rs @@ -101,9 +101,7 @@ impl /// Instantiate the circuits employed for the pre-processing stage of LPN, /// returning their corresponding parameters -pub fn build_circuits_params( - extraction_type: block_extraction::ExtractionType, -) -> PublicParameters { +pub fn build_circuits_params() -> PublicParameters { log::info!("Building contract_extraction parameters..."); let contract_extraction = contract_extraction::build_circuits_params(); log::info!("Building length_extraction parameters..."); @@ -111,7 +109,7 @@ pub fn build_circuits_params( log::info!("Building values_extraction parameters..."); let values_extraction = values_extraction::build_circuits_params(); log::info!("Building block_extraction parameters..."); - let block_extraction = block_extraction::build_circuits_params(extraction_type); + let block_extraction = 
block_extraction::build_circuits_params(); log::info!("Building final_extraction parameters..."); let final_extraction = final_extraction::PublicParameters::build( block_extraction.circuit_data().verifier_data(), diff --git a/mp2-v1/src/block_extraction/circuit.rs b/mp2-v1/src/block_extraction/circuit.rs index f9d51c8f3..4c69fe25d 100644 --- a/mp2-v1/src/block_extraction/circuit.rs +++ b/mp2-v1/src/block_extraction/circuit.rs @@ -86,7 +86,7 @@ impl BlockCircuit { } /// Build the circuit, assigning the public inputs and returning the internal wires. - pub fn build(cb: &mut CBuilder, extraction_type: ExtractionType) -> BlockWires { + pub fn build(cb: &mut CBuilder) -> BlockWires { // already right padded to right size for keccak let rlp_headers = VectorWire::new(cb); @@ -102,10 +102,24 @@ impl BlockCircuit { // extract the state root of the block let state_root: Array = Array::::from_array(create_array(|i| { - rlp_headers.arr.arr[extraction_type.offset() + i] + rlp_headers.arr.arr[HEADER_STATE_ROOT_OFFSET + i] })); let state_root_packed = state_root.pack(cb, Endianness::Little); + // extract the transaction root of the block + let transaction_root: Array = + Array::::from_array(create_array(|i| { + rlp_headers.arr.arr[HEADER_TRANSACTION_ROOT_OFFSET + i] + })); + let transaction_root_packed = transaction_root.pack(cb, Endianness::Little); + + // extract the receipt root of the block + let receipt_root: Array = + Array::::from_array(create_array(|i| { + rlp_headers.arr.arr[HEADER_RECEIPT_ROOT_OFFSET + i] + })); + let receipt_root_packed = receipt_root.pack(cb, Endianness::Little); + // compute the block hash let bh_wires = KeccakCircuit::hash_vector(cb, &rlp_headers); @@ -125,6 +139,8 @@ impl BlockCircuit { &packed_prev_bh.downcast_to_targets().arr, &bn_u256.to_targets(), &state_root_packed.downcast_to_targets().arr, + &transaction_root_packed.downcast_to_targets().arr, + &receipt_root_packed.downcast_to_targets().arr, ) .register(cb); @@ -173,115 +189,88 @@ mod test { 
use anyhow::Result; #[tokio::test] - async fn prove_and_verify_block_extraction_circuit() -> Result<()> { - prove_and_verify_storage_block_extraction_circuit().await?; - prove_and_verify_receipt_block_extraction_circuit().await - } + pub async fn prove_and_verify_block_extraction_circuit() -> Result<()> { + #[derive(Clone, Debug)] + pub struct TestCircuit { + inner: BlockCircuit, + } - /// Macro used to produce testing functions for the various types of extraction we do. - macro_rules! impl_test_block_circuit { - ($(($fn_name:ident, $extraction:expr)), *) => { - $( - pub async fn $fn_name() -> Result<()> { - #[derive(Clone, Debug)] - pub struct TestCircuit { - inner: BlockCircuit, - } - - impl TestCircuit { - pub fn new(rlp_headers: Vec) -> Result { - crate::block_extraction::circuit::ensure!( - rlp_headers.len() <= crate::block_extraction::circuit::MAX_BLOCK_LEN, - "block rlp headers too long" - ); - Ok(Self {inner: BlockCircuit { rlp_headers }}) - } - } - - impl UserCircuit for TestCircuit { - type Wires = BlockWires; - - fn build(cb: &mut CBuilder) -> Self::Wires { - BlockCircuit::build(cb, $extraction) - } - - fn prove(&self, pw: &mut PartialWitness, wires: &Self::Wires) { - self.inner.assign(pw, wires); - } - } - let url = get_sepolia_url(); - let provider = ProviderBuilder::new().on_http(url.parse().unwrap()); - let block_number = BlockNumberOrTag::Latest; - let block = provider - .get_block_by_number(block_number, true.into()) - .await - .unwrap() - .unwrap(); - - let rlp_headers = block.rlp(); - - let prev_block_hash = block - .header - .parent_hash - .0 - .pack(Endianness::Little) - .to_fields(); - let block_hash = block.block_hash().pack(Endianness::Little).to_fields(); - let root = match $extraction { - super::ExtractionType::Storage => {block - .header - .state_root - .0 - .pack(Endianness::Little) - .to_fields()}, - super::ExtractionType::Receipt => {block - .header - .receipts_root - .0 - .pack(Endianness::Little) - .to_fields()}, - 
super::ExtractionType::Transaction => {block - .header - .transactions_root - .0 - .pack(Endianness::Little) - .to_fields()}, - - }; - let block_number_buff = block.header.number.to_be_bytes(); - const NUM_LIMBS: usize = u256::NUM_LIMBS; - let block_number = - left_pad_generic::(&block_number_buff.pack(Endianness::Big)) - .to_fields(); - - let setup = setup_circuit::<_, D, C, TestCircuit>(); - let circuit = TestCircuit::new(rlp_headers).unwrap(); - let proof = prove_circuit(&setup, &circuit); - let pi = PublicInputs::::from_slice(&proof.public_inputs); - - assert_eq!(pi.prev_block_hash_raw(), &prev_block_hash); - assert_eq!(pi.block_hash_raw(), &block_hash); - assert_eq!( - pi.block_hash_raw(), - block.header.hash.0.pack(Endianness::Little).to_fields() + impl TestCircuit { + pub fn new(rlp_headers: Vec) -> Result { + crate::block_extraction::circuit::ensure!( + rlp_headers.len() <= crate::block_extraction::circuit::MAX_BLOCK_LEN, + "block rlp headers too long" ); + Ok(Self { + inner: BlockCircuit { rlp_headers }, + }) + } + } + + impl UserCircuit for TestCircuit { + type Wires = BlockWires; + + fn build(cb: &mut CBuilder) -> Self::Wires { + BlockCircuit::build(cb) + } - assert_eq!(pi.state_root_raw(), &root); - assert_eq!(pi.block_number_raw(), &block_number); - Ok(()) + fn prove(&self, pw: &mut PartialWitness, wires: &Self::Wires) { + self.inner.assign(pw, wires); } - )* } - } + let url = get_sepolia_url(); + let provider = ProviderBuilder::new().on_http(url.parse().unwrap()); + let block_number = BlockNumberOrTag::Latest; + let block = provider + .get_block_by_number(block_number, true.into()) + .await + .unwrap() + .unwrap(); + + let rlp_headers = block.rlp(); + + let prev_block_hash = block + .header + .parent_hash + .0 + .pack(Endianness::Little) + .to_fields(); + let block_hash = block.block_hash().pack(Endianness::Little).to_fields(); + + let state_root = block.header.state_root.pack(Endianness::Little).to_fields(); + let transaction_root = block + .header + 
.transactions_root + .pack(Endianness::Little) + .to_fields(); + let receipt_root = block + .header + .receipts_root + .pack(Endianness::Little) + .to_fields(); + + let block_number_buff = block.header.number.to_be_bytes(); + const NUM_LIMBS: usize = u256::NUM_LIMBS; + let block_number = + left_pad_generic::(&block_number_buff.pack(Endianness::Big)) + .to_fields(); + + let setup = setup_circuit::<_, D, C, TestCircuit>(); + let circuit = TestCircuit::new(rlp_headers).unwrap(); + let proof = prove_circuit(&setup, &circuit); + let pi = PublicInputs::::from_slice(&proof.public_inputs); + + assert_eq!(pi.prev_block_hash_raw(), &prev_block_hash); + assert_eq!(pi.block_hash_raw(), &block_hash); + assert_eq!( + pi.block_hash_raw(), + block.header.hash.0.pack(Endianness::Little).to_fields() + ); - impl_test_block_circuit!( - ( - prove_and_verify_storage_block_extraction_circuit, - super::ExtractionType::Storage - ), - ( - prove_and_verify_receipt_block_extraction_circuit, - super::ExtractionType::Receipt - ) - ); + assert_eq!(pi.state_root_raw(), &state_root); + assert_eq!(pi.transaction_root_raw(), &transaction_root); + assert_eq!(pi.receipt_root_raw(), &receipt_root); + assert_eq!(pi.block_number_raw(), &block_number); + Ok(()) + } } diff --git a/mp2-v1/src/block_extraction/mod.rs b/mp2-v1/src/block_extraction/mod.rs index af268f2b9..76347b1fd 100644 --- a/mp2-v1/src/block_extraction/mod.rs +++ b/mp2-v1/src/block_extraction/mod.rs @@ -32,15 +32,15 @@ pub struct PublicParameters { } /// Returns the parameters necessary to prove block extraction circuits -pub fn build_circuits_params(extraction_type: ExtractionType) -> PublicParameters { - PublicParameters::build(extraction_type) +pub fn build_circuits_params() -> PublicParameters { + PublicParameters::build() } impl PublicParameters { - pub fn build(extraction_type: ExtractionType) -> Self { + pub fn build() -> Self { let config = default_config(); let mut cb = CircuitBuilder::new(config); - let wires = 
circuit::BlockCircuit::build(&mut cb, extraction_type); + let wires = circuit::BlockCircuit::build(&mut cb); let cd = cb.build(); Self { circuit_data: cd, @@ -77,13 +77,11 @@ mod test { }; use mp2_test::eth::get_sepolia_url; - use crate::block_extraction::{ - circuit::ExtractionType, public_inputs::PublicInputs, PublicParameters, - }; + use crate::block_extraction::{public_inputs::PublicInputs, PublicParameters}; #[tokio::test] async fn test_api_storage() -> Result<()> { - let params = PublicParameters::build(ExtractionType::Storage); + let params = PublicParameters::build(); let url = get_sepolia_url(); let provider = ProviderBuilder::new().on_http(url.parse().unwrap()); let block_number = BlockNumberOrTag::Latest; diff --git a/mp2-v1/src/block_extraction/public_inputs.rs b/mp2-v1/src/block_extraction/public_inputs.rs index 143eeac93..e376baf9f 100644 --- a/mp2-v1/src/block_extraction/public_inputs.rs +++ b/mp2-v1/src/block_extraction/public_inputs.rs @@ -12,10 +12,14 @@ use plonky2::iop::target::Target; // - `PREV_BH : [8]F` packed Keccak hash of the block // - `BN : F` Proven block number // - `SH : [8]F` Packed state root hash +// - `TH : [8]F` Packed transaction root hash +// - `RH : [8]F` Packed receipt root hash const BH_RANGE: PublicInputRange = 0..PACKED_HASH_LEN; const PREV_BH_RANGE: PublicInputRange = BH_RANGE.end..BH_RANGE.end + PACKED_HASH_LEN; const BN_RANGE: PublicInputRange = PREV_BH_RANGE.end..PREV_BH_RANGE.end + u256::NUM_LIMBS; const SH_RANGE: PublicInputRange = BN_RANGE.end..BN_RANGE.end + PACKED_HASH_LEN; +const TH_RANGE: PublicInputRange = SH_RANGE.end..SH_RANGE.end + PACKED_HASH_LEN; +const RH_RANGE: PublicInputRange = TH_RANGE.end..TH_RANGE.end + PACKED_HASH_LEN; /// Public inputs for the dynamic-length variable extraction. 
#[derive(Clone, Debug)] @@ -28,16 +32,29 @@ pub struct PublicInputs<'a, T> { pub(crate) bn: &'a [T], /// Packed state root pub(crate) sh: &'a [T], + /// Packed transaction root + pub(crate) th: &'a [T], + /// Packed receipt root + pub(crate) rh: &'a [T], } impl PublicInputCommon for PublicInputs<'_, Target> { - const RANGES: &'static [PublicInputRange] = &[BH_RANGE, PREV_BH_RANGE, BN_RANGE, SH_RANGE]; + const RANGES: &'static [PublicInputRange] = &[ + BH_RANGE, + PREV_BH_RANGE, + BN_RANGE, + SH_RANGE, + TH_RANGE, + RH_RANGE, + ]; fn register_args(&self, cb: &mut CBuilder) { cb.register_public_inputs(self.bh); cb.register_public_inputs(self.prev_bh); cb.register_public_inputs(self.bn); cb.register_public_inputs(self.sh); + cb.register_public_inputs(self.th); + cb.register_public_inputs(self.rh); } } @@ -48,16 +65,22 @@ impl<'a> PublicInputs<'a, Target> { prev_bh: &'a [Target], bn: &'a [Target], sh: &'a [Target], + th: &'a [Target], + rh: &'a [Target], ) -> Self { assert!(bh.len() == PACKED_HASH_LEN); assert!(prev_bh.len() == PACKED_HASH_LEN); assert!(sh.len() == PACKED_HASH_LEN); + assert!(th.len() == PACKED_HASH_LEN); + assert!(rh.len() == PACKED_HASH_LEN); assert!(bn.len() == u256::NUM_LIMBS); Self { bh, prev_bh, bn, sh, + th, + rh, } } @@ -72,6 +95,14 @@ impl<'a> PublicInputs<'a, Target> { pub fn state_root(&self) -> OutputHash { OutputHash::from_targets(self.sh) } + + pub fn transaction_root(&self) -> OutputHash { + OutputHash::from_targets(self.th) + } + + pub fn receipt_root(&self) -> OutputHash { + OutputHash::from_targets(self.rh) + } } impl PublicInputs<'_, T> { @@ -82,6 +113,8 @@ impl PublicInputs<'_, T> { .chain(self.prev_bh.iter()) .chain(self.bn.iter()) .chain(self.sh.iter()) + .chain(self.th.iter()) + .chain(self.rh.iter()) .cloned() .collect() } @@ -89,19 +122,30 @@ impl PublicInputs<'_, T> { impl<'a, T> PublicInputs<'a, T> { /// Total length of the public inputs. 
- pub const TOTAL_LEN: usize = SH_RANGE.end; + pub const TOTAL_LEN: usize = RH_RANGE.end; /// Creates a new instance from its internal parts. - pub fn from_parts(bh: &'a [T], prev_bh: &'a [T], bn: &'a [T], sh: &'a [T]) -> Self { + pub fn from_parts( + bh: &'a [T], + prev_bh: &'a [T], + bn: &'a [T], + sh: &'a [T], + th: &'a [T], + rh: &'a [T], + ) -> Self { assert_eq!(bh.len(), BH_RANGE.len()); assert_eq!(prev_bh.len(), PREV_BH_RANGE.len()); assert_eq!(sh.len(), SH_RANGE.len()); + assert_eq!(th.len(), TH_RANGE.len()); + assert_eq!(rh.len(), RH_RANGE.len()); Self { bh, prev_bh, bn, sh, + th, + rh, } } @@ -112,6 +156,8 @@ impl<'a, T> PublicInputs<'a, T> { prev_bh: &pi[PREV_BH_RANGE], bn: &pi[BN_RANGE], sh: &pi[SH_RANGE], + th: &pi[TH_RANGE], + rh: &pi[RH_RANGE], } } @@ -134,4 +180,14 @@ impl<'a, T> PublicInputs<'a, T> { pub const fn state_root_raw(&self) -> &[T] { self.sh } + + /// Returns the packed transaction root hash. + pub const fn transaction_root_raw(&self) -> &[T] { + self.th + } + + /// Returns the packed receipt root hash. 
+ pub const fn receipt_root_raw(&self) -> &[T] { + self.rh + } } diff --git a/mp2-v1/src/final_extraction/base_circuit.rs b/mp2-v1/src/final_extraction/base_circuit.rs index e53d12c1d..ce2474eab 100644 --- a/mp2-v1/src/final_extraction/base_circuit.rs +++ b/mp2-v1/src/final_extraction/base_circuit.rs @@ -434,6 +434,8 @@ pub(crate) mod test { ); let h = &random_vector::(PACKED_HASH_LEN).to_fields(); + let th = &random_vector::(PACKED_HASH_LEN).to_fields(); + let rh = &random_vector::(PACKED_HASH_LEN).to_fields(); let contract_dm = Point::rand(); let key = &random_vector::(MAX_KEY_NIBBLE_LEN).to_fields(); let ptr = &F::NEG_ONE; // simulating end of MPT recursion @@ -460,6 +462,8 @@ pub(crate) mod test { prev_bh: &parent_block_hash, bn: &block_number, sh: h, + th, + rh, } .to_vec(); ProofsPi { diff --git a/mp2-v1/tests/common/context.rs b/mp2-v1/tests/common/context.rs index 305a4fd20..16b501a5b 100644 --- a/mp2-v1/tests/common/context.rs +++ b/mp2-v1/tests/common/context.rs @@ -12,10 +12,7 @@ use anyhow::{Context, Result}; use envconfig::Envconfig; use log::info; use mp2_common::eth::ProofQuery; -use mp2_v1::{ - api::{build_circuits_params, PublicParameters}, - block_extraction::ExtractionType, -}; +use mp2_v1::api::{build_circuits_params, PublicParameters}; use std::{ fs::File, io::{BufReader, BufWriter}, @@ -97,14 +94,14 @@ pub async fn new_local_chain(storage: ProofKV) -> TestContext { } pub enum ParamsType { - Indexing(ExtractionType), + Indexing, Query, } impl ParamsType { pub fn full_path(&self, mut pre: PathBuf) -> PathBuf { match self { - ParamsType::Indexing(_) => pre.push("index.params"), + ParamsType::Indexing => pre.push("index.params"), ParamsType::Query => pre.push("query.params"), }; pre @@ -120,7 +117,7 @@ impl ParamsType { .context("while parsing MP2 parameters")?; ctx.query_params = Some(params); } - ParamsType::Indexing(_) => { + ParamsType::Indexing => { info!("parsing the indexing mp2-v1 parameters"); let params = 
bincode::deserialize_from(BufReader::new( File::open(&path).with_context(|| format!("while opening {path:?}"))?, @@ -152,9 +149,9 @@ impl ParamsType { ctx.query_params = Some(params); Ok(()) } - ParamsType::Indexing(et) => { + ParamsType::Indexing => { info!("building the mp2 indexing parameters"); - let mp2 = build_circuits_params(*et); + let mp2 = build_circuits_params(); ctx.params = Some(mp2); info!("writing the mp2-v1 indexing parameters"); Ok(()) @@ -177,7 +174,7 @@ impl ParamsType { )?; Ok(()) } - ParamsType::Indexing(_) => { + ParamsType::Indexing => { bincode::serialize_into( BufWriter::new( File::create(&path).with_context(|| format!("while creating {path:?}"))?, diff --git a/mp2-v1/tests/integrated_tests.rs b/mp2-v1/tests/integrated_tests.rs index f4891e233..673b60a91 100644 --- a/mp2-v1/tests/integrated_tests.rs +++ b/mp2-v1/tests/integrated_tests.rs @@ -33,7 +33,7 @@ use common::{ }; use envconfig::Envconfig; use log::info; -use mp2_v1::block_extraction::ExtractionType; + use parsil::{ assembler::DynamicCircuitPis, parse_and_validate, @@ -84,8 +84,7 @@ async fn integrated_indexing() -> Result<()> { let mut ctx = context::new_local_chain(storage).await; info!("Initial Anvil block: {}", ctx.block_number().await); info!("Building indexing params"); - ctx.build_params(ParamsType::Indexing(ExtractionType::Storage)) - .unwrap(); + ctx.build_params(ParamsType::Indexing).unwrap(); info!("Params built"); // NOTE: to comment to avoid very long tests... 
From 57c99f2f856b8cbd35b46807b79231d42a698b63 Mon Sep 17 00:00:00 2001 From: Zack Youell Date: Tue, 3 Dec 2024 14:06:00 +0000 Subject: [PATCH 14/47] Added testing for final extraction API --- mp2-common/src/array.rs | 9 +- mp2-common/src/eth.rs | 543 ++++++------------ mp2-test/src/mpt_sequential.rs | 37 +- mp2-v1/src/final_extraction/api.rs | 35 ++ .../src/final_extraction/receipt_circuit.rs | 210 ++++++- mp2-v1/src/values_extraction/api.rs | 28 +- mp2-v1/src/values_extraction/leaf_receipt.rs | 416 +++++++++++--- mp2-v1/src/values_extraction/mod.rs | 235 ++++++-- mp2-v1/src/values_extraction/public_inputs.rs | 2 +- 9 files changed, 982 insertions(+), 533 deletions(-) diff --git a/mp2-common/src/array.rs b/mp2-common/src/array.rs index 2650f0a31..984fcc4a4 100644 --- a/mp2-common/src/array.rs +++ b/mp2-common/src/array.rs @@ -636,7 +636,7 @@ where let arrays: Vec> = (0..padded_size) .map(|i| Array { arr: create_array(|j| { - let index = 64 * i + j; + let index = RANDOM_ACCESS_SIZE * i + j; if index < self.arr.len() { self.arr[index] } else { @@ -652,7 +652,7 @@ where let less_than_check = less_than_unsafe(b, at, array_size, 12); let true_target = b._true(); b.connect(less_than_check.target, true_target.target); - b.range_check(at, 12); + let (low_bits, high_bits) = b.split_low_high(at, 6, 12); // Search each of the smaller arrays for the target at `low_bits` @@ -1298,7 +1298,10 @@ mod test { }; run_circuit::(circuit); - arr2[0] += 1; // ensure arr2 is different from arr + arr2[0] = match arr2[0].checked_add(1) { + Some(num) => num, + None => arr2[0] - 1, + }; let res = panic::catch_unwind(|| { let circuit = TestSliceEqual { arr, diff --git a/mp2-common/src/eth.rs b/mp2-common/src/eth.rs index f315e6091..9117866b9 100644 --- a/mp2-common/src/eth.rs +++ b/mp2-common/src/eth.rs @@ -1,11 +1,10 @@ //! Module containing several structure definitions for Ethereum related operations //! such as fetching blocks, transactions, creating MPTs, getting proofs, etc. 
use alloy::{ - consensus::{ReceiptEnvelope as CRE, ReceiptWithBloom, TxEnvelope}, + consensus::{ReceiptEnvelope as CRE, ReceiptWithBloom}, eips::BlockNumberOrTag, - json_abi::Event, network::{eip2718::Encodable2718, BlockResponse}, - primitives::{Address, Log, LogData, B256}, + primitives::{Address, B256}, providers::{Provider, RootProvider}, rlp::{Decodable, Encodable as AlloyEncodable}, rpc::types::{ @@ -19,15 +18,32 @@ use ethereum_types::H256; use itertools::Itertools; use log::debug; use log::warn; + use rlp::{Encodable, Rlp}; use serde::{Deserialize, Serialize}; -use std::{array::from_fn as create_array, sync::Arc}; +use std::{ + array::from_fn as create_array, + collections::{BTreeSet, HashMap}, + sync::Arc, +}; -use crate::{mpt_sequential::utils::bytes_to_nibbles, rlp::MAX_KEY_NIBBLE_LEN, utils::keccak256}; +use crate::{ + keccak::HASH_LEN, + mpt_sequential::utils::bytes_to_nibbles, + rlp::MAX_KEY_NIBBLE_LEN, + serialization::{deserialize_long_array, serialize_long_array}, + utils::keccak256, +}; /// Retry number for the RPC request const RETRY_NUM: usize = 3; +/// The maximum size an additional piece of data can be in bytes. +const MAX_DATA_SIZE: usize = 32; + +/// The size of an event topic rlp encoded. +const ENCODED_TOPIC_SIZE: usize = 33; + pub trait Rlpable { fn block_hash(&self) -> Vec { keccak256(&self.rlp()) @@ -119,15 +135,17 @@ pub struct ProofQuery { } /// Struct used for storing relevant data to query blocks as they come in. +/// The constant `NO_TOPICS` is the number of indexed items in the event (excluding the event signature) and +/// `MAX_DATA` is the number of 32 byte words of data we expect in addition to the topics. 
#[derive(Debug, Clone)] -pub struct ReceiptQuery { +pub struct ReceiptQuery { /// The contract that emits the event we care about pub contract: Address, - /// The event we wish to monitor for, - pub event: Event, + /// The signature of the event we wish to monitor for + pub event: EventLogInfo, } -/// Struct used to store all the information needed for proving a leaf in the Receipt Trie is one we care about. +/// Struct used to store all the information needed for proving a leaf is in the Receipt Trie. #[derive(Debug, Clone, Serialize, Deserialize)] pub struct ReceiptProofInfo { /// The MPT proof that this Receipt is in the tree @@ -136,21 +154,11 @@ pub struct ReceiptProofInfo { pub mpt_root: H256, /// The index of this transaction in the block pub tx_index: u64, - /// The size of the index in bytes - pub index_size: usize, - /// The offset in the leaf (in RLP form) to status - pub status_offset: usize, - /// The offset in the leaf (in RLP form) to the start of logs - pub logs_offset: usize, - /// Data about the type of log we are proving the existence of - pub event_log_info: EventLogInfo, - /// The offsets for the relevant logs - pub relevant_logs_offset: Vec, } /// Contains all the information for an [`Event`] in rlp form #[derive(Debug, Clone, Copy, Serialize, Deserialize)] -pub struct EventLogInfo { +pub struct EventLogInfo { /// Size in bytes of the whole log rlp encoded pub size: usize, /// Packed contract address to check @@ -158,134 +166,70 @@ pub struct EventLogInfo { /// Byte offset for the address from the beginning of a Log pub add_rel_offset: usize, /// Packed event signature, - pub event_signature: [u8; 32], + pub event_signature: [u8; HASH_LEN], /// Byte offset from the start of the log to event signature pub sig_rel_offset: usize, - /// The topics for this Log - pub topics: [LogDataInfo; 3], - /// The extra data stored by this Log - pub data: [LogDataInfo; 2], + /// The the offsets to the other topics for this Log + #[serde( + serialize_with = 
"serialize_long_array", + deserialize_with = "deserialize_long_array" + )] + pub topics: [usize; NO_TOPICS], + /// The offsets to the start of the extra data stored by this Log + #[serde( + serialize_with = "serialize_long_array", + deserialize_with = "deserialize_long_array" + )] + pub data: [usize; MAX_DATA], } -/// Contains all the information for data contained in an [`Event`] -#[derive(Debug, Clone, Copy, Default, Serialize, Deserialize)] -pub struct LogDataInfo { - pub column_id: usize, - /// The byte offset from the beggining of the log to this target - pub rel_byte_offset: usize, - /// The length of this topic/data - pub len: usize, -} +impl EventLogInfo { + /// Create a new instance from a contract [`Address`] and a [`str`] that is the event signature + pub fn new(contract: Address, event_signature: &str) -> Self { + // To calculate the total size of the log rlp encoded we use the fact that the address takes 21 bytes to encode, topics + // take 33 bytes each to incode and form a list that has length between 33 bytes and 132 bytes and data is a string that has 32 * MAX_DATA length -impl TryFrom<&Log> for EventLogInfo { - type Error = anyhow::Error; - - fn try_from(log: &Log) -> std::result::Result { - // First we encode the log in rlp form - let mut buf = Vec::::new(); - log.encode(&mut buf); - - let rlp_log = rlp::Rlp::new(&buf); - // Extract the header - let log_header = rlp_log.payload_info()?; - let next_data = &buf[log_header.header_len..log_header.header_len + log_header.value_len]; - let rlp_log_no_header = rlp::Rlp::new(next_data); - // Find the address offset (skipping its header) - let address_header = rlp_log_no_header.payload_info()?; - let rel_address_offset = log_header.header_len + address_header.header_len; - // Find the signature offset (skipping its header) - let topics_data = &buf[rel_address_offset + address_header.value_len - ..log_header.header_len + log_header.value_len]; - let topics_rlp = rlp::Rlp::new(topics_data); - let 
topics_header = topics_rlp.payload_info()?; - let topic_0_data = - &buf[rel_address_offset + address_header.value_len + topics_header.header_len - ..log_header.header_len - + address_header.header_len - + address_header.value_len - + topics_header.header_len - + topics_header.value_len]; - let topic_0_rlp = rlp::Rlp::new(topic_0_data); - let topic_0_header = topic_0_rlp.payload_info()?; - let rel_sig_offset = log_header.header_len - + address_header.header_len - + address_header.value_len - + topics_header.header_len - + topic_0_header.header_len; - let event_signature: [u8; 32] = buf[rel_sig_offset..rel_sig_offset + 32].try_into()?; - // Each topic takes 33 bytes to encode so we divide this length by 33 to get the number of topics remaining - let remaining_topics = buf[rel_sig_offset + topic_0_header.value_len - ..log_header.header_len - + address_header.header_len - + address_header.value_len - + topics_header.header_len - + topics_header.value_len] - .len() - / 33; - - let mut topics = [LogDataInfo::default(); 3]; - let mut current_topic_offset = rel_sig_offset + topic_0_header.value_len + 1; - topics - .iter_mut() - .enumerate() - .take(remaining_topics) - .for_each(|(j, info)| { - *info = LogDataInfo { - column_id: j + 2, - rel_byte_offset: current_topic_offset, - len: 32, - }; - current_topic_offset += 33; - }); + // If we have more than one topic that is not the event signature the rlp encoding is a list that is over 55 bytes whose total length can be encoded in one byte, so the header length is 2 + // Otherwise its still a list but the header is a single byte. 
+ let topics_header_len = alloy::rlp::length_of_length((1 + NO_TOPICS) * ENCODED_TOPIC_SIZE); - // Deal with any remaining data - let mut data = [LogDataInfo::default(); 2]; + // If the we have more than one piece of data it is rlp encoded as a string with length greater than 55 bytes + let data_header_len = alloy::rlp::length_of_length(MAX_DATA * MAX_DATA_SIZE); - let data_vec = if current_topic_offset < buf.len() { - buf.iter() - .skip(current_topic_offset - 1) - .copied() - .collect::>() - } else { - vec![] - }; + let address_size = 21; + let topics_size = (1 + NO_TOPICS) * ENCODED_TOPIC_SIZE + topics_header_len; + let data_size = MAX_DATA * MAX_DATA_SIZE + data_header_len; - if !data_vec.is_empty() { - let data_rlp = rlp::Rlp::new(&data_vec); - let data_header = data_rlp.payload_info()?; - // Since we can deal with at most two words of additional data we only need to take 66 bytes from this list - let mut additional_offset = data_header.header_len; - data_vec[data_header.header_len..] - .chunks(33) - .enumerate() - .take(2) - .try_for_each(|(j, chunk)| { - let chunk_rlp = rlp::Rlp::new(chunk); - let chunk_header = chunk_rlp.payload_info()?; - if chunk_header.value_len <= 32 { - data[j] = LogDataInfo { - column_id: remaining_topics + 2 + j, - rel_byte_offset: current_topic_offset - + additional_offset - + chunk_header.header_len, - len: chunk_header.value_len, - }; - additional_offset += chunk_header.header_len + chunk_header.value_len; - } else { - return Ok(()); - } - Result::<(), anyhow::Error>::Ok(()) - })?; - } - Ok(EventLogInfo { - size: log_header.header_len + log_header.value_len, - address: log.address, - add_rel_offset: rel_address_offset, - event_signature, - sig_rel_offset: rel_sig_offset, + let payload_size = address_size + topics_size + data_size; + let header_size = alloy::rlp::length_of_length(payload_size); + + let size = header_size + payload_size; + + // The address itself starts after the header plus one byte for the address header. 
+ let add_rel_offset = header_size + 1; + + // The event signature offset is after the header, the address and the topics list header. + let sig_rel_offset = header_size + address_size + topics_header_len + 1; + + let topics: [usize; NO_TOPICS] = create_array(|i| { + header_size + address_size + topics_header_len + (i + 1) * ENCODED_TOPIC_SIZE + 1 + }); + + let data: [usize; MAX_DATA] = create_array(|i| { + header_size + address_size + topics_size + data_header_len + (i * MAX_DATA_SIZE) + }); + + let event_sig = alloy::primitives::keccak256(event_signature.as_bytes()); + + Self { + size, + address: contract, + add_rel_offset, + event_signature: event_sig.0, + sig_rel_offset, topics, data, - }) + } } } @@ -551,9 +495,13 @@ impl ReceiptProofInfo { } } -impl ReceiptQuery { - pub fn new(contract: Address, event: Event) -> Self { - Self { contract, event } +impl ReceiptQuery { + /// Construct a new [`ReceiptQuery`] from the contract [`Address`] and the event's name as a [`str`]. + pub fn new(contract: Address, event_name: &str) -> Self { + Self { + contract, + event: EventLogInfo::::new(contract, event_name), + } } /// Function that returns the MPT Trie inclusion proofs for all receipts in a block whose logs contain @@ -563,102 +511,29 @@ impl ReceiptQuery { provider: &RootProvider, block: BlockNumberOrTag, ) -> Result> { - let expected_topic_0 = B256::from_slice(&keccak256(self.event.signature().as_bytes())); let filter = Filter::new() .select(block) .address(self.contract) - .event(&self.event.signature()); + .event_signature(B256::from(self.event.event_signature)); let logs = provider.get_logs(&filter).await?; - // Find the length of the RLP encoded log - let event_log_info: EventLogInfo = (&logs - .first() - .ok_or(anyhow!("No relevant logs in this block"))? - .inner) - .try_into()?; // For each of the logs return the transacion its included in, then sort and remove duplicates. 
- let mut tx_indices = logs - .iter() - .map(|log| log.transaction_index) - .collect::>>() - .ok_or(anyhow!("One of the logs did not have a transaction index"))?; - tx_indices.sort(); - tx_indices.dedup(); + let tx_indices = BTreeSet::from_iter(logs.iter().map_while(|log| log.transaction_index)); // Construct the Receipt Trie for this block so we can retrieve MPT proofs. let mut block_util = BlockUtil::fetch(provider, block).await?; let mpt_root = block_util.receipts_trie.root_hash()?; let proofs = tx_indices .into_iter() - .map(|index| { - let key = index.rlp_bytes(); - - let index_size = key.len(); + .map(|tx_index| { + let key = tx_index.rlp_bytes(); let proof = block_util.receipts_trie.get_proof(&key[..])?; - // Since the compact encoding of the key is stored first plus an additional list header and - // then the first element in the receipt body is the transaction type we calculate the offset to that point - - let last_node = proof.last().ok_or(eth_trie::TrieError::DB( - "Could not get last node in proof".to_string(), - ))?; - - let list_length_hint = last_node[0] as usize - 247; - let key_length = if last_node[1 + list_length_hint] > 128 { - last_node[1 + list_length_hint] as usize - 128 - } else { - 0 - }; - let body_length_hint = last_node[2 + list_length_hint + key_length] as usize - 183; - let body_offset = 4 + list_length_hint + key_length + body_length_hint; - - let receipt = block_util.txs[index as usize].receipt(); - - let body_length_hint = last_node[body_offset] as usize - 247; - let length_hint = body_offset + body_length_hint; - - let status_offset = 1 + length_hint; - let gas_hint = last_node[2 + length_hint] as usize - 128; - // Logs bloom is always 256 bytes long and comes after the gas used the first byte is 185 then 1 then 0 then the bloom so the - // log data starts at 4 + length_hint + gas_hint + 259 - let log_offset = 3 + length_hint + gas_hint + 259; - - let log_hint = if last_node[log_offset] < 247 { - last_node[log_offset] as usize - 
192 - } else { - last_node[log_offset] as usize - 247 - }; - // We iterate through the logs and store the offsets we care about. - let mut current_log_offset = log_offset + 1 + log_hint; - - let relevant_logs = receipt - .logs() - .iter() - .filter_map(|log| { - let length = log.length(); - if log.address == self.contract - && log.data.topics().contains(&expected_topic_0) - { - let out = current_log_offset; - current_log_offset += length; - Some(out) - } else { - current_log_offset += length; - None - } - }) - .collect::>(); - Ok(ReceiptProofInfo { mpt_proof: proof, mpt_root, - tx_index: index, - index_size, - status_offset, - logs_offset: log_offset, - event_log_info, - relevant_logs_offset: relevant_logs, + tx_index, }) }) .collect::, eth_trie::TrieError>>()?; @@ -723,14 +598,21 @@ impl BlockUtil { bail!("can't see full transactions"); }; // check receipt root + let all_tx_map = HashMap::::from_iter( + all_tx + .iter() + .map_while(|tx| tx.transaction_index.map(|tx_index| (tx_index, tx))), + ); let memdb = Arc::new(MemoryDB::new(true)); let mut receipts_trie = EthTrie::new(memdb.clone()); let mut transactions_trie = EthTrie::new(memdb.clone()); let consensus_receipts = receipts .into_iter() - .zip(all_tx.iter()) - .map(|(receipt, transaction)| { - let tx_index = receipt.transaction_index.unwrap().rlp_bytes(); + .map(|receipt| { + let tx_index_u64 = receipt.transaction_index.unwrap(); + // If the HashMap doesn't have an entry for this tx_index then the recceipts and transactions aren't from the same block. 
+ let transaction = all_tx_map.get(&tx_index_u64).cloned().unwrap(); + let tx_index = tx_index_u64.rlp_bytes(); let receipt_primitive = match receipt.inner { CRE::Legacy(ref r) => CRE::Legacy(from_rpc_logs_to_consensus(r)), @@ -741,11 +623,9 @@ impl BlockUtil { _ => panic!("aie"), }; - let transaction_primitive = TxEnvelope::from(transaction.clone()); - let body_rlp = receipt_primitive.encoded_2718(); - let tx_body_rlp = transaction_primitive.encoded_2718(); + let tx_body_rlp = transaction.inner.encoded_2718(); receipts_trie .insert(&tx_index, &body_rlp) @@ -808,12 +688,10 @@ mod test { use std::str::FromStr; use alloy::{ - network::{TransactionBuilder, TransactionResponse}, - node_bindings::Anvil, - primitives::{Bytes, Log, U256}, - providers::{ext::AnvilApi, Provider, ProviderBuilder}, + network::TransactionResponse, + primitives::{Bytes, Log}, + providers::{Provider, ProviderBuilder}, rlp::Decodable, - sol, }; use eth_trie::Nibbles; @@ -828,7 +706,10 @@ mod test { mpt_sequential::utils::nibbles_to_bytes, utils::{Endianness, Packer}, }; - use mp2_test::eth::{get_mainnet_url, get_sepolia_url}; + use mp2_test::{ + eth::{get_mainnet_url, get_sepolia_url}, + mpt_sequential::generate_receipt_test_info, + }; use super::*; @@ -960,137 +841,12 @@ mod test { Ok(()) } - #[tokio::test] - async fn test_receipt_query() -> Result<()> { - // Make a contract that emits events so we can pick up on them - sol! 
{ - #[allow(missing_docs)] - // solc v0.8.26; solc Counter.sol --via-ir --optimize --bin - #[sol(rpc, abi, bytecode="6080604052348015600e575f80fd5b506102288061001c5f395ff3fe608060405234801561000f575f80fd5b506004361061004a575f3560e01c8063488814e01461004e5780638381f58a14610058578063d09de08a14610076578063db73227914610080575b5f80fd5b61005661008a565b005b6100606100f8565b60405161006d9190610165565b60405180910390f35b61007e6100fd565b005b610088610115565b005b5f547fdcd9c7fa0342f01013bd0bf2bec103a81936162dcebd1f0c38b1d4164c17e0fc60405160405180910390a26100c06100fd565b5f547fdcd9c7fa0342f01013bd0bf2bec103a81936162dcebd1f0c38b1d4164c17e0fc60405160405180910390a26100f66100fd565b565b5f5481565b5f8081548092919061010e906101ab565b9190505550565b5f547fdcd9c7fa0342f01013bd0bf2bec103a81936162dcebd1f0c38b1d4164c17e0fc60405160405180910390a261014b6100fd565b565b5f819050919050565b61015f8161014d565b82525050565b5f6020820190506101785f830184610156565b92915050565b7f4e487b71000000000000000000000000000000000000000000000000000000005f52601160045260245ffd5b5f6101b58261014d565b91507fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff82036101e7576101e661017e565b5b60018201905091905056fea26469706673582212202787ca0f2ea71e118bc4d1bf239cde5ec4730aeb35a404c44e6c9d587316418564736f6c634300081a0033")] - contract EventEmitter { - uint256 public number; - event testEvent(uint256 indexed num); - - function testEmit() public { - emit testEvent(number); - increment(); - } - - function twoEmits() public { - emit testEvent(number); - increment(); - emit testEvent(number); - increment(); - } - - function increment() public { - number++; - } - } - } - - sol! 
{ - #[allow(missing_docs)] - // solc v0.8.26; solc Counter.sol --via-ir --optimize --bin - #[sol(rpc, abi, bytecode="6080604052348015600e575f80fd5b506102288061001c5f395ff3fe608060405234801561000f575f80fd5b506004361061004a575f3560e01c8063488814e01461004e5780637229db15146100585780638381f58a14610062578063d09de08a14610080575b5f80fd5b61005661008a565b005b6100606100f8565b005b61006a610130565b6040516100779190610165565b60405180910390f35b610088610135565b005b5f547fbe3cbcfa5d4a62a595b4a15f51de63c11797bbef2ff687873efb0bb2852ee20f60405160405180910390a26100c0610135565b5f547fbe3cbcfa5d4a62a595b4a15f51de63c11797bbef2ff687873efb0bb2852ee20f60405160405180910390a26100f6610135565b565b5f547fbe3cbcfa5d4a62a595b4a15f51de63c11797bbef2ff687873efb0bb2852ee20f60405160405180910390a261012e610135565b565b5f5481565b5f80815480929190610146906101ab565b9190505550565b5f819050919050565b61015f8161014d565b82525050565b5f6020820190506101785f830184610156565b92915050565b7f4e487b71000000000000000000000000000000000000000000000000000000005f52601160045260245ffd5b5f6101b58261014d565b91507fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff82036101e7576101e661017e565b5b60018201905091905056fea26469706673582212203b7602644bfff2df89c2fe9498cd533326876859a0df7b96ac10be1fdc09c3a064736f6c634300081a0033")] - - contract OtherEmitter { - uint256 public number; - event otherEvent(uint256 indexed num); - - function otherEmit() public { - emit otherEvent(number); - increment(); - } - - function twoEmits() public { - emit otherEvent(number); - increment(); - emit otherEvent(number); - increment(); - } - - function increment() public { - number++; - } - } - } - - // Spin up a local node. 
- - let rpc = ProviderBuilder::new() - .with_recommended_fillers() - .on_anvil_with_config(|anvil| Anvil::arg(anvil, "--no-mining")); - - // Turn on auto mining to deploy the contracts - rpc.anvil_set_auto_mine(true).await.unwrap(); - - // Deploy the contract using anvil - let event_contract = EventEmitter::deploy(rpc.root()).await.unwrap(); - - // Deploy the contract using anvil - let other_contract = OtherEmitter::deploy(rpc.root()).await.unwrap(); - - // Disable auto mining so we can ensure that all the transaction appear in the same block - rpc.anvil_set_auto_mine(false).await.unwrap(); - - let mut pending_tx_builders = vec![]; - for i in 0..25 { - let tx_req = match i % 4 { - 0 => event_contract.testEmit().into_transaction_request(), - 1 => event_contract.twoEmits().into_transaction_request(), - 2 => other_contract.otherEmit().into_transaction_request(), - 3 => other_contract.twoEmits().into_transaction_request(), - _ => unreachable!(), - }; - - let sender_address = Address::random(); - let funding = U256::from(1e18 as u64); - rpc.anvil_set_balance(sender_address, funding) - .await - .unwrap(); - rpc.anvil_auto_impersonate_account(true).await.unwrap(); - let new_req = tx_req.with_from(sender_address); - let tx_req_final = rpc - .fill(new_req) - .await - .unwrap() - .as_builder() - .unwrap() - .clone(); - pending_tx_builders.push(rpc.send_transaction(tx_req_final).await.unwrap()); - } - - rpc.anvil_mine(Some(U256::from(1u8)), None).await.unwrap(); - - let mut transactions = Vec::new(); - for pending in pending_tx_builders.into_iter() { - let hash = pending.watch().await.unwrap(); - transactions.push(rpc.get_transaction_by_hash(hash).await.unwrap().unwrap()); - } - - let block_number = transactions.first().unwrap().block_number.unwrap(); - println!("block number: {block_number}"); - // We want to get the event signature so we can make a ReceiptQuery - let all_events = EventEmitter::abi::events(); - - let events = all_events.get("testEvent").unwrap(); - let 
receipt_query = ReceiptQuery::new(*event_contract.address(), events[0].clone()); - - let block = rpc - .get_block( - BlockNumberOrTag::Number(block_number).into(), - alloy::rpc::types::BlockTransactionsKind::Full, - ) - .await? - .ok_or(anyhow!("Could not get block test"))?; - let receipt_hash = block.header().receipts_root; - let proofs = receipt_query - .query_receipt_proofs(rpc.root(), BlockNumberOrTag::Number(block_number)) - .await?; - + #[test] + fn test_receipt_query() -> Result<()> { // Now for each transaction we fetch the block, then get the MPT Trie proof that the receipt is included and verify it - + let test_info = generate_receipt_test_info(); + let proofs = test_info.proofs(); + let query = test_info.query(); for proof in proofs.iter() { let memdb = Arc::new(MemoryDB::new(true)); let tx_trie = EthTrie::new(Arc::clone(&memdb)); @@ -1098,33 +854,66 @@ mod test { let mpt_key = proof.tx_index.rlp_bytes(); let _ = tx_trie - .verify_proof(receipt_hash.0.into(), &mpt_key, proof.mpt_proof.clone())? + .verify_proof(proof.mpt_root, &mpt_key, proof.mpt_proof.clone())? .ok_or(anyhow!("No proof found when verifying"))?; let last_node = proof .mpt_proof .last() .ok_or(anyhow!("Couldn't get first node in proof"))?; - let expected_sig: [u8; 32] = keccak256(receipt_query.event.signature().as_bytes()) - .try_into() - .unwrap(); + let expected_sig: [u8; 32] = query.event.event_signature; + + // Convert to Rlp form so we can use provided methods. + let node_rlp = rlp::Rlp::new(last_node); + + // The actual receipt data is item 1 in the list + let (receipt_rlp, receipt_off) = node_rlp.at_with_offset(1)?; + // The rlp encoded Receipt is not a list but a string that is formed of the `tx_type` followed by the remaining receipt + // data rlp encoded as a list. We retrieve the payload info so that we can work out relevant offsets later. 
+ let receipt_str_payload = receipt_rlp.payload_info()?; + + // We make a new `Rlp` struct that should be the encoding of the inner list representing the `ReceiptEnvelope` + let receipt_list = rlp::Rlp::new(&receipt_rlp.data()?[1..]); + + // The logs themselves start are the item at index 3 in this list + let (logs_rlp, logs_off) = receipt_list.at_with_offset(3)?; + + // We calculate the offset the that the logs are at from the start of the node + let logs_offset = receipt_off + receipt_str_payload.header_len + 1 + logs_off; + + // Now we produce an iterator over the logs with each logs offset. + let relevant_logs_offset = std::iter::successors(Some(0usize), |i| Some(i + 1)) + .map_while(|i| logs_rlp.at_with_offset(i).ok()) + .filter_map(|(log_rlp, log_off)| { + let mut bytes = log_rlp.data().ok()?; + let log = Log::decode(&mut bytes).ok()?; + if log.address == query.contract + && log + .data + .topics() + .contains(&B256::from(query.event.event_signature)) + { + Some(logs_offset + log_off) + } else { + Some(0usize) + } + }) + .collect::>(); - for log_offset in proof.relevant_logs_offset.iter() { - let mut buf = &last_node[*log_offset..*log_offset + proof.event_log_info.size]; + for log_offset in relevant_logs_offset.iter() { + let mut buf = &last_node[*log_offset..*log_offset + query.event.size]; let decoded_log = Log::decode(&mut buf)?; - let raw_bytes: [u8; 20] = last_node[*log_offset - + proof.event_log_info.add_rel_offset - ..*log_offset + proof.event_log_info.add_rel_offset + 20] + let raw_bytes: [u8; 20] = last_node[*log_offset + query.event.add_rel_offset + ..*log_offset + query.event.add_rel_offset + 20] .to_vec() .try_into() .unwrap(); - assert_eq!(decoded_log.address, receipt_query.contract); - assert_eq!(raw_bytes, receipt_query.contract); + assert_eq!(decoded_log.address, query.contract); + assert_eq!(raw_bytes, query.contract); let topics = decoded_log.topics(); assert_eq!(topics[0].0, expected_sig); - let raw_bytes: [u8; 32] = last_node[*log_offset - 
+ proof.event_log_info.sig_rel_offset - ..*log_offset + proof.event_log_info.sig_rel_offset + 32] + let raw_bytes: [u8; 32] = last_node[*log_offset + query.event.sig_rel_offset + ..*log_offset + query.event.sig_rel_offset + 32] .to_vec() .try_into() .unwrap(); @@ -1249,10 +1038,10 @@ mod test { #[tokio::test] async fn test_pidgy_pinguin_mapping_slot() -> Result<()> { // first pinguin holder https://dune.com/queries/2450476/4027653 - // holder: 0x188b264aa1456b869c3a92eeed32117ebb835f47 + // holder: 0x29469395eaf6f95920e59f858042f0e28d98a20b // NFT id https://opensea.io/assets/ethereum/0xbd3531da5cf5857e7cfaa92426877b022e612cf8/1116 let mapping_value = - Address::from_str("0xee5ac9c6db07c26e71207a41e64df42e1a2b05cf").unwrap(); + Address::from_str("0x29469395eaf6f95920e59f858042f0e28d98a20b").unwrap(); let nft_id: u32 = 1116; let mapping_key = left_pad32(&nft_id.to_be_bytes()); let url = get_mainnet_url(); diff --git a/mp2-test/src/mpt_sequential.rs b/mp2-test/src/mpt_sequential.rs index 6f5fa8719..1cd68a313 100644 --- a/mp2-test/src/mpt_sequential.rs +++ b/mp2-test/src/mpt_sequential.rs @@ -50,9 +50,27 @@ pub fn generate_random_storage_mpt( (trie, keys[right_key_idx].to_vec()) } +#[derive(Debug, Clone)] +pub struct ReceiptTestInfo { + /// The query which we have returned proofs for + pub query: ReceiptQuery, + /// The proofs for receipts relating to `self.query` + pub proofs: Vec, +} + +impl ReceiptTestInfo { + /// Getter for the proofs + pub fn proofs(&self) -> Vec { + self.proofs.clone() + } + /// Getter for the query + pub fn query(&self) -> &ReceiptQuery { + &self.query + } +} /// This function is used so that we can generate a Receipt Trie for a blog with varying transactions /// (i.e. some we are interested in and some we are not). -pub fn generate_receipt_proofs() -> Vec { +pub fn generate_receipt_test_info() -> ReceiptTestInfo<1, 0> { // Make a contract that emits events so we can pick up on them sol! 
{ #[allow(missing_docs)] @@ -78,11 +96,7 @@ pub fn generate_receipt_proofs() -> Vec { number++; } } - } - sol! { - #[allow(missing_docs)] - // solc v0.8.26; solc Counter.sol --via-ir --optimize --bin #[sol(rpc, abi, bytecode="6080604052348015600e575f80fd5b506102288061001c5f395ff3fe608060405234801561000f575f80fd5b506004361061004a575f3560e01c8063488814e01461004e5780637229db15146100585780638381f58a14610062578063d09de08a14610080575b5f80fd5b61005661008a565b005b6100606100f8565b005b61006a610130565b6040516100779190610165565b60405180910390f35b610088610135565b005b5f547fbe3cbcfa5d4a62a595b4a15f51de63c11797bbef2ff687873efb0bb2852ee20f60405160405180910390a26100c0610135565b5f547fbe3cbcfa5d4a62a595b4a15f51de63c11797bbef2ff687873efb0bb2852ee20f60405160405180910390a26100f6610135565b565b5f547fbe3cbcfa5d4a62a595b4a15f51de63c11797bbef2ff687873efb0bb2852ee20f60405160405180910390a261012e610135565b565b5f5481565b5f80815480929190610146906101ab565b9190505550565b5f819050919050565b61015f8161014d565b82525050565b5f6020820190506101785f830184610156565b92915050565b7f4e487b71000000000000000000000000000000000000000000000000000000005f52601160045260245ffd5b5f6101b58261014d565b91507fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff82036101e7576101e661017e565b5b60018201905091905056fea26469706673582212203b7602644bfff2df89c2fe9498cd533326876859a0df7b96ac10be1fdc09c3a064736f6c634300081a0033")] contract OtherEmitter { @@ -170,11 +184,18 @@ pub fn generate_receipt_proofs() -> Vec { let all_events = EventEmitter::abi::events(); let events = all_events.get("testEvent").unwrap(); - let receipt_query = ReceiptQuery::new(*event_contract.address(), events[0].clone()); - receipt_query + let receipt_query = + ReceiptQuery::<1, 0>::new(*event_contract.address(), &events[0].signature()); + + let proofs = receipt_query .query_receipt_proofs(rpc.root(), BlockNumberOrTag::Number(block_number)) .await - .unwrap() + .unwrap(); + + ReceiptTestInfo { + query: receipt_query, + proofs, + } }) } diff --git 
a/mp2-v1/src/final_extraction/api.rs b/mp2-v1/src/final_extraction/api.rs index 45f88831a..51ea65f95 100644 --- a/mp2-v1/src/final_extraction/api.rs +++ b/mp2-v1/src/final_extraction/api.rs @@ -261,6 +261,7 @@ mod tests { final_extraction::{ base_circuit::{test::ProofsPi, CONTRACT_SET_NUM_IO, VALUE_SET_NUM_IO}, lengthed_circuit::LENGTH_SET_NUM_IO, + receipt_circuit::test::ReceiptsProofsPi, }, length_extraction, }; @@ -284,6 +285,7 @@ mod tests { ); let proof_pis = ProofsPi::random(); + let receipt_proof_pis = ReceiptsProofsPi::generate_from_proof_pi_value(&proof_pis); let length_pis = proof_pis.length_inputs(); let len_dm = length_extraction::PublicInputs::::from_slice(&length_pis).metadata_point(); let block_proof = block_circuit @@ -298,6 +300,13 @@ mod tests { let length_proof = &length_params .generate_input_proofs::<1>([length_pis.try_into().unwrap()]) .unwrap()[0]; + let receipt_proof = &values_params + .generate_input_proofs::<1>([receipt_proof_pis + .value_inputs() + .proof_inputs + .try_into() + .unwrap()]) + .unwrap()[0]; let contract_proof: ProofWithVK = ( contract_proof.clone(), @@ -359,5 +368,31 @@ mod tests { ) .unwrap(); proof_pis.check_proof_public_inputs(proof.proof(), Some(len_dm)); + + let receipt_proof: ProofWithVK = ( + receipt_proof.clone(), + values_params.verifier_data_for_input_proofs::<1>()[0].clone(), + ) + .into(); + + let circuit_input = CircuitInput::new_receipt_input( + serialize_proof(&block_proof).unwrap(), + receipt_proof.serialize().unwrap(), + ) + .unwrap(); + let proof = ProofWithVK::deserialize( + ¶ms + .generate_receipt_proof( + match circuit_input { + CircuitInput::Receipt(input) => input, + _ => unreachable!(), + }, + values_params.get_recursive_circuit_set(), + ) + .unwrap(), + ) + .unwrap(); + + receipt_proof_pis.check_proof_public_inputs(proof.proof()); } } diff --git a/mp2-v1/src/final_extraction/receipt_circuit.rs b/mp2-v1/src/final_extraction/receipt_circuit.rs index ef536ef83..bce6854eb 100644 --- 
a/mp2-v1/src/final_extraction/receipt_circuit.rs +++ b/mp2-v1/src/final_extraction/receipt_circuit.rs @@ -2,9 +2,10 @@ use mp2_common::{ default_config, keccak::{OutputHash, PACKED_HASH_LEN}, proof::{deserialize_proof, verify_proof_fixed_circuit, ProofWithVK}, + public_inputs::PublicInputCommon, serialization::{deserialize, serialize}, u256::UInt256Target, - utils::FromTargets, + utils::{FromTargets, ToTargets}, C, D, F, }; use plonky2::{ @@ -29,7 +30,10 @@ use serde::{Deserialize, Serialize}; use crate::{block_extraction, values_extraction}; -use super::api::{FinalExtractionBuilderParams, NUM_IO}; +use super::{ + api::{FinalExtractionBuilderParams, NUM_IO}, + PublicInputs, +}; use anyhow::Result; @@ -67,8 +71,21 @@ impl ReceiptExtractionCircuit { // enforce block_pi.state_root == contract_pi.state_root block_pi - .state_root() + .receipt_root() .enforce_equal(b, &OutputHash::from_targets(value_pi.root_hash_info())); + + PublicInputs::new( + block_pi.bh, + block_pi.prev_bh, + // here the value digest is the same since for length proof, it is assumed the table + // digest is in Compound format (i.e. multiple rows inside digest already). 
+ &value_pi.values_digest_target().to_targets(), + &value_pi.metadata_digest_target().to_targets(), + &block_pi.bn.to_targets(), + &[b._false().target], + ) + .register_args(b); + ReceiptExtractionWires { dm: value_pi.metadata_digest_target(), dv: value_pi.values_digest_target(), @@ -211,3 +228,190 @@ impl ReceiptCircuitProofWires { .get_public_input_targets::() } } + +#[cfg(test)] +pub(crate) mod test { + use std::iter::once; + + use crate::final_extraction::{base_circuit::test::ProofsPi, PublicInputs}; + + use super::*; + use alloy::primitives::U256; + use anyhow::Result; + use itertools::Itertools; + use mp2_common::{ + keccak::PACKED_HASH_LEN, + utils::{Endianness, Packer, ToFields}, + }; + use mp2_test::{ + circuit::{run_circuit, UserCircuit}, + utils::random_vector, + }; + use plonky2::{ + field::types::{PrimeField64, Sample}, + hash::hash_types::HashOut, + iop::witness::WitnessWrite, + plonk::config::GenericHashOut, + }; + use plonky2_ecgfp5::curve::curve::Point; + use values_extraction::public_inputs::tests::new_extraction_public_inputs; + + #[derive(Clone, Debug)] + struct TestReceiptCircuit { + pis: ReceiptsProofsPi, + } + + struct TestReceiptWires { + pis: ReceiptsProofsPiTarget, + } + + impl UserCircuit for TestReceiptCircuit { + type Wires = TestReceiptWires; + fn build(c: &mut CircuitBuilder) -> Self::Wires { + let proofs_pi = ReceiptsProofsPiTarget::new(c); + let _ = ReceiptExtractionCircuit::build(c, &proofs_pi.blocks_pi, &proofs_pi.values_pi); + TestReceiptWires { pis: proofs_pi } + } + fn prove(&self, pw: &mut PartialWitness, wires: &Self::Wires) { + wires.pis.assign(pw, &self.pis); + } + } + + #[derive(Clone, Debug)] + pub(crate) struct ReceiptsProofsPiTarget { + pub(crate) blocks_pi: Vec, + pub(crate) values_pi: Vec, + } + + impl ReceiptsProofsPiTarget { + pub(crate) fn new(b: &mut CircuitBuilder) -> Self { + Self { + blocks_pi: b.add_virtual_targets( + block_extraction::public_inputs::PublicInputs::::TOTAL_LEN, + ), + values_pi: b + 
.add_virtual_targets(values_extraction::PublicInputs::::TOTAL_LEN), + } + } + pub(crate) fn assign(&self, pw: &mut PartialWitness, pis: &ReceiptsProofsPi) { + pw.set_target_arr(&self.values_pi, pis.values_pi.as_ref()); + pw.set_target_arr(&self.blocks_pi, pis.blocks_pi.as_ref()); + } + } + + /// TODO: refactor this struct to mimick exactly the base circuit wires in that it can contain + /// multiple values + #[derive(Clone, Debug)] + pub(crate) struct ReceiptsProofsPi { + pub(crate) blocks_pi: Vec, + pub(crate) values_pi: Vec, + } + + impl ReceiptsProofsPi { + /// Function takes in a [`ProofsPi`] instance and generates a set of values public inputs + /// that agree with the provided receipts root from the `blocks_pi`. + pub(crate) fn generate_from_proof_pi_value(base_info: &ProofsPi) -> ReceiptsProofsPi { + let original = base_info.value_inputs(); + let block_pi = base_info.block_inputs(); + let (k, t) = original.mpt_key_info(); + let new_value_digest = Point::rand(); + let new_metadata_digest = Point::rand(); + let new_values_pi = block_pi + .receipt_root_raw() + .iter() + .chain(k.iter()) + .chain(once(&t)) + .chain(new_value_digest.to_weierstrass().to_fields().iter()) + .chain(new_metadata_digest.to_weierstrass().to_fields().iter()) + .chain(once(&original.n())) + .cloned() + .collect_vec(); + Self { + blocks_pi: base_info.blocks_pi.clone(), + values_pi: new_values_pi, + } + } + + pub(crate) fn block_inputs(&self) -> block_extraction::PublicInputs { + block_extraction::PublicInputs::from_slice(&self.blocks_pi) + } + + pub(crate) fn value_inputs(&self) -> values_extraction::PublicInputs { + values_extraction::PublicInputs::new(&self.values_pi) + } + + /// check public inputs of the proof match with the ones in `self`. 
+ /// `compound_type` is a flag to specify whether `proof` is generated for a simple or compound type + /// `length_dm` is the metadata digest of a length proof, which is provided only for proofs related + /// to a compound type with a length slot + pub(crate) fn check_proof_public_inputs(&self, proof: &ProofWithPublicInputs) { + let proof_pis = PublicInputs::from_slice(&proof.public_inputs); + let block_pi = self.block_inputs(); + + assert_eq!(proof_pis.bn, block_pi.bn); + assert_eq!(proof_pis.h, block_pi.bh); + assert_eq!(proof_pis.ph, block_pi.prev_bh); + + // check digests + let value_pi = self.value_inputs(); + + assert_eq!(proof_pis.value_point(), value_pi.values_digest()); + + assert_eq!(proof_pis.metadata_point(), value_pi.metadata_digest()); + } + + pub(crate) fn random() -> Self { + let value_h = HashOut::::rand().to_bytes().pack(Endianness::Little); + let key = random_vector(64); + let ptr = usize::MAX; + let value_dv = Point::rand(); + let value_dm = Point::rand(); + let n = 10; + let values_pi = new_extraction_public_inputs( + &value_h, + &key, + ptr, + &value_dv.to_weierstrass(), + &value_dm.to_weierstrass(), + n, + ); + + let th = &random_vector::(PACKED_HASH_LEN).to_fields(); + let sh = &random_vector::(PACKED_HASH_LEN).to_fields(); + + // The receipts root and value root need to agree + let rh = &value_h.to_fields(); + + let block_number = U256::from(F::rand().to_canonical_u64()).to_fields(); + let block_hash = HashOut::::rand() + .to_bytes() + .pack(Endianness::Little) + .to_fields(); + let parent_block_hash = HashOut::::rand() + .to_bytes() + .pack(Endianness::Little) + .to_fields(); + let blocks_pi = block_extraction::public_inputs::PublicInputs { + bh: &block_hash, + prev_bh: &parent_block_hash, + bn: &block_number, + sh, + th, + rh, + } + .to_vec(); + ReceiptsProofsPi { + blocks_pi, + values_pi, + } + } + } + + #[test] + fn final_simple_value() -> Result<()> { + let pis = ReceiptsProofsPi::random(); + let test_circuit = TestReceiptCircuit { 
pis }; + run_circuit::(test_circuit); + Ok(()) + } +} diff --git a/mp2-v1/src/values_extraction/api.rs b/mp2-v1/src/values_extraction/api.rs index c1cc80179..d92a0d498 100644 --- a/mp2-v1/src/values_extraction/api.rs +++ b/mp2-v1/src/values_extraction/api.rs @@ -16,7 +16,7 @@ use anyhow::{bail, ensure, Result}; use log::debug; use mp2_common::{ default_config, - eth::ReceiptProofInfo, + eth::{ReceiptProofInfo, ReceiptQuery}, mpt_sequential::PAD_LEN, poseidon::H, proof::{ProofInputSerialized, ProofWithVK}, @@ -139,8 +139,13 @@ where } /// Create a circuit input for proving a leaf MPT node of a transaction receipt. - pub fn new_receipt_leaf(info: ReceiptProofInfo) -> Self { - CircuitInput::LeafReceipt(ReceiptLeafCircuit { info }) + pub fn new_receipt_leaf( + info: &ReceiptProofInfo, + query: &ReceiptQuery, + ) -> Self { + CircuitInput::LeafReceipt( + ReceiptLeafCircuit::new(info, query).expect("Could not construct Receipt Leaf Circuit"), + ) } /// Create a circuit input for proving an extension MPT node. @@ -541,7 +546,7 @@ mod tests { types::MAPPING_LEAF_VALUE_LEN, }; use mp2_test::{ - mpt_sequential::{generate_random_storage_mpt, generate_receipt_proofs}, + mpt_sequential::{generate_random_storage_mpt, generate_receipt_test_info}, utils::random_vector, }; use plonky2::field::types::Field; @@ -902,14 +907,15 @@ mod tests { } #[test] fn test_receipt_api() { - let receipt_proof_infos = generate_receipt_proofs(); - + let receipt_proof_infos = generate_receipt_test_info(); + let receipt_proofs = receipt_proof_infos.proofs(); + let query = receipt_proof_infos.query(); // We check that we have enough receipts and then take the second and third info // (the MPT proof for the first node is different). // Then check that the node above both is a branch. 
- assert!(receipt_proof_infos.len() > 3); - let second_info = &receipt_proof_infos[1]; - let third_info = &receipt_proof_infos[2]; + assert!(receipt_proofs.len() > 3); + let second_info = &receipt_proofs[1]; + let third_info = &receipt_proofs[2]; let proof_length_1 = second_info.mpt_proof.len(); let proof_length_2 = third_info.mpt_proof.len(); @@ -924,7 +930,7 @@ mod tests { let params = build_circuits_params(); println!("Proving leaf 1..."); - let leaf_input_1 = CircuitInput::new_receipt_leaf(second_info.clone()); + let leaf_input_1 = CircuitInput::new_receipt_leaf(second_info, query); let now = std::time::Instant::now(); let leaf_proof1 = generate_proof(¶ms, leaf_input_1).unwrap(); { @@ -939,7 +945,7 @@ mod tests { ); println!("Proving leaf 2..."); - let leaf_input_2 = CircuitInput::new_receipt_leaf(third_info.clone()); + let leaf_input_2 = CircuitInput::new_receipt_leaf(third_info, query); let now = std::time::Instant::now(); let leaf_proof2 = generate_proof(¶ms, leaf_input_2).unwrap(); println!( diff --git a/mp2-v1/src/values_extraction/leaf_receipt.rs b/mp2-v1/src/values_extraction/leaf_receipt.rs index 3e8926773..ef1bfcf04 100644 --- a/mp2-v1/src/values_extraction/leaf_receipt.rs +++ b/mp2-v1/src/values_extraction/leaf_receipt.rs @@ -2,18 +2,27 @@ use crate::MAX_RECEIPT_LEAF_NODE_LEN; -use super::public_inputs::{PublicInputs, PublicInputsArgs}; +use super::{ + public_inputs::{PublicInputs, PublicInputsArgs}, + DATA_PREFIX, GAS_USED_PREFIX, LOG_NUMBER_PREFIX, TOPIC_PREFIX, TX_INDEX_PREFIX, +}; +use alloy::{ + primitives::{Address, Log, B256}, + rlp::Decodable, +}; +use anyhow::{anyhow, Result}; use mp2_common::{ array::{Array, Vector, VectorWire}, - eth::{EventLogInfo, LogDataInfo, ReceiptProofInfo}, + eth::{EventLogInfo, ReceiptProofInfo, ReceiptQuery}, group_hashing::CircuitBuilderGroupHashing, - keccak::{InputData, KeccakCircuit, KeccakWires}, + keccak::{InputData, KeccakCircuit, KeccakWires, HASH_LEN}, mpt_sequential::{MPTKeyWire, MPTReceiptLeafNode, 
PAD_LEN}, + poseidon::H, public_inputs::PublicInputCommon, rlp::MAX_KEY_NIBBLE_LEN, types::{CBuilder, GFp}, - utils::{less_than, less_than_or_equal_to, Endianness, PackerTarget}, + utils::{less_than, less_than_or_equal_to_unsafe, Endianness, PackerTarget, ToTargets}, D, F, }; use plonky2::{ @@ -22,7 +31,7 @@ use plonky2::{ target::Target, witness::{PartialWitness, WitnessWrite}, }, - plonk::circuit_builder::CircuitBuilder, + plonk::{circuit_builder::CircuitBuilder, config::Hasher}, }; use plonky2_ecgfp5::gadgets::curve::{CircuitBuilderEcGFp5, CurveTarget}; @@ -34,6 +43,19 @@ use std::{array::from_fn, iter}; /// Maximum number of logs per transaction we can process const MAX_LOGS_PER_TX: usize = 2; +/// The number of bytes that `gas_used` could take up in the receipt. +/// We set a max of 3 here because this would be over half the gas in the block for Ethereum. +const MAX_GAS_SIZE: u64 = 3; + +/// The size of a topic in bytes in the rlp encoded receipt +const TOPICS_SIZE: usize = 32; + +/// The maximum number of topics that aren't the event signature. +const MAX_TOPICS: usize = 3; + +/// The maximum number of additional pieces of data we allow in an event (each being 32 bytes long). +const MAX_ADDITIONAL_DATA: usize = 2; + #[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)] pub struct ReceiptLeafWires where @@ -47,12 +69,16 @@ where pub root: KeccakWires<{ PAD_LEN(NODE_LEN) }>, /// The index of this receipt in the block pub index: Target, - /// The offset of the status of the transaction in the RLP encoded receipt node. 
- pub status_offset: Target, /// The offsets of the relevant logs inside the node pub relevant_logs_offset: VectorWire, /// The key in the MPT Trie pub mpt_key: MPTKeyWire, + /// The column ID for the transaction index + pub tx_index_column_id: Target, + /// The column ID for the log number in the receipt + pub log_number_column_id: Target, + /// The gas used column ID + pub gas_used_column_id: Target, } /// Contains all the information for an [`Event`] in rlp form @@ -65,13 +91,13 @@ pub struct EventWires { /// Byte offset for the address from the beginning of a Log add_rel_offset: Target, /// Packed event signature, - event_signature: Array, + event_signature: Array, /// Byte offset from the start of the log to event signature sig_rel_offset: Target, /// The topics for this Log - topics: [LogColumn; 3], + topics: [LogColumn; MAX_TOPICS], /// The extra data stored by this Log - data: [LogColumn; 2], + data: [LogColumn; MAX_ADDITIONAL_DATA], } /// Contains all the information for a [`Log`] in rlp form @@ -85,59 +111,47 @@ pub struct LogColumn { } impl LogColumn { - /// Convert to an array for metadata digest - pub fn to_array(self) -> [Target; 3] { - [self.column_id, self.rel_byte_offset, self.len] - } - /// Assigns a log colum from a [`LogDataInfo`] - pub fn assign(&self, pw: &mut PartialWitness, data: LogDataInfo) { - pw.set_target(self.column_id, F::from_canonical_usize(data.column_id)); + pub fn assign(&self, pw: &mut PartialWitness, info: &LogDataInfo) { + pw.set_target(self.column_id, info.column_id); pw.set_target( self.rel_byte_offset, - F::from_canonical_usize(data.rel_byte_offset), + F::from_canonical_usize(info.rel_byte_offset), ); - pw.set_target(self.len, F::from_canonical_usize(data.len)); + pw.set_target(self.len, F::from_canonical_usize(info.len)); } } impl EventWires { /// Convert to an array for metadata digest pub fn to_vec(&self) -> Vec { - let topics_flat = self - .topics - .iter() - .flat_map(|t| t.to_array()) - .collect::>(); - let data_flat 
= self - .data - .iter() - .flat_map(|t| t.to_array()) - .collect::>(); let mut out = Vec::new(); out.push(self.size); out.extend_from_slice(&self.address.arr); out.push(self.add_rel_offset); out.extend_from_slice(&self.event_signature.arr); out.push(self.sig_rel_offset); - out.extend_from_slice(&topics_flat); - out.extend_from_slice(&data_flat); out } + #[allow(clippy::too_many_arguments)] pub fn verify_logs_and_extract_values( &self, b: &mut CBuilder, value: &VectorWire, relevant_logs_offsets: &VectorWire, + tx_index: Target, + tx_index_column_id: Target, + log_number_column_id: Target, + gas_used_column_id: Target, ) -> (Target, CurveTarget) { let t = b._true(); let one = b.one(); let two = b.two(); let zero = b.zero(); let curve_zero = b.curve_zero(); - let mut points = Vec::new(); + let mut row_points = Vec::new(); // Extract the gas used in the transaction, since the position of this can vary because it is after the key // we have to prove we extracted from the correct location. @@ -175,13 +189,13 @@ impl EventWires { let combiner = b.constant(F::from_canonical_u64(1 << 8)); - let gas_used = (0..3u64).fold(zero, |acc, i| { + let gas_used = (0..MAX_GAS_SIZE).fold(zero, |acc, i| { let access_index = b.add_const(initial_gas_index, F::from_canonical_u64(i)); let array_value = value.arr.random_access_large_array(b, access_index); // If we have extracted a value from an index in the desired range (so lte final_gas_index) we want to add it. 
// If access_index was strictly less than final_gas_index we need to multiply by 1 << 8 after (since the encoding is big endian) - let valid = less_than_or_equal_to(b, access_index, final_gas_index, 12); + let valid = less_than_or_equal_to_unsafe(b, access_index, final_gas_index, 12); let need_scalar = less_than(b, access_index, final_gas_index, 12); let to_add = b.select(valid, array_value, zero); @@ -192,11 +206,14 @@ impl EventWires { }); // Map the gas used to a curve point for the value digest, gas used is the first column so use one as its column id. - let gas_digest = b.map_to_curve_point(&[zero, gas_used]); + let gas_digest = b.map_to_curve_point(&[gas_used_column_id, gas_used]); + let tx_index_digest = b.map_to_curve_point(&[tx_index_column_id, tx_index]); + let initial_row_digest = b.add_curve_point(&[gas_digest, tx_index_digest]); // We also keep track of the number of real logs we process as each log forms a row in our table let mut n = zero; for (index, log_offset) in relevant_logs_offsets.arr.arr.into_iter().enumerate() { + let mut points = Vec::new(); // Extract the address bytes let address_start = b.add(log_offset, self.add_rel_offset); @@ -236,46 +253,204 @@ impl EventWires { let dummy_column = b.is_equal(log_column.column_id, zero); let selected_point = b.select_curve_point(dummy_column, curve_zero, data_digest); - let selected_point = b.select_curve_point(dummy, curve_zero, selected_point); points.push(selected_point); } // If this is a real row we record the gas used in the transaction - let gas_select = b.select_curve_point(dummy, curve_zero, gas_digest); - points.push(gas_select); + points.push(initial_row_digest); // We also keep track of which log this is in the receipt to avoid having identical rows in the table in the case // that the event we are tracking can be emitted multiple times in the same transaction but has no topics or data. 
let log_number = b.constant(F::from_canonical_usize(index + 1)); - let log_no_digest = b.map_to_curve_point(&[one, log_number]); - let log_no_select = b.select_curve_point(dummy, curve_zero, log_no_digest); - points.push(log_no_select); + let log_no_digest = b.map_to_curve_point(&[log_number_column_id, log_number]); + points.push(log_no_digest); let increment = b.select(dummy, zero, one); n = b.add(n, increment); + let row_point_sum = b.add_curve_point(&points); + let sum_digest = b.map_to_curve_point(&row_point_sum.to_targets()); + let point_to_add = b.select_curve_point(dummy, curve_zero, sum_digest); + row_points.push(point_to_add); } - (n, b.add_curve_point(&points)) + (n, b.add_curve_point(&row_points)) } } -/// Circuit to prove the correct derivation of the MPT key from a simple slot +/// Circuit to prove a transaction receipt contains logs relating to a specific event. #[derive(Clone, Debug, Serialize, Deserialize)] pub struct ReceiptLeafCircuit { - pub(crate) info: ReceiptProofInfo, + /// This is the RLP encoded leaf node in the Receipt Trie. + pub node: Vec, + /// The transaction index, telling us where the receipt is in the block. The RLP encoding of the index + /// is also the key used in the Receipt Trie. 
+ pub tx_index: u64, + /// The size of the node in bytes + pub size: usize, + /// The address of the contract that emits the log + pub address: Address, + /// The offset of the address in the rlp encoded log + pub rel_add_offset: usize, + /// The event signature hash + pub event_signature: [u8; HASH_LEN], + /// The offset of the event signatur ein the rlp encoded log + pub sig_rel_offset: usize, + /// The other topics information + pub topics: [LogDataInfo; MAX_TOPICS], + /// Any additional data that we will extract from the log + pub data: [LogDataInfo; MAX_ADDITIONAL_DATA], + /// This is the offsets in the node to the start of the logs that relate to `event_info` + pub relevant_logs_offset: Vec, +} + +/// Contains all the information for data contained in an [`Event`] +#[derive(Debug, Clone, Copy, Default, Serialize, Deserialize)] +pub struct LogDataInfo { + /// The column id of this piece of info + pub column_id: GFp, + /// The byte offset from the beggining of the log to this target + pub rel_byte_offset: usize, + /// The length of this piece of data + pub len: usize, } impl ReceiptLeafCircuit where [(); PAD_LEN(NODE_LEN)]:, { + /// Create a new [`ReceiptLeafCircuit`] from a [`ReceiptProofInfo`] and a [`ReceiptQuery`] + pub fn new( + proof_info: &ReceiptProofInfo, + query: &ReceiptQuery, + ) -> Result { + // Since the compact encoding of the key is stored first plus an additional list header and + // then the first element in the receipt body is the transaction type we calculate the offset to that point + + let last_node = proof_info + .mpt_proof + .last() + .ok_or(anyhow!("Could not get last node in receipt trie proof"))?; + + // Convert to Rlp form so we can use provided methods. 
+ let node_rlp = rlp::Rlp::new(last_node); + + // The actual receipt data is item 1 in the list + let (receipt_rlp, receipt_off) = node_rlp.at_with_offset(1)?; + // The rlp encoded Receipt is not a list but a string that is formed of the `tx_type` followed by the remaining receipt + // data rlp encoded as a list. We retrieve the payload info so that we can work out relevant offsets later. + let receipt_str_payload = receipt_rlp.payload_info()?; + + // We make a new `Rlp` struct that should be the encoding of the inner list representing the `ReceiptEnvelope` + let receipt_list = rlp::Rlp::new(&receipt_rlp.data()?[1..]); + + // The logs themselves start are the item at index 3 in this list + let (logs_rlp, logs_off) = receipt_list.at_with_offset(3)?; + + // We calculate the offset the that the logs are at from the start of the node + let logs_offset = receipt_off + receipt_str_payload.header_len + 1 + logs_off; + + // Now we produce an iterator over the logs with each logs offset. + let relevant_logs_offset = iter::successors(Some(0usize), |i| Some(i + 1)) + .map_while(|i| logs_rlp.at_with_offset(i).ok()) + .filter_map(|(log_rlp, log_off)| { + let mut bytes = log_rlp.as_raw(); + let log = Log::decode(&mut bytes).ok()?; + + if log.address == query.contract + && log + .data + .topics() + .contains(&B256::from(query.event.event_signature)) + { + println!("relevant offset: {}", logs_offset + log_off); + Some(logs_offset + log_off) + } else { + Some(0usize) + } + }) + .take(MAX_LOGS_PER_TX) + .collect::>(); + + let EventLogInfo:: { + size, + address, + add_rel_offset, + event_signature, + sig_rel_offset, + topics, + data, + } = query.event; + + // We need a fixed number of topics for the circuit so we use dummies to pad to the correct length. 
+ let mut final_topics = [LogDataInfo::default(); MAX_TOPICS]; + + final_topics.iter_mut().enumerate().for_each(|(j, topic)| { + if j < NO_TOPICS { + let input = [ + address.as_slice(), + event_signature.as_slice(), + TOPIC_PREFIX, + &[j as u8 + 1], + ] + .concat() + .into_iter() + .map(GFp::from_canonical_u8) + .collect::>(); + let column_id = H::hash_no_pad(&input).elements[0]; + *topic = LogDataInfo { + column_id, + rel_byte_offset: topics[j], + len: TOPICS_SIZE, + }; + } + }); + + // We need a fixed number of pieces of data for the circuit so we use dummies to pad to the correct length. + let mut final_data = [LogDataInfo::default(); MAX_ADDITIONAL_DATA]; + final_data.iter_mut().enumerate().for_each(|(j, d)| { + if j < MAX_DATA { + let input = [ + address.as_slice(), + event_signature.as_slice(), + DATA_PREFIX, + &[j as u8 + 1], + ] + .concat() + .into_iter() + .map(GFp::from_canonical_u8) + .collect::>(); + let column_id = H::hash_no_pad(&input).elements[0]; + *d = LogDataInfo { + column_id, + rel_byte_offset: data[j], + len: TOPICS_SIZE, + }; + }; + }); + + Ok(Self { + node: last_node.clone(), + tx_index: proof_info.tx_index, + size, + address, + rel_add_offset: add_rel_offset, + event_signature, + sig_rel_offset, + topics: final_topics, + data: final_data, + relevant_logs_offset, + }) + } + pub fn build(b: &mut CBuilder) -> ReceiptLeafWires { // Build the event wires let event_wires = Self::build_event_wires(b); + let zero = b.zero(); + let curve_zero = b.curve_zero(); // Add targets for the data specific to this receipt let index = b.add_virtual_target(); - let status_offset = b.add_virtual_target(); + let relevant_logs_offset = VectorWire::::new(b); let mpt_key = MPTKeyWire::new(b); @@ -285,16 +460,46 @@ where let node = wires.node; let root = wires.root; + // Add targets for the column ids for tx index, log number and gas used + let tx_index_column_id = b.add_virtual_target(); + let log_number_column_id = b.add_virtual_target(); + let gas_used_column_id = 
b.add_virtual_target(); // For each relevant log in the transaction we have to verify it lines up with the event we are monitoring for - let (n, mut dv) = - event_wires.verify_logs_and_extract_values::(b, &node, &relevant_logs_offset); + let (n, dv) = event_wires.verify_logs_and_extract_values::( + b, + &node, + &relevant_logs_offset, + index, + tx_index_column_id, + log_number_column_id, + gas_used_column_id, + ); + + let mut core_metadata = event_wires.to_vec(); + core_metadata.push(tx_index_column_id); + core_metadata.push(log_number_column_id); + core_metadata.push(gas_used_column_id); - let value_id = b.map_to_curve_point(&[index]); + let initial_dm = b.map_to_curve_point(&core_metadata); + + let mut meta_data_points = vec![initial_dm]; + + for topic in event_wires.topics.iter() { + let is_id_zero = b.is_equal(topic.column_id, zero); + let column_id_digest = b.map_one_to_curve_point(topic.column_id); + let selected = b.select_curve_point(is_id_zero, curve_zero, column_id_digest); + meta_data_points.push(selected); + } - dv = b.add_curve_point(&[value_id, dv]); + for data in event_wires.data.iter() { + let is_id_zero = b.is_equal(data.column_id, zero); + let column_id_digest = b.map_one_to_curve_point(data.column_id); + let selected = b.select_curve_point(is_id_zero, curve_zero, column_id_digest); + meta_data_points.push(selected); + } - let dm = b.map_to_curve_point(&event_wires.to_vec()); + let dm = b.add_curve_point(&meta_data_points); // Register the public inputs PublicInputsArgs { @@ -311,9 +516,11 @@ where node, root, index, - status_offset, relevant_logs_offset, mpt_key, + tx_index_column_id, + log_number_column_id, + gas_used_column_id, } } @@ -364,84 +571,103 @@ where pub fn assign(&self, pw: &mut PartialWitness, wires: &ReceiptLeafWires) { self.assign_event_wires(pw, &wires.event); - let node = self - .info - .mpt_proof - .last() - .expect("Receipt MPT proof had no nodes"); let pad_node = - Vector::::from_vec(node).expect("invalid node given"); + 
Vector::::from_vec(&self.node).expect("invalid node given"); wires.node.assign(pw, &pad_node); KeccakCircuit::<{ PAD_LEN(NODE_LEN) }>::assign( pw, &wires.root, &InputData::Assigned(&pad_node), ); - pw.set_target(wires.index, GFp::from_canonical_u64(self.info.tx_index)); - pw.set_target( - wires.status_offset, - GFp::from_canonical_usize(self.info.status_offset), - ); + pw.set_target(wires.index, GFp::from_canonical_u64(self.tx_index)); let relevant_logs_vector = - Vector::::from_vec(&self.info.relevant_logs_offset) + Vector::::from_vec(&self.relevant_logs_offset) .expect("Could not assign relevant logs offsets"); wires.relevant_logs_offset.assign(pw, &relevant_logs_vector); - let key_encoded = self.info.tx_index.rlp_bytes(); + let key_encoded = self.tx_index.rlp_bytes(); let key_nibbles: [u8; MAX_KEY_NIBBLE_LEN] = key_encoded .iter() .flat_map(|byte| [byte / 16, byte % 16]) .chain(iter::repeat(0u8)) - .take(64) + .take(MAX_KEY_NIBBLE_LEN) .collect::>() .try_into() .expect("Couldn't create mpt key with correct length"); - wires.mpt_key.assign(pw, &key_nibbles, self.info.index_size); + wires.mpt_key.assign(pw, &key_nibbles, key_encoded.len()); + + // Work out the column ids for tx_index, log_number and gas_used + let tx_index_input = [ + self.address.as_slice(), + self.event_signature.as_slice(), + TX_INDEX_PREFIX, + ] + .concat() + .into_iter() + .map(GFp::from_canonical_u8) + .collect::>(); + let tx_index_column_id = H::hash_no_pad(&tx_index_input).elements[0]; + + let log_number_input = [ + self.address.as_slice(), + self.event_signature.as_slice(), + LOG_NUMBER_PREFIX, + ] + .concat() + .into_iter() + .map(GFp::from_canonical_u8) + .collect::>(); + let log_number_column_id = H::hash_no_pad(&log_number_input).elements[0]; + + let gas_used_input = [ + self.address.as_slice(), + self.event_signature.as_slice(), + GAS_USED_PREFIX, + ] + .concat() + .into_iter() + .map(GFp::from_canonical_u8) + .collect::>(); + let gas_used_column_id = 
H::hash_no_pad(&gas_used_input).elements[0]; + + pw.set_target(wires.tx_index_column_id, tx_index_column_id); + pw.set_target(wires.log_number_column_id, log_number_column_id); + pw.set_target(wires.gas_used_column_id, gas_used_column_id); } pub fn assign_event_wires(&self, pw: &mut PartialWitness, wires: &EventWires) { - let EventLogInfo { - size, - address, - add_rel_offset, - event_signature, - sig_rel_offset, - topics, - data, - } = self.info.event_log_info; - - pw.set_target(wires.size, F::from_canonical_usize(size)); + pw.set_target(wires.size, F::from_canonical_usize(self.size)); wires .address - .assign(pw, &address.0.map(GFp::from_canonical_u8)); + .assign(pw, &self.address.0.map(GFp::from_canonical_u8)); pw.set_target( wires.add_rel_offset, - F::from_canonical_usize(add_rel_offset), + F::from_canonical_usize(self.rel_add_offset), ); wires .event_signature - .assign(pw, &event_signature.map(GFp::from_canonical_u8)); + .assign(pw, &self.event_signature.map(GFp::from_canonical_u8)); pw.set_target( wires.sig_rel_offset, - F::from_canonical_usize(sig_rel_offset), + F::from_canonical_usize(self.sig_rel_offset), ); wires .topics .iter() - .zip(topics) - .for_each(|(topic_wire, topic)| topic_wire.assign(pw, topic)); + .zip(self.topics.iter()) + .for_each(|(topic_wire, topic_info)| topic_wire.assign(pw, topic_info)); wires .data .iter() - .zip(data) - .for_each(|(data_wire, data)| data_wire.assign(pw, data)); + .zip(self.data.iter()) + .for_each(|(data_wire, data_info)| data_wire.assign(pw, data_info)); } } @@ -473,8 +699,12 @@ impl CircuitLogicWires for ReceiptLeafWires { @@ -510,10 +740,12 @@ mod tests { fn test_leaf_circuit() { const NODE_LEN: usize = 512; - let receipt_proof_infos = generate_receipt_proofs(); - let info = receipt_proof_infos.first().unwrap().clone(); + let receipt_proof_infos = generate_receipt_test_info(); + let proofs = receipt_proof_infos.proofs(); + let info = proofs.first().unwrap(); + let query = receipt_proof_infos.query(); - let c = 
ReceiptLeafCircuit:: { info: info.clone() }; + let c = ReceiptLeafCircuit::::new(info, query).unwrap(); let test_circuit = TestReceiptLeafCircuit { c }; let node = info.mpt_proof.last().unwrap().clone(); @@ -529,13 +761,13 @@ mod tests { // Check value digest { - let exp_digest = compute_receipt_leaf_value_digest(&info); + let exp_digest = compute_receipt_leaf_value_digest(&proofs[0], &query.event); assert_eq!(pi.values_digest(), exp_digest.to_weierstrass()); } // Check metadata digest { - let exp_digest = compute_receipt_leaf_metadata_digest(&info.event_log_info); + let exp_digest = compute_receipt_leaf_metadata_digest(&query.event); assert_eq!(pi.metadata_digest(), exp_digest.to_weierstrass()); } } diff --git a/mp2-v1/src/values_extraction/mod.rs b/mp2-v1/src/values_extraction/mod.rs index 3010895b5..2d68d7fa3 100644 --- a/mp2-v1/src/values_extraction/mod.rs +++ b/mp2-v1/src/values_extraction/mod.rs @@ -146,6 +146,20 @@ impl StorageSlotInfo { .collect_vec() } } +/// Prefix used for making a topic column id. +const TOPIC_PREFIX: &[u8] = b"topic"; + +/// Prefix used for making a data column id. 
+const DATA_PREFIX: &[u8] = b"data"; + +/// Prefix for transaction index +const TX_INDEX_PREFIX: &[u8] = b"tx index"; + +/// Prefix for log number +const LOG_NUMBER_PREFIX: &[u8] = b"log number"; + +/// Prefix for gas used +const GAS_USED_PREFIX: &[u8] = b" gas used"; pub fn identifier_block_column() -> ColumnId { let inputs: Vec = BLOCK_ID_DST.to_fields(); @@ -532,41 +546,158 @@ pub fn compute_leaf_mapping_of_mappings_values_digest Digest { - let topics_flat = event - .topics - .iter() - .chain(event.data.iter()) - .flat_map(|t| [t.column_id, t.rel_byte_offset, t.len]) - .collect::>(); - +pub fn compute_receipt_leaf_metadata_digest( + event: &EventLogInfo, +) -> Digest { let mut out = Vec::new(); out.push(event.size); out.extend_from_slice(&event.address.0.map(|byte| byte as usize)); out.push(event.add_rel_offset); out.extend_from_slice(&event.event_signature.map(|byte| byte as usize)); out.push(event.sig_rel_offset); - out.extend_from_slice(&topics_flat); - let data = out + let mut field_out = out .into_iter() .map(GFp::from_canonical_usize) - .collect::>(); - map_to_curve_point(&data) + .collect::>(); + // Work out the column ids for tx_index, log_number and gas_used + let tx_index_input = [ + event.address.as_slice(), + event.event_signature.as_slice(), + TX_INDEX_PREFIX, + ] + .concat() + .into_iter() + .map(GFp::from_canonical_u8) + .collect::>(); + let tx_index_column_id = H::hash_no_pad(&tx_index_input).elements[0]; + + let log_number_input = [ + event.address.as_slice(), + event.event_signature.as_slice(), + LOG_NUMBER_PREFIX, + ] + .concat() + .into_iter() + .map(GFp::from_canonical_u8) + .collect::>(); + let log_number_column_id = H::hash_no_pad(&log_number_input).elements[0]; + + let gas_used_input = [ + event.address.as_slice(), + event.event_signature.as_slice(), + GAS_USED_PREFIX, + ] + .concat() + .into_iter() + .map(GFp::from_canonical_u8) + .collect::>(); + let gas_used_column_id = H::hash_no_pad(&gas_used_input).elements[0]; + 
field_out.push(tx_index_column_id); + field_out.push(log_number_column_id); + field_out.push(gas_used_column_id); + + let core_metadata = map_to_curve_point(&field_out); + + let topic_digests = event + .topics + .iter() + .enumerate() + .map(|(j, _)| { + let input = [ + event.address.as_slice(), + event.event_signature.as_slice(), + TOPIC_PREFIX, + &[j as u8 + 1], + ] + .concat() + .into_iter() + .map(GFp::from_canonical_u8) + .collect::>(); + let column_id = H::hash_no_pad(&input).elements[0]; + map_to_curve_point(&[column_id]) + }) + .collect::>(); + + let data_digests = event + .data + .iter() + .enumerate() + .map(|(j, _)| { + let input = [ + event.address.as_slice(), + event.event_signature.as_slice(), + DATA_PREFIX, + &[j as u8 + 1], + ] + .concat() + .into_iter() + .map(GFp::from_canonical_u8) + .collect::>(); + let column_id = H::hash_no_pad(&input).elements[0]; + map_to_curve_point(&[column_id]) + }) + .collect::>(); + + iter::once(core_metadata) + .chain(topic_digests) + .chain(data_digests) + .fold(Digest::NEUTRAL, |acc, p| acc + p) } /// Calculate `value_digest` for receipt leaf. 
-pub fn compute_receipt_leaf_value_digest(receipt_proof_info: &ReceiptProofInfo) -> Digest { +pub fn compute_receipt_leaf_value_digest( + receipt_proof_info: &ReceiptProofInfo, + event: &EventLogInfo, +) -> Digest { let receipt = receipt_proof_info.to_receipt().unwrap(); let gas_used = receipt.cumulative_gas_used(); // Only use events that we are indexing - let address = receipt_proof_info.event_log_info.address; - let sig = receipt_proof_info.event_log_info.event_signature; - - let index_digest = map_to_curve_point(&[GFp::from_canonical_u64(receipt_proof_info.tx_index)]); - - let gas_digest = map_to_curve_point(&[GFp::ZERO, GFp::from_noncanonical_u128(gas_used)]); + let address = event.address; + let sig = event.event_signature; + + // Work out the column ids for tx_index, log_number and gas_used + let tx_index_input = [ + event.address.as_slice(), + event.event_signature.as_slice(), + TX_INDEX_PREFIX, + ] + .concat() + .into_iter() + .map(GFp::from_canonical_u8) + .collect::>(); + let tx_index_column_id = H::hash_no_pad(&tx_index_input).elements[0]; + + let log_number_input = [ + event.address.as_slice(), + event.event_signature.as_slice(), + LOG_NUMBER_PREFIX, + ] + .concat() + .into_iter() + .map(GFp::from_canonical_u8) + .collect::>(); + let log_number_column_id = H::hash_no_pad(&log_number_input).elements[0]; + + let gas_used_input = [ + event.address.as_slice(), + event.event_signature.as_slice(), + GAS_USED_PREFIX, + ] + .concat() + .into_iter() + .map(GFp::from_canonical_u8) + .collect::>(); + let gas_used_column_id = H::hash_no_pad(&gas_used_input).elements[0]; + + let index_digest = map_to_curve_point(&[ + tx_index_column_id, + GFp::from_canonical_u64(receipt_proof_info.tx_index), + ]); + + let gas_digest = + map_to_curve_point(&[gas_used_column_id, GFp::from_noncanonical_u128(gas_used)]); let mut n = 0; receipt .logs() @@ -579,32 +710,60 @@ pub fn compute_receipt_leaf_value_digest(receipt_proof_info: &ReceiptProofInfo) if log_address == address && 
topics[0].0 == sig { n += 1; - let topics_field = topics + let topics_value_digest = topics .iter() + .enumerate() .skip(1) - .map(|fixed| fixed.0.pack(mp2_common::utils::Endianness::Big).to_fields()) + .map(|(j, fixed)| { + let packed = fixed.0.pack(mp2_common::utils::Endianness::Big).to_fields(); + let input = [ + event.address.as_slice(), + event.event_signature.as_slice(), + TOPIC_PREFIX, + &[j as u8], + ] + .concat() + .into_iter() + .map(GFp::from_canonical_u8) + .collect::>(); + let mut values = vec![H::hash_no_pad(&input).elements[0]]; + values.extend_from_slice(&packed); + map_to_curve_point(&values) + }) .collect::>(); - let data_fixed_bytes = data + let data_value_digest = data .chunks(32) - .map(|chunk| chunk.pack(mp2_common::utils::Endianness::Big).to_fields()) - .take(2) + .enumerate() + .map(|(j, fixed)| { + let packed = fixed.pack(mp2_common::utils::Endianness::Big).to_fields(); + let input = [ + event.address.as_slice(), + event.event_signature.as_slice(), + DATA_PREFIX, + &[j as u8 + 1], + ] + .concat() + .into_iter() + .map(GFp::from_canonical_u8) + .collect::>(); + let mut values = vec![H::hash_no_pad(&input).elements[0]]; + values.extend_from_slice(&packed); + map_to_curve_point(&values) + }) .collect::>(); - let log_no_digest = map_to_curve_point(&[GFp::ONE, GFp::from_canonical_usize(n)]); - let initial_digest = gas_digest + log_no_digest; - Some( - topics_field - .iter() - .chain(data_fixed_bytes.iter()) - .enumerate() - .fold(initial_digest, |acc, (i, fixed)| { - let mut values = vec![GFp::from_canonical_usize(i + 2)]; - values.extend_from_slice(fixed); - acc + map_to_curve_point(&values) - }), - ) + let log_no_digest = + map_to_curve_point(&[log_number_column_id, GFp::from_canonical_usize(n)]); + let initial_digest = index_digest + gas_digest + log_no_digest; + + let row_value = iter::once(initial_digest) + .chain(topics_value_digest) + .chain(data_value_digest) + .fold(Digest::NEUTRAL, |acc, p| acc + p); + + 
Some(map_to_curve_point(&row_value.to_fields())) } else { None } }) - .fold(index_digest, |acc, p| acc + p) + .fold(Digest::NEUTRAL, |acc, p| acc + p) } diff --git a/mp2-v1/src/values_extraction/public_inputs.rs b/mp2-v1/src/values_extraction/public_inputs.rs index 2a0c1238a..dfbaccd27 100644 --- a/mp2-v1/src/values_extraction/public_inputs.rs +++ b/mp2-v1/src/values_extraction/public_inputs.rs @@ -17,7 +17,7 @@ use plonky2_ecgfp5::{ curve::curve::WeierstrassPoint, gadgets::curve::{CircuitBuilderEcGFp5, CurveTarget}, }; -use std::array; +use std::{array, fmt::Debug}; // Leaf/Extension/Branch node Public Inputs: // - `H : [8]F` packed Keccak hash of the extension node From 2e4e060814addd4f526e3270275a544f532f8a18 Mon Sep 17 00:00:00 2001 From: Zack Youell Date: Tue, 17 Dec 2024 16:43:57 +0000 Subject: [PATCH 15/47] Addressed review comments --- mp2-common/src/eth.rs | 15 ++- mp2-test/src/mpt_sequential.rs | 113 +++++++++++++++--- .../src/final_extraction/receipt_circuit.rs | 58 ++++----- mp2-v1/src/values_extraction/api.rs | 2 +- mp2-v1/src/values_extraction/leaf_receipt.rs | 20 +++- 5 files changed, 151 insertions(+), 57 deletions(-) diff --git a/mp2-common/src/eth.rs b/mp2-common/src/eth.rs index 9117866b9..e2c264ce4 100644 --- a/mp2-common/src/eth.rs +++ b/mp2-common/src/eth.rs @@ -211,9 +211,8 @@ impl EventLogInfo Result<()> { + test_receipt_query_helper::<1, 0>()?; + test_receipt_query_helper::<2, 0>()?; + test_receipt_query_helper::<3, 0>()?; + test_receipt_query_helper::<3, 1>()?; + test_receipt_query_helper::<3, 2>() + } + + fn test_receipt_query_helper() -> Result<()> { // Now for each transaction we fetch the block, then get the MPT Trie proof that the receipt is included and verify it - let test_info = generate_receipt_test_info(); + let test_info = generate_receipt_test_info::(); let proofs = test_info.proofs(); let query = test_info.query(); for proof in proofs.iter() { diff --git a/mp2-test/src/mpt_sequential.rs b/mp2-test/src/mpt_sequential.rs index 
1cd68a313..bd49669fa 100644 --- a/mp2-test/src/mpt_sequential.rs +++ b/mp2-test/src/mpt_sequential.rs @@ -9,7 +9,7 @@ use alloy::{ use eth_trie::{EthTrie, MemoryDB, Trie}; use mp2_common::eth::{ReceiptProofInfo, ReceiptQuery}; -use rand::{thread_rng, Rng}; +use rand::{distributions::uniform::SampleRange, thread_rng, Rng}; use std::sync::Arc; /// Simply the maximum number of nibbles a key can have. @@ -70,34 +70,72 @@ impl ReceiptTestInfo ReceiptTestInfo<1, 0> { +pub fn generate_receipt_test_info( +) -> ReceiptTestInfo { // Make a contract that emits events so we can pick up on them sol! { #[allow(missing_docs)] // solc v0.8.26; solc Counter.sol --via-ir --optimize --bin - #[sol(rpc, abi, bytecode="6080604052348015600e575f80fd5b506102288061001c5f395ff3fe608060405234801561000f575f80fd5b506004361061004a575f3560e01c8063488814e01461004e5780638381f58a14610058578063d09de08a14610076578063db73227914610080575b5f80fd5b61005661008a565b005b6100606100f8565b60405161006d9190610165565b60405180910390f35b61007e6100fd565b005b610088610115565b005b5f547fdcd9c7fa0342f01013bd0bf2bec103a81936162dcebd1f0c38b1d4164c17e0fc60405160405180910390a26100c06100fd565b5f547fdcd9c7fa0342f01013bd0bf2bec103a81936162dcebd1f0c38b1d4164c17e0fc60405160405180910390a26100f66100fd565b565b5f5481565b5f8081548092919061010e906101ab565b9190505550565b5f547fdcd9c7fa0342f01013bd0bf2bec103a81936162dcebd1f0c38b1d4164c17e0fc60405160405180910390a261014b6100fd565b565b5f819050919050565b61015f8161014d565b82525050565b5f6020820190506101785f830184610156565b92915050565b7f4e487b71000000000000000000000000000000000000000000000000000000005f52601160045260245ffd5b5f6101b58261014d565b91507fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff82036101e7576101e661017e565b5b60018201905091905056fea26469706673582212202787ca0f2ea71e118bc4d1bf239cde5ec4730aeb35a404c44e6c9d587316418564736f6c634300081a0033")] + #[sol(rpc, abi, 
bytecode="6080604052348015600e575f80fd5b506104ed8061001c5f395ff3fe608060405234801561000f575f80fd5b5060043610610085575f3560e01c80638381f58a116100595780638381f58a146100b1578063d09de08a146100cf578063d857c891146100d9578063db732279146100f557610085565b80623c7e56146100895780632dc347641461009357806331c1c63b1461009d578063338b538a146100a7575b5f80fd5b6100916100ff565b005b61009b61016b565b005b6100a56101e6565b005b6100af61023a565b005b6100b9610280565b6040516100c69190610377565b60405180910390f35b6100d7610285565b005b6100f360048036038101906100ee91906103be565b61029d565b005b6100fd610327565b005b60025f5461010d9190610416565b60015f5461011b9190610416565b5f547ff57f433eb9493cf4d9cb5763c12221d9b095804644d4ee006a78c72076cff94760035f5461014c9190610416565b6040516101599190610377565b60405180910390a4610169610285565b565b60025f546101799190610416565b60015f546101879190610416565b5f547ff03d29753fbd5ac209bab88a99b396bcc25c3e72530d02c81aea4d324ab3d74260035f546101b89190610416565b60045f546101c69190610416565b6040516101d4929190610449565b60405180910390a46101e4610285565b565b60025f546101f49190610416565b60015f546102029190610416565b5f547f1d18de2cd8798a1c29b9255930c807eb6c84ae0acb2219acbb11e0f65cf813e960405160405180910390a4610238610285565b565b60015f546102489190610416565b5f547fa6baf14d8f11d7a4497089bb3fca0adfc34837cfb1f4aa370634d36ef0305b4660405160405180910390a361027e610285565b565b5f5481565b5f8081548092919061029690610470565b9190505550565b5f81036102b9576102ac610327565b6102b4610327565b610324565b600181036102d6576102c961023a565b6102d161023a565b610323565b600281036102f3576102e66101e6565b6102ee6101e6565b610322565b60038103610310576103036100ff565b61030b6100ff565b610321565b61031861016b565b61032061016b565b5b5b5b5b50565b5f547fdcd9c7fa0342f01013bd0bf2bec103a81936162dcebd1f0c38b1d4164c17e0fc60405160405180910390a261035d610285565b565b5f819050919050565b6103718161035f565b82525050565b5f60208201905061038a5f830184610368565b92915050565b5f80fd5b61039d8161035f565b81146103a7575f80fd5b50565b5f813590506103b881610394565b92915050565b5f60208284031215
6103d3576103d2610390565b5b5f6103e0848285016103aa565b91505092915050565b7f4e487b71000000000000000000000000000000000000000000000000000000005f52601160045260245ffd5b5f6104208261035f565b915061042b8361035f565b9250828201905080821115610443576104426103e9565b5b92915050565b5f60408201905061045c5f830185610368565b6104696020830184610368565b9392505050565b5f61047a8261035f565b91507fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff82036104ac576104ab6103e9565b5b60018201905091905056fea2646970667358221220f5d14aba97b2168309da4d73f65e2c98d90f3c697213c6e51c2520cee4816aea64736f6c634300081a0033")] contract EventEmitter { uint256 public number; event testEvent(uint256 indexed num); + event twoIndexed(uint256 indexed num, uint256 indexed numTwo); + event threeIndexed(uint256 indexed num, uint256 indexed numTwo, uint256 indexed numThree); + event oneData(uint256 indexed num, uint256 indexed numTwo, uint256 indexed numThree, uint256 numFour); + event twoData(uint256 indexed num, uint256 indexed numTwo, uint256 indexed numThree, uint256 numFour, uint256 numFive); + function testEmit() public { emit testEvent(number); increment(); } - function twoEmits() public { - emit testEvent(number); + function testTwoIndexed() public { + emit twoIndexed(number, number + 1); increment(); - emit testEvent(number); + } + + function testThreeIndexed() public { + emit threeIndexed(number, number + 1, number + 2); + increment(); + } + + function testOneData() public { + emit oneData(number, number + 1, number + 2, number + 3); increment(); } + function testTwoData() public { + emit twoData(number, number + 1, number + 2, number + 3, number + 4); + increment(); + } + + function twoEmits(uint256 flag) public { + if (flag == 0) { + testEmit(); + testEmit(); + } else if (flag == 1) { + testTwoIndexed(); + testTwoIndexed(); + } else if (flag == 2) { + testThreeIndexed(); + testThreeIndexed(); + } else if (flag == 3) { + testOneData(); + testOneData(); + } else { + testTwoData(); + testTwoData(); + } + } + 
function increment() public { number++; } } - #[sol(rpc, abi, bytecode="6080604052348015600e575f80fd5b506102288061001c5f395ff3fe608060405234801561000f575f80fd5b506004361061004a575f3560e01c8063488814e01461004e5780637229db15146100585780638381f58a14610062578063d09de08a14610080575b5f80fd5b61005661008a565b005b6100606100f8565b005b61006a610130565b6040516100779190610165565b60405180910390f35b610088610135565b005b5f547fbe3cbcfa5d4a62a595b4a15f51de63c11797bbef2ff687873efb0bb2852ee20f60405160405180910390a26100c0610135565b5f547fbe3cbcfa5d4a62a595b4a15f51de63c11797bbef2ff687873efb0bb2852ee20f60405160405180910390a26100f6610135565b565b5f547fbe3cbcfa5d4a62a595b4a15f51de63c11797bbef2ff687873efb0bb2852ee20f60405160405180910390a261012e610135565b565b5f5481565b5f80815480929190610146906101ab565b9190505550565b5f819050919050565b61015f8161014d565b82525050565b5f6020820190506101785f830184610156565b92915050565b7f4e487b71000000000000000000000000000000000000000000000000000000005f52601160045260245ffd5b5f6101b58261014d565b91507fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff82036101e7576101e661017e565b5b60018201905091905056fea26469706673582212203b7602644bfff2df89c2fe9498cd533326876859a0df7b96ac10be1fdc09c3a064736f6c634300081a0033")] + #[sol(rpc, abi, 
bytecode="6080604052348015600e575f80fd5b506102288061001c5f395ff3fe608060405234801561000f575f80fd5b506004361061004a575f3560e01c8063488814e01461004e5780637229db15146100585780638381f58a14610062578063d09de08a14610080575b5f80fd5b61005661008a565b005b6100606100f8565b005b61006a610130565b6040516100779190610165565b60405180910390f35b610088610135565b005b5f547fbe3cbcfa5d4a62a595b4a15f51de63c11797bbef2ff687873efb0bb2852ee20f60405160405180910390a26100c0610135565b5f547fbe3cbcfa5d4a62a595b4a15f51de63c11797bbef2ff687873efb0bb2852ee20f60405160405180910390a26100f6610135565b565b5f547fbe3cbcfa5d4a62a595b4a15f51de63c11797bbef2ff687873efb0bb2852ee20f60405160405180910390a261012e610135565b565b5f5481565b5f80815480929190610146906101ab565b9190505550565b5f819050919050565b61015f8161014d565b82525050565b5f6020820190506101785f830184610156565b92915050565b7f4e487b71000000000000000000000000000000000000000000000000000000005f52601160045260245ffd5b5f6101b58261014d565b91507fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff82036101e7576101e661017e565b5b60018201905091905056fea2646970667358221220aacdd709f2f5e659587a60249419a4459e23d06c85d31d2c0b55c3fafbf3a2cb64736f6c634300081a0033")] contract OtherEmitter { uint256 public number; @@ -140,24 +178,36 @@ pub fn generate_receipt_test_info() -> ReceiptTestInfo<1, 0> { // Disable auto mining so we can ensure that all the transaction appear in the same block rpc.anvil_set_auto_mine(false).await.unwrap(); - + rpc.anvil_auto_impersonate_account(true).await.unwrap(); // Send a bunch of transactions, some of which are related to the event we are testing for. 
let mut pending_tx_builders = vec![]; + let mut rng = rand::thread_rng(); for i in 0..25 { - let tx_req = match i % 4 { + let random = match (0..5).sample_single(&mut rng) { 0 => event_contract.testEmit().into_transaction_request(), - 1 => event_contract.twoEmits().into_transaction_request(), + 1 => event_contract.testTwoIndexed().into_transaction_request(), + 2 => event_contract.testThreeIndexed().into_transaction_request(), + 3 => event_contract.testOneData().into_transaction_request(), + 4 => event_contract.testTwoData().into_transaction_request(), + _ => unreachable!(), + }; + let tx_req = match i % 4 { + 0 => random, + 1 => event_contract + .twoEmits(U256::from((0..5).sample_single(&mut rng))) + .into_transaction_request(), 2 => other_contract.otherEmit().into_transaction_request(), 3 => other_contract.twoEmits().into_transaction_request(), _ => unreachable!(), }; let sender_address = Address::random(); + let funding = U256::from(1e18 as u64); rpc.anvil_set_balance(sender_address, funding) .await .unwrap(); - rpc.anvil_auto_impersonate_account(true).await.unwrap(); + let new_req = tx_req.with_from(sender_address); let tx_req_final = rpc .fill(new_req) @@ -169,6 +219,32 @@ pub fn generate_receipt_test_info() -> ReceiptTestInfo<1, 0> { pending_tx_builders.push(rpc.send_transaction(tx_req_final).await.unwrap()); } + // Finally we guarantee at least one of the event we are going to query for + let queried_event_req = match (NO_TOPICS, MAX_DATA) { + (1, 0) => event_contract.testEmit().into_transaction_request(), + (2, 0) => event_contract.testTwoIndexed().into_transaction_request(), + (3, 0) => event_contract.testThreeIndexed().into_transaction_request(), + (3, 1) => event_contract.testOneData().into_transaction_request(), + (3, 2) => event_contract.testTwoData().into_transaction_request(), + _ => unreachable!(), + }; + + let sender_address = Address::random(); + let funding = U256::from(1e18 as u64); + rpc.anvil_set_balance(sender_address, funding) + .await + 
.unwrap(); + rpc.anvil_auto_impersonate_account(true).await.unwrap(); + let new_req = queried_event_req.with_from(sender_address); + let tx_req_final = rpc + .fill(new_req) + .await + .unwrap() + .as_builder() + .unwrap() + .clone(); + pending_tx_builders.push(rpc.send_transaction(tx_req_final).await.unwrap()); + // Mine a block, it should include all the transactions created above. rpc.anvil_mine(Some(U256::from(1u8)), None).await.unwrap(); @@ -183,10 +259,19 @@ pub fn generate_receipt_test_info() -> ReceiptTestInfo<1, 0> { // We want to get the event signature so we can make a ReceiptQuery let all_events = EventEmitter::abi::events(); - let events = all_events.get("testEvent").unwrap(); + let events = match (NO_TOPICS, MAX_DATA) { + (1, 0) => all_events.get("testEvent").unwrap(), + (2, 0) => all_events.get("twoIndexed").unwrap(), + (3, 0) => all_events.get("threeIndexed").unwrap(), + (3, 1) => all_events.get("oneData").unwrap(), + (3, 2) => all_events.get("twoData").unwrap(), + _ => panic!(), + }; - let receipt_query = - ReceiptQuery::<1, 0>::new(*event_contract.address(), &events[0].signature()); + let receipt_query = ReceiptQuery::::new( + *event_contract.address(), + &events[0].signature(), + ); let proofs = receipt_query .query_receipt_proofs(rpc.root(), BlockNumberOrTag::Number(block_number)) diff --git a/mp2-v1/src/final_extraction/receipt_circuit.rs b/mp2-v1/src/final_extraction/receipt_circuit.rs index bce6854eb..56a540370 100644 --- a/mp2-v1/src/final_extraction/receipt_circuit.rs +++ b/mp2-v1/src/final_extraction/receipt_circuit.rs @@ -58,7 +58,7 @@ impl ReceiptExtractionCircuit { b: &mut CircuitBuilder, block_pi: &[Target], value_pi: &[Target], - ) -> ReceiptExtractionWires { + ) { // TODO: homogeinize the public inputs structs let block_pi = block_extraction::public_inputs::PublicInputs::::from_slice(block_pi); @@ -85,14 +85,6 @@ impl ReceiptExtractionCircuit { &[b._false().target], ) .register_args(b); - - ReceiptExtractionWires { - dm: 
value_pi.metadata_digest_target(), - dv: value_pi.values_digest_target(), - bh: block_pi.block_hash_raw().try_into().unwrap(), // safe to unwrap as we give as input the slice of the expected length - prev_bh: block_pi.prev_block_hash_raw().try_into().unwrap(), // safe to unwrap as we give as input the slice of the expected length - bn: block_pi.block_number(), - } } } @@ -102,8 +94,6 @@ impl ReceiptExtractionCircuit { pub(crate) struct ReceiptRecursiveWires { /// Wires containing the block and value proof verification: ReceiptCircuitProofWires, - /// Wires information to check that the value corresponds to the block - consistency: ReceiptExtractionWires, } impl CircuitLogicWires for ReceiptRecursiveWires { @@ -120,15 +110,12 @@ impl CircuitLogicWires for ReceiptRecursiveWires { ) -> Self { // value proof for table a and value proof for table b = 2 let verification = ReceiptCircuitProofInputs::build(builder, &builder_parameters); - let consistency = ReceiptExtractionCircuit::build( + ReceiptExtractionCircuit::build( builder, verification.get_block_public_inputs(), verification.get_value_public_inputs(), ); - Self { - verification, - consistency, - } + Self { verification } } fn assign_input(&self, inputs: Self::Inputs, pw: &mut PartialWitness) -> anyhow::Result<()> { @@ -231,14 +218,13 @@ impl ReceiptCircuitProofWires { #[cfg(test)] pub(crate) mod test { - use std::iter::once; use crate::final_extraction::{base_circuit::test::ProofsPi, PublicInputs}; use super::*; use alloy::primitives::U256; use anyhow::Result; - use itertools::Itertools; + use mp2_common::{ keccak::PACKED_HASH_LEN, utils::{Endianness, Packer, ToFields}, @@ -269,7 +255,7 @@ pub(crate) mod test { type Wires = TestReceiptWires; fn build(c: &mut CircuitBuilder) -> Self::Wires { let proofs_pi = ReceiptsProofsPiTarget::new(c); - let _ = ReceiptExtractionCircuit::build(c, &proofs_pi.blocks_pi, &proofs_pi.values_pi); + ReceiptExtractionCircuit::build(c, &proofs_pi.blocks_pi, &proofs_pi.values_pi); 
TestReceiptWires { pis: proofs_pi } } fn prove(&self, pw: &mut PartialWitness, wires: &Self::Wires) { @@ -316,16 +302,21 @@ pub(crate) mod test { let (k, t) = original.mpt_key_info(); let new_value_digest = Point::rand(); let new_metadata_digest = Point::rand(); - let new_values_pi = block_pi - .receipt_root_raw() - .iter() - .chain(k.iter()) - .chain(once(&t)) - .chain(new_value_digest.to_weierstrass().to_fields().iter()) - .chain(new_metadata_digest.to_weierstrass().to_fields().iter()) - .chain(once(&original.n())) - .cloned() - .collect_vec(); + let new_values_pi = new_extraction_public_inputs( + &block_pi + .receipt_root_raw() + .iter() + .map(|byte| byte.to_canonical_u64() as u32) + .collect::>(), + &k.iter() + .map(|byte| byte.to_canonical_u64() as u8) + .collect::>(), + t.to_canonical_u64() as usize, + &new_value_digest.to_weierstrass(), + &new_metadata_digest.to_weierstrass(), + original.n().to_canonical_u64() as usize, + ); + Self { blocks_pi: base_info.blocks_pi.clone(), values_pi: new_values_pi, @@ -340,10 +331,6 @@ pub(crate) mod test { values_extraction::PublicInputs::new(&self.values_pi) } - /// check public inputs of the proof match with the ones in `self`. 
- /// `compound_type` is a flag to specify whether `proof` is generated for a simple or compound type - /// `length_dm` is the metadata digest of a length proof, which is provided only for proofs related - /// to a compound type with a length slot pub(crate) fn check_proof_public_inputs(&self, proof: &ProofWithPublicInputs) { let proof_pis = PublicInputs::from_slice(&proof.public_inputs); let block_pi = self.block_inputs(); @@ -410,8 +397,9 @@ pub(crate) mod test { #[test] fn final_simple_value() -> Result<()> { let pis = ReceiptsProofsPi::random(); - let test_circuit = TestReceiptCircuit { pis }; - run_circuit::(test_circuit); + let test_circuit = TestReceiptCircuit { pis: pis.clone() }; + let proof = run_circuit::(test_circuit); + pis.check_proof_public_inputs(&proof); Ok(()) } } diff --git a/mp2-v1/src/values_extraction/api.rs b/mp2-v1/src/values_extraction/api.rs index d92a0d498..8639474eb 100644 --- a/mp2-v1/src/values_extraction/api.rs +++ b/mp2-v1/src/values_extraction/api.rs @@ -907,7 +907,7 @@ mod tests { } #[test] fn test_receipt_api() { - let receipt_proof_infos = generate_receipt_test_info(); + let receipt_proof_infos = generate_receipt_test_info::<1, 0>(); let receipt_proofs = receipt_proof_infos.proofs(); let query = receipt_proof_infos.query(); // We check that we have enough receipts and then take the second and third info diff --git a/mp2-v1/src/values_extraction/leaf_receipt.rs b/mp2-v1/src/values_extraction/leaf_receipt.rs index ef1bfcf04..93a3ca983 100644 --- a/mp2-v1/src/values_extraction/leaf_receipt.rs +++ b/mp2-v1/src/values_extraction/leaf_receipt.rs @@ -362,7 +362,6 @@ where .topics() .contains(&B256::from(query.event.event_signature)) { - println!("relevant offset: {}", logs_offset + log_off); Some(logs_offset + log_off) } else { Some(0usize) @@ -716,6 +715,7 @@ mod tests { circuit::{run_circuit, UserCircuit}, mpt_sequential::generate_receipt_test_info, }; + #[derive(Clone, Debug)] struct TestReceiptLeafCircuit { c: ReceiptLeafCircuit, @@ 
-739,13 +739,27 @@ mod tests { #[test] fn test_leaf_circuit() { const NODE_LEN: usize = 512; + test_leaf_circuit_helper::<1, 0, NODE_LEN>(); + test_leaf_circuit_helper::<2, 0, NODE_LEN>(); + test_leaf_circuit_helper::<3, 0, NODE_LEN>(); + test_leaf_circuit_helper::<3, 1, NODE_LEN>(); + test_leaf_circuit_helper::<3, 2, NODE_LEN>(); + } - let receipt_proof_infos = generate_receipt_test_info(); + fn test_leaf_circuit_helper< + const NO_TOPICS: usize, + const MAX_DATA: usize, + const NODE_LEN: usize, + >() + where + [(); PAD_LEN(NODE_LEN)]:, + { + let receipt_proof_infos = generate_receipt_test_info::(); let proofs = receipt_proof_infos.proofs(); let info = proofs.first().unwrap(); let query = receipt_proof_infos.query(); - let c = ReceiptLeafCircuit::::new(info, query).unwrap(); + let c = ReceiptLeafCircuit::::new::(info, query).unwrap(); let test_circuit = TestReceiptLeafCircuit { c }; let node = info.mpt_proof.last().unwrap().clone(); From 030dc329c85945d23acb1a23bac5ecedd633f279 Mon Sep 17 00:00:00 2001 From: Zack Youell Date: Tue, 17 Dec 2024 19:21:26 +0000 Subject: [PATCH 16/47] Fixed tests to pass CI --- mp2-test/src/mpt_sequential.rs | 62 +++++++++++--------- mp2-v1/src/values_extraction/leaf_receipt.rs | 4 +- 2 files changed, 38 insertions(+), 28 deletions(-) diff --git a/mp2-test/src/mpt_sequential.rs b/mp2-test/src/mpt_sequential.rs index bd49669fa..42e550623 100644 --- a/mp2-test/src/mpt_sequential.rs +++ b/mp2-test/src/mpt_sequential.rs @@ -191,11 +191,17 @@ pub fn generate_receipt_test_info 4 => event_contract.testTwoData().into_transaction_request(), _ => unreachable!(), }; + let random_two = match (0..5).sample_single(&mut rng) { + 0 => event_contract.testEmit().into_transaction_request(), + 1 => event_contract.testTwoIndexed().into_transaction_request(), + 2 => event_contract.testThreeIndexed().into_transaction_request(), + 3 => event_contract.testOneData().into_transaction_request(), + 4 => event_contract.testTwoData().into_transaction_request(), + _ 
=> unreachable!(), + }; let tx_req = match i % 4 { 0 => random, - 1 => event_contract - .twoEmits(U256::from((0..5).sample_single(&mut rng))) - .into_transaction_request(), + 1 => random_two, 2 => other_contract.otherEmit().into_transaction_request(), 3 => other_contract.twoEmits().into_transaction_request(), _ => unreachable!(), @@ -219,31 +225,33 @@ pub fn generate_receipt_test_info pending_tx_builders.push(rpc.send_transaction(tx_req_final).await.unwrap()); } - // Finally we guarantee at least one of the event we are going to query for - let queried_event_req = match (NO_TOPICS, MAX_DATA) { - (1, 0) => event_contract.testEmit().into_transaction_request(), - (2, 0) => event_contract.testTwoIndexed().into_transaction_request(), - (3, 0) => event_contract.testThreeIndexed().into_transaction_request(), - (3, 1) => event_contract.testOneData().into_transaction_request(), - (3, 2) => event_contract.testTwoData().into_transaction_request(), - _ => unreachable!(), - }; + // Finally we guarantee at least three of the event we are going to query for + for _ in 0..3 { + let queried_event_req = match (NO_TOPICS, MAX_DATA) { + (1, 0) => event_contract.testEmit().into_transaction_request(), + (2, 0) => event_contract.testTwoIndexed().into_transaction_request(), + (3, 0) => event_contract.testThreeIndexed().into_transaction_request(), + (3, 1) => event_contract.testOneData().into_transaction_request(), + (3, 2) => event_contract.testTwoData().into_transaction_request(), + _ => unreachable!(), + }; - let sender_address = Address::random(); - let funding = U256::from(1e18 as u64); - rpc.anvil_set_balance(sender_address, funding) - .await - .unwrap(); - rpc.anvil_auto_impersonate_account(true).await.unwrap(); - let new_req = queried_event_req.with_from(sender_address); - let tx_req_final = rpc - .fill(new_req) - .await - .unwrap() - .as_builder() - .unwrap() - .clone(); - pending_tx_builders.push(rpc.send_transaction(tx_req_final).await.unwrap()); + let sender_address = 
Address::random(); + let funding = U256::from(1e18 as u64); + rpc.anvil_set_balance(sender_address, funding) + .await + .unwrap(); + rpc.anvil_auto_impersonate_account(true).await.unwrap(); + let new_req = queried_event_req.with_from(sender_address); + let tx_req_final = rpc + .fill(new_req) + .await + .unwrap() + .as_builder() + .unwrap() + .clone(); + pending_tx_builders.push(rpc.send_transaction(tx_req_final).await.unwrap()); + } // Mine a block, it should include all the transactions created above. rpc.anvil_mine(Some(U256::from(1u8)), None).await.unwrap(); diff --git a/mp2-v1/src/values_extraction/leaf_receipt.rs b/mp2-v1/src/values_extraction/leaf_receipt.rs index 93a3ca983..cef724c25 100644 --- a/mp2-v1/src/values_extraction/leaf_receipt.rs +++ b/mp2-v1/src/values_extraction/leaf_receipt.rs @@ -41,7 +41,7 @@ use rlp::Encodable; use serde::{Deserialize, Serialize}; use std::{array::from_fn, iter}; /// Maximum number of logs per transaction we can process -const MAX_LOGS_PER_TX: usize = 2; +const MAX_LOGS_PER_TX: usize = 1; /// The number of bytes that `gas_used` could take up in the receipt. /// We set a max of 3 here because this would be over half the gas in the block for Ethereum. 
@@ -243,6 +243,7 @@ impl EventWires { // Pack the data and get the digest let packed_data = data_bytes.arr.pack(b, Endianness::Big); + let data_digest = b.map_to_curve_point( &std::iter::once(log_column.column_id) .chain(packed_data) @@ -764,6 +765,7 @@ mod tests { let node = info.mpt_proof.last().unwrap().clone(); + assert!(node.len() <= NODE_LEN); let proof = run_circuit::(test_circuit); let pi = PublicInputs::new(&proof.public_inputs); From 551495c07ee9bf0fc4e2772ba14494f9b5baceba Mon Sep 17 00:00:00 2001 From: Zack Youell Date: Wed, 18 Dec 2024 12:11:27 +0000 Subject: [PATCH 17/47] Resolves CRY-22 --- mp2-v1/test-contracts/src/Event.sol | 105 + mp2-v1/tests/common/bindings/eventemitter.rs | 4197 ++++++++++++++++++ mp2-v1/tests/common/bindings/mod.rs | 1 + mp2-v1/tests/common/bindings/simple.rs | 192 +- 4 files changed, 4432 insertions(+), 63 deletions(-) create mode 100644 mp2-v1/test-contracts/src/Event.sol create mode 100644 mp2-v1/tests/common/bindings/eventemitter.rs diff --git a/mp2-v1/test-contracts/src/Event.sol b/mp2-v1/test-contracts/src/Event.sol new file mode 100644 index 000000000..683030020 --- /dev/null +++ b/mp2-v1/test-contracts/src/Event.sol @@ -0,0 +1,105 @@ +// SPDX-License-Identifier: UNLICENSED +pragma solidity ^0.8.13; + +contract EventEmitter { + uint256 public number; + event noIndexed(); + event oneIndexed(uint256 indexed num); + event twoIndexed(uint256 indexed num, uint256 indexed numTwo); + event threeIndexed( + uint256 indexed num, + uint256 indexed numTwo, + uint256 indexed numThree + ); + event oneData( + uint256 indexed num, + uint256 indexed numTwo, + uint256 indexed numThree, + uint256 numFour + ); + event twoData( + uint256 indexed num, + uint256 indexed numTwo, + uint256 indexed numThree, + uint256 numFour, + uint256 numFive + ); + event noIOneD(uint256 num); + event noITwoD(uint256 num, uint256 numTwo); + event oneIOneD(uint256 indexed num, uint256 numTwo); + event oneITwoD(uint256 indexed num, uint256 numTwo, uint256 
numThree); + event twoIOneD( + uint256 indexed num, + uint256 indexed numTwo, + uint256 numThree + ); + event twoITwoD( + uint256 indexed num, + uint256 indexed numTwo, + uint256 numThree, + uint256 numFour + ); + + function testNoIndexed() public { + emit noIndexed(); + } + + function testOneIndexed() public { + emit oneIndexed(number); + increment(); + } + + function testTwoIndexed() public { + emit twoIndexed(number, number + 1); + increment(); + } + + function testThreeIndexed() public { + emit threeIndexed(number, number + 1, number + 2); + increment(); + } + + function testOneData() public { + emit oneData(number, number + 1, number + 2, number + 3); + increment(); + } + + function testTwoData() public { + emit twoData(number, number + 1, number + 2, number + 3, number + 4); + increment(); + } + + function testNoIOneD() public { + emit noIOneD(number); + increment(); + } + + function testNoITwoD() public { + emit noITwoD(number, number + 1); + increment(); + } + + function testOneIOneD() public { + emit oneIOneD(number, number + 1); + increment(); + } + + function testOneITwoD() public { + emit oneITwoD(number, number + 1, number + 2); + increment(); + } + + function testTwoIOneD() public { + emit twoIOneD(number, number + 1, number + 2); + increment(); + } + + function testTwoITwoD() public { + emit twoITwoD(number, number + 1, number + 2, number + 3); + increment(); + } + + function increment() public { + number++; + } +} diff --git a/mp2-v1/tests/common/bindings/eventemitter.rs b/mp2-v1/tests/common/bindings/eventemitter.rs new file mode 100644 index 000000000..7843a3356 --- /dev/null +++ b/mp2-v1/tests/common/bindings/eventemitter.rs @@ -0,0 +1,4197 @@ +/** + +Generated by the following Solidity interface... 
+```solidity +interface EventEmitter { + event noIOneD(uint256 num); + event noITwoD(uint256 num, uint256 numTwo); + event noIndexed(); + event oneData(uint256 indexed num, uint256 indexed numTwo, uint256 indexed numThree, uint256 numFour); + event oneIOneD(uint256 indexed num, uint256 numTwo); + event oneITwoD(uint256 indexed num, uint256 numTwo, uint256 numThree); + event oneIndexed(uint256 indexed num); + event threeIndexed(uint256 indexed num, uint256 indexed numTwo, uint256 indexed numThree); + event twoData(uint256 indexed num, uint256 indexed numTwo, uint256 indexed numThree, uint256 numFour, uint256 numFive); + event twoIOneD(uint256 indexed num, uint256 indexed numTwo, uint256 numThree); + event twoITwoD(uint256 indexed num, uint256 indexed numTwo, uint256 numThree, uint256 numFour); + event twoIndexed(uint256 indexed num, uint256 indexed numTwo); + + function increment() external; + function number() external view returns (uint256); + function testNoIOneD() external; + function testNoITwoD() external; + function testNoIndexed() external; + function testOneData() external; + function testOneIOneD() external; + function testOneITwoD() external; + function testOneIndexed() external; + function testThreeIndexed() external; + function testTwoData() external; + function testTwoIOneD() external; + function testTwoITwoD() external; + function testTwoIndexed() external; +} +``` + +...which was generated by the following JSON ABI: +```json +[ + { + "type": "function", + "name": "increment", + "inputs": [], + "outputs": [], + "stateMutability": "nonpayable" + }, + { + "type": "function", + "name": "number", + "inputs": [], + "outputs": [ + { + "name": "", + "type": "uint256", + "internalType": "uint256" + } + ], + "stateMutability": "view" + }, + { + "type": "function", + "name": "testNoIOneD", + "inputs": [], + "outputs": [], + "stateMutability": "nonpayable" + }, + { + "type": "function", + "name": "testNoITwoD", + "inputs": [], + "outputs": [], + 
"stateMutability": "nonpayable" + }, + { + "type": "function", + "name": "testNoIndexed", + "inputs": [], + "outputs": [], + "stateMutability": "nonpayable" + }, + { + "type": "function", + "name": "testOneData", + "inputs": [], + "outputs": [], + "stateMutability": "nonpayable" + }, + { + "type": "function", + "name": "testOneIOneD", + "inputs": [], + "outputs": [], + "stateMutability": "nonpayable" + }, + { + "type": "function", + "name": "testOneITwoD", + "inputs": [], + "outputs": [], + "stateMutability": "nonpayable" + }, + { + "type": "function", + "name": "testOneIndexed", + "inputs": [], + "outputs": [], + "stateMutability": "nonpayable" + }, + { + "type": "function", + "name": "testThreeIndexed", + "inputs": [], + "outputs": [], + "stateMutability": "nonpayable" + }, + { + "type": "function", + "name": "testTwoData", + "inputs": [], + "outputs": [], + "stateMutability": "nonpayable" + }, + { + "type": "function", + "name": "testTwoIOneD", + "inputs": [], + "outputs": [], + "stateMutability": "nonpayable" + }, + { + "type": "function", + "name": "testTwoITwoD", + "inputs": [], + "outputs": [], + "stateMutability": "nonpayable" + }, + { + "type": "function", + "name": "testTwoIndexed", + "inputs": [], + "outputs": [], + "stateMutability": "nonpayable" + }, + { + "type": "event", + "name": "noIOneD", + "inputs": [ + { + "name": "num", + "type": "uint256", + "indexed": false, + "internalType": "uint256" + } + ], + "anonymous": false + }, + { + "type": "event", + "name": "noITwoD", + "inputs": [ + { + "name": "num", + "type": "uint256", + "indexed": false, + "internalType": "uint256" + }, + { + "name": "numTwo", + "type": "uint256", + "indexed": false, + "internalType": "uint256" + } + ], + "anonymous": false + }, + { + "type": "event", + "name": "noIndexed", + "inputs": [], + "anonymous": false + }, + { + "type": "event", + "name": "oneData", + "inputs": [ + { + "name": "num", + "type": "uint256", + "indexed": true, + "internalType": "uint256" + }, + { + 
"name": "numTwo", + "type": "uint256", + "indexed": true, + "internalType": "uint256" + }, + { + "name": "numThree", + "type": "uint256", + "indexed": true, + "internalType": "uint256" + }, + { + "name": "numFour", + "type": "uint256", + "indexed": false, + "internalType": "uint256" + } + ], + "anonymous": false + }, + { + "type": "event", + "name": "oneIOneD", + "inputs": [ + { + "name": "num", + "type": "uint256", + "indexed": true, + "internalType": "uint256" + }, + { + "name": "numTwo", + "type": "uint256", + "indexed": false, + "internalType": "uint256" + } + ], + "anonymous": false + }, + { + "type": "event", + "name": "oneITwoD", + "inputs": [ + { + "name": "num", + "type": "uint256", + "indexed": true, + "internalType": "uint256" + }, + { + "name": "numTwo", + "type": "uint256", + "indexed": false, + "internalType": "uint256" + }, + { + "name": "numThree", + "type": "uint256", + "indexed": false, + "internalType": "uint256" + } + ], + "anonymous": false + }, + { + "type": "event", + "name": "oneIndexed", + "inputs": [ + { + "name": "num", + "type": "uint256", + "indexed": true, + "internalType": "uint256" + } + ], + "anonymous": false + }, + { + "type": "event", + "name": "threeIndexed", + "inputs": [ + { + "name": "num", + "type": "uint256", + "indexed": true, + "internalType": "uint256" + }, + { + "name": "numTwo", + "type": "uint256", + "indexed": true, + "internalType": "uint256" + }, + { + "name": "numThree", + "type": "uint256", + "indexed": true, + "internalType": "uint256" + } + ], + "anonymous": false + }, + { + "type": "event", + "name": "twoData", + "inputs": [ + { + "name": "num", + "type": "uint256", + "indexed": true, + "internalType": "uint256" + }, + { + "name": "numTwo", + "type": "uint256", + "indexed": true, + "internalType": "uint256" + }, + { + "name": "numThree", + "type": "uint256", + "indexed": true, + "internalType": "uint256" + }, + { + "name": "numFour", + "type": "uint256", + "indexed": false, + "internalType": "uint256" + }, + { 
+ "name": "numFive", + "type": "uint256", + "indexed": false, + "internalType": "uint256" + } + ], + "anonymous": false + }, + { + "type": "event", + "name": "twoIOneD", + "inputs": [ + { + "name": "num", + "type": "uint256", + "indexed": true, + "internalType": "uint256" + }, + { + "name": "numTwo", + "type": "uint256", + "indexed": true, + "internalType": "uint256" + }, + { + "name": "numThree", + "type": "uint256", + "indexed": false, + "internalType": "uint256" + } + ], + "anonymous": false + }, + { + "type": "event", + "name": "twoITwoD", + "inputs": [ + { + "name": "num", + "type": "uint256", + "indexed": true, + "internalType": "uint256" + }, + { + "name": "numTwo", + "type": "uint256", + "indexed": true, + "internalType": "uint256" + }, + { + "name": "numThree", + "type": "uint256", + "indexed": false, + "internalType": "uint256" + }, + { + "name": "numFour", + "type": "uint256", + "indexed": false, + "internalType": "uint256" + } + ], + "anonymous": false + }, + { + "type": "event", + "name": "twoIndexed", + "inputs": [ + { + "name": "num", + "type": "uint256", + "indexed": true, + "internalType": "uint256" + }, + { + "name": "numTwo", + "type": "uint256", + "indexed": true, + "internalType": "uint256" + } + ], + "anonymous": false + } +] +```*/ +#[allow( + non_camel_case_types, + non_snake_case, + clippy::pub_underscore_fields, + clippy::style, + clippy::empty_structs_with_brackets +)] +pub mod EventEmitter { + use super::*; + use alloy::sol_types as alloy_sol_types; + /// The creation / init bytecode of the contract. 
+ /// + /// ```text + ///0x608060405234801561000f575f80fd5b5061057e8061001d5f395ff3fe608060405234801561000f575f80fd5b50600436106100e3575f3560e01c806346d6a7b5116100885780638381f58a116100635780638381f58a14610139578063b1e057a914610153578063c02420001461015b578063d09de08a14610163575f80fd5b806346d6a7b51461012157806363eb70f014610129578063729d452014610131575f80fd5b806331c1c63b116100c357806331c1c63b14610101578063338b538a146101095780634282ed58146101115780634369f72814610119575f80fd5b80623c7e56146100e7578062d83b55146100f15780632dc34764146100f9575b5f80fd5b6100ef61016b565b005b6100ef6101cf565b6100ef61022b565b6100ef610295565b6100ef6102e2565b6100ef610322565b6100ef61034b565b6100ef610387565b6100ef6103d7565b6100ef61042c565b6101415f5481565b60405190815260200160405180910390f35b6100ef61045f565b6100ef6104bc565b6100ef6104ee565b5f54610178906002610517565b5f54610185906001610517565b5f547ff57f433eb9493cf4d9cb5763c12221d9b095804644d4ee006a78c72076cff9476101b3826003610517565b6040519081526020015b60405180910390a46101cd6104ee565b565b5f547fef4c88193498df237f039055d1212ac2a3b93ed8aea88c814312e50f6a32592d6101fd826001610517565b5f5461020a906002610517565b604080519283526020830191909152015b60405180910390a26101cd6104ee565b5f54610238906002610517565b5f54610245906001610517565b5f547ff03d29753fbd5ac209bab88a99b396bcc25c3e72530d02c81aea4d324ab3d742610273826003610517565b5f54610280906004610517565b604080519283526020830191909152016101bd565b5f546102a2906002610517565b5f546102af906001610517565b5f805460405190917f1d18de2cd8798a1c29b9255930c807eb6c84ae0acb2219acbb11e0f65cf813e991a46101cd6104ee565b5f546102ef906001610517565b5f805460405190917fa6baf14d8f11d7a4497089bb3fca0adfc34837cfb1f4aa370634d36ef0305b4691a36101cd6104ee565b6040517ef7c74f0533aa15e5ac7cafa9f9261d14da1e78830deba7110fbc79001ed15e905f90a1565b5f547f168718c0b1eb6bfd7b0edecea5c6fc6502737ad73a4c9f52ffa7e553c8eb9f53610379826001610517565b60405190815260200161021b565b5f547f2fa61517ddf9dc7f2f3d5ca72414a01c834d9c5bb7c336c977423c85094bba61906103b6816001610517565b6040805192835
26020830191909152015b60405180910390a16101cd6104ee565b5f546103e4906001610517565b5f547f3bb2d6337882faa5526cf806c9763904a90f3363590dd4386913e3fcd8a2e1d1610412826002610517565b6040519081526020015b60405180910390a36101cd6104ee565b5f805460405190917fc2809a1a2fb95d84cfdc488cdb320a144c158f8d44836c9c2d4badba082bfdfa91a26101cd6104ee565b5f5461046c906001610517565b5f547f4b92229abe204a30d7b088d8110291760934d65b3c960680ad94e05f52a8860561049a826002610517565b5f546104a7906003610517565b6040805192835260208301919091520161041c565b7f04f7fb289e51ea9996ec98e62ff4b651becfa6e53f3b850be209b69741c66f245f546040516103c791815260200190565b5f805490806104fc83610530565b9190505550565b634e487b7160e01b5f52601160045260245ffd5b8082018082111561052a5761052a610503565b92915050565b5f6001820161054157610541610503565b506001019056fea2646970667358221220b4cc2df5eed06f538a31157edfaeeee591a5719d35fb47f3b6ce5d31c1ffe2f964736f6c63430008180033 + /// ``` + #[rustfmt::skip] + #[allow(clippy::all)] + pub static BYTECODE: alloy_sol_types::private::Bytes = alloy_sol_types::private::Bytes::from_static( + b"`\x80`@R4\x80\x15a\0\x0FW_\x80\xFD[Pa\x05~\x80a\0\x1D_9_\xF3\xFE`\x80`@R4\x80\x15a\0\x0FW_\x80\xFD[P`\x046\x10a\0\xE3W_5`\xE0\x1C\x80cF\xD6\xA7\xB5\x11a\0\x88W\x80c\x83\x81\xF5\x8A\x11a\0cW\x80c\x83\x81\xF5\x8A\x14a\x019W\x80c\xB1\xE0W\xA9\x14a\x01SW\x80c\xC0$ \0\x14a\x01[W\x80c\xD0\x9D\xE0\x8A\x14a\x01cW_\x80\xFD[\x80cF\xD6\xA7\xB5\x14a\x01!W\x80cc\xEBp\xF0\x14a\x01)W\x80cr\x9DE \x14a\x011W_\x80\xFD[\x80c1\xC1\xC6;\x11a\0\xC3W\x80c1\xC1\xC6;\x14a\x01\x01W\x80c3\x8BS\x8A\x14a\x01\tW\x80cB\x82\xEDX\x14a\x01\x11W\x80cCi\xF7(\x14a\x01\x19W_\x80\xFD[\x80b<~V\x14a\0\xE7W\x80b\xD8;U\x14a\0\xF1W\x80c-\xC3Gd\x14a\0\xF9W[_\x80\xFD[a\0\xEFa\x01kV[\0[a\0\xEFa\x01\xCFV[a\0\xEFa\x02+V[a\0\xEFa\x02\x95V[a\0\xEFa\x02\xE2V[a\0\xEFa\x03\"V[a\0\xEFa\x03KV[a\0\xEFa\x03\x87V[a\0\xEFa\x03\xD7V[a\0\xEFa\x04,V[a\x01A_T\x81V[`@Q\x90\x81R` 
\x01`@Q\x80\x91\x03\x90\xF3[a\0\xEFa\x04_V[a\0\xEFa\x04\xBCV[a\0\xEFa\x04\xEEV[_Ta\x01x\x90`\x02a\x05\x17V[_Ta\x01\x85\x90`\x01a\x05\x17V[_T\x7F\xF5\x7FC>\xB9I<\xF4\xD9\xCBWc\xC1\"!\xD9\xB0\x95\x80FD\xD4\xEE\0jx\xC7 v\xCF\xF9Ga\x01\xB3\x82`\x03a\x05\x17V[`@Q\x90\x81R` \x01[`@Q\x80\x91\x03\x90\xA4a\x01\xCDa\x04\xEEV[V[_T\x7F\xEFL\x88\x194\x98\xDF#\x7F\x03\x90U\xD1!*\xC2\xA3\xB9>\xD8\xAE\xA8\x8C\x81C\x12\xE5\x0Fj2Y-a\x01\xFD\x82`\x01a\x05\x17V[_Ta\x02\n\x90`\x02a\x05\x17V[`@\x80Q\x92\x83R` \x83\x01\x91\x90\x91R\x01[`@Q\x80\x91\x03\x90\xA2a\x01\xCDa\x04\xEEV[_Ta\x028\x90`\x02a\x05\x17V[_Ta\x02E\x90`\x01a\x05\x17V[_T\x7F\xF0=)u?\xBDZ\xC2\t\xBA\xB8\x8A\x99\xB3\x96\xBC\xC2\\>rS\r\x02\xC8\x1A\xEAM2J\xB3\xD7Ba\x02s\x82`\x03a\x05\x17V[_Ta\x02\x80\x90`\x04a\x05\x17V[`@\x80Q\x92\x83R` \x83\x01\x91\x90\x91R\x01a\x01\xBDV[_Ta\x02\xA2\x90`\x02a\x05\x17V[_Ta\x02\xAF\x90`\x01a\x05\x17V[_\x80T`@Q\x90\x91\x7F\x1D\x18\xDE,\xD8y\x8A\x1C)\xB9%Y0\xC8\x07\xEBl\x84\xAE\n\xCB\"\x19\xAC\xBB\x11\xE0\xF6\\\xF8\x13\xE9\x91\xA4a\x01\xCDa\x04\xEEV[_Ta\x02\xEF\x90`\x01a\x05\x17V[_\x80T`@Q\x90\x91\x7F\xA6\xBA\xF1M\x8F\x11\xD7\xA4Ip\x89\xBB?\xCA\n\xDF\xC3H7\xCF\xB1\xF4\xAA7\x064\xD3n\xF00[F\x91\xA3a\x01\xCDa\x04\xEEV[`@Q~\xF7\xC7O\x053\xAA\x15\xE5\xAC|\xAF\xA9\xF9&\x1D\x14\xDA\x1Ex\x83\r\xEB\xA7\x11\x0F\xBCy\0\x1E\xD1^\x90_\x90\xA1V[_T\x7F\x16\x87\x18\xC0\xB1\xEBk\xFD{\x0E\xDE\xCE\xA5\xC6\xFCe\x02sz\xD7:L\x9FR\xFF\xA7\xE5S\xC8\xEB\x9FSa\x03y\x82`\x01a\x05\x17V[`@Q\x90\x81R` \x01a\x02\x1BV[_T\x7F/\xA6\x15\x17\xDD\xF9\xDC\x7F/=\\\xA7$\x14\xA0\x1C\x83M\x9C[\xB7\xC36\xC9wB<\x85\tK\xBAa\x90a\x03\xB6\x81`\x01a\x05\x17V[`@\x80Q\x92\x83R` \x83\x01\x91\x90\x91R\x01[`@Q\x80\x91\x03\x90\xA1a\x01\xCDa\x04\xEEV[_Ta\x03\xE4\x90`\x01a\x05\x17V[_T\x7F;\xB2\xD63x\x82\xFA\xA5Rl\xF8\x06\xC9v9\x04\xA9\x0F3cY\r\xD48i\x13\xE3\xFC\xD8\xA2\xE1\xD1a\x04\x12\x82`\x02a\x05\x17V[`@Q\x90\x81R` 
\x01[`@Q\x80\x91\x03\x90\xA3a\x01\xCDa\x04\xEEV[_\x80T`@Q\x90\x91\x7F\xC2\x80\x9A\x1A/\xB9]\x84\xCF\xDCH\x8C\xDB2\n\x14L\x15\x8F\x8DD\x83l\x9C-K\xAD\xBA\x08+\xFD\xFA\x91\xA2a\x01\xCDa\x04\xEEV[_Ta\x04l\x90`\x01a\x05\x17V[_T\x7FK\x92\"\x9A\xBE J0\xD7\xB0\x88\xD8\x11\x02\x91v\t4\xD6[<\x96\x06\x80\xAD\x94\xE0_R\xA8\x86\x05a\x04\x9A\x82`\x02a\x05\x17V[_Ta\x04\xA7\x90`\x03a\x05\x17V[`@\x80Q\x92\x83R` \x83\x01\x91\x90\x91R\x01a\x04\x1CV[\x7F\x04\xF7\xFB(\x9EQ\xEA\x99\x96\xEC\x98\xE6/\xF4\xB6Q\xBE\xCF\xA6\xE5?;\x85\x0B\xE2\t\xB6\x97A\xC6o$_T`@Qa\x03\xC7\x91\x81R` \x01\x90V[_\x80T\x90\x80a\x04\xFC\x83a\x050V[\x91\x90PUPV[cNH{q`\xE0\x1B_R`\x11`\x04R`$_\xFD[\x80\x82\x01\x80\x82\x11\x15a\x05*Wa\x05*a\x05\x03V[\x92\x91PPV[_`\x01\x82\x01a\x05AWa\x05Aa\x05\x03V[P`\x01\x01\x90V\xFE\xA2dipfsX\"\x12 \xB4\xCC-\xF5\xEE\xD0oS\x8A1\x15~\xDF\xAE\xEE\xE5\x91\xA5q\x9D5\xFBG\xF3\xB6\xCE]1\xC1\xFF\xE2\xF9dsolcC\0\x08\x18\x003", + ); + /// The runtime bytecode of the contract, as deployed on the network. + /// + /// ```text + 
///0x608060405234801561000f575f80fd5b50600436106100e3575f3560e01c806346d6a7b5116100885780638381f58a116100635780638381f58a14610139578063b1e057a914610153578063c02420001461015b578063d09de08a14610163575f80fd5b806346d6a7b51461012157806363eb70f014610129578063729d452014610131575f80fd5b806331c1c63b116100c357806331c1c63b14610101578063338b538a146101095780634282ed58146101115780634369f72814610119575f80fd5b80623c7e56146100e7578062d83b55146100f15780632dc34764146100f9575b5f80fd5b6100ef61016b565b005b6100ef6101cf565b6100ef61022b565b6100ef610295565b6100ef6102e2565b6100ef610322565b6100ef61034b565b6100ef610387565b6100ef6103d7565b6100ef61042c565b6101415f5481565b60405190815260200160405180910390f35b6100ef61045f565b6100ef6104bc565b6100ef6104ee565b5f54610178906002610517565b5f54610185906001610517565b5f547ff57f433eb9493cf4d9cb5763c12221d9b095804644d4ee006a78c72076cff9476101b3826003610517565b6040519081526020015b60405180910390a46101cd6104ee565b565b5f547fef4c88193498df237f039055d1212ac2a3b93ed8aea88c814312e50f6a32592d6101fd826001610517565b5f5461020a906002610517565b604080519283526020830191909152015b60405180910390a26101cd6104ee565b5f54610238906002610517565b5f54610245906001610517565b5f547ff03d29753fbd5ac209bab88a99b396bcc25c3e72530d02c81aea4d324ab3d742610273826003610517565b5f54610280906004610517565b604080519283526020830191909152016101bd565b5f546102a2906002610517565b5f546102af906001610517565b5f805460405190917f1d18de2cd8798a1c29b9255930c807eb6c84ae0acb2219acbb11e0f65cf813e991a46101cd6104ee565b5f546102ef906001610517565b5f805460405190917fa6baf14d8f11d7a4497089bb3fca0adfc34837cfb1f4aa370634d36ef0305b4691a36101cd6104ee565b6040517ef7c74f0533aa15e5ac7cafa9f9261d14da1e78830deba7110fbc79001ed15e905f90a1565b5f547f168718c0b1eb6bfd7b0edecea5c6fc6502737ad73a4c9f52ffa7e553c8eb9f53610379826001610517565b60405190815260200161021b565b5f547f2fa61517ddf9dc7f2f3d5ca72414a01c834d9c5bb7c336c977423c85094bba61906103b6816001610517565b604080519283526020830191909152015b60405180910390a16101cd6104ee565b5f546103e4906001610517565b5
f547f3bb2d6337882faa5526cf806c9763904a90f3363590dd4386913e3fcd8a2e1d1610412826002610517565b6040519081526020015b60405180910390a36101cd6104ee565b5f805460405190917fc2809a1a2fb95d84cfdc488cdb320a144c158f8d44836c9c2d4badba082bfdfa91a26101cd6104ee565b5f5461046c906001610517565b5f547f4b92229abe204a30d7b088d8110291760934d65b3c960680ad94e05f52a8860561049a826002610517565b5f546104a7906003610517565b6040805192835260208301919091520161041c565b7f04f7fb289e51ea9996ec98e62ff4b651becfa6e53f3b850be209b69741c66f245f546040516103c791815260200190565b5f805490806104fc83610530565b9190505550565b634e487b7160e01b5f52601160045260245ffd5b8082018082111561052a5761052a610503565b92915050565b5f6001820161054157610541610503565b506001019056fea2646970667358221220b4cc2df5eed06f538a31157edfaeeee591a5719d35fb47f3b6ce5d31c1ffe2f964736f6c63430008180033 + /// ``` + #[rustfmt::skip] + #[allow(clippy::all)] + pub static DEPLOYED_BYTECODE: alloy_sol_types::private::Bytes = alloy_sol_types::private::Bytes::from_static( + b"`\x80`@R4\x80\x15a\0\x0FW_\x80\xFD[P`\x046\x10a\0\xE3W_5`\xE0\x1C\x80cF\xD6\xA7\xB5\x11a\0\x88W\x80c\x83\x81\xF5\x8A\x11a\0cW\x80c\x83\x81\xF5\x8A\x14a\x019W\x80c\xB1\xE0W\xA9\x14a\x01SW\x80c\xC0$ \0\x14a\x01[W\x80c\xD0\x9D\xE0\x8A\x14a\x01cW_\x80\xFD[\x80cF\xD6\xA7\xB5\x14a\x01!W\x80cc\xEBp\xF0\x14a\x01)W\x80cr\x9DE \x14a\x011W_\x80\xFD[\x80c1\xC1\xC6;\x11a\0\xC3W\x80c1\xC1\xC6;\x14a\x01\x01W\x80c3\x8BS\x8A\x14a\x01\tW\x80cB\x82\xEDX\x14a\x01\x11W\x80cCi\xF7(\x14a\x01\x19W_\x80\xFD[\x80b<~V\x14a\0\xE7W\x80b\xD8;U\x14a\0\xF1W\x80c-\xC3Gd\x14a\0\xF9W[_\x80\xFD[a\0\xEFa\x01kV[\0[a\0\xEFa\x01\xCFV[a\0\xEFa\x02+V[a\0\xEFa\x02\x95V[a\0\xEFa\x02\xE2V[a\0\xEFa\x03\"V[a\0\xEFa\x03KV[a\0\xEFa\x03\x87V[a\0\xEFa\x03\xD7V[a\0\xEFa\x04,V[a\x01A_T\x81V[`@Q\x90\x81R` \x01`@Q\x80\x91\x03\x90\xF3[a\0\xEFa\x04_V[a\0\xEFa\x04\xBCV[a\0\xEFa\x04\xEEV[_Ta\x01x\x90`\x02a\x05\x17V[_Ta\x01\x85\x90`\x01a\x05\x17V[_T\x7F\xF5\x7FC>\xB9I<\xF4\xD9\xCBWc\xC1\"!\xD9\xB0\x95\x80FD\xD4\xEE\0jx\xC7 
v\xCF\xF9Ga\x01\xB3\x82`\x03a\x05\x17V[`@Q\x90\x81R` \x01[`@Q\x80\x91\x03\x90\xA4a\x01\xCDa\x04\xEEV[V[_T\x7F\xEFL\x88\x194\x98\xDF#\x7F\x03\x90U\xD1!*\xC2\xA3\xB9>\xD8\xAE\xA8\x8C\x81C\x12\xE5\x0Fj2Y-a\x01\xFD\x82`\x01a\x05\x17V[_Ta\x02\n\x90`\x02a\x05\x17V[`@\x80Q\x92\x83R` \x83\x01\x91\x90\x91R\x01[`@Q\x80\x91\x03\x90\xA2a\x01\xCDa\x04\xEEV[_Ta\x028\x90`\x02a\x05\x17V[_Ta\x02E\x90`\x01a\x05\x17V[_T\x7F\xF0=)u?\xBDZ\xC2\t\xBA\xB8\x8A\x99\xB3\x96\xBC\xC2\\>rS\r\x02\xC8\x1A\xEAM2J\xB3\xD7Ba\x02s\x82`\x03a\x05\x17V[_Ta\x02\x80\x90`\x04a\x05\x17V[`@\x80Q\x92\x83R` \x83\x01\x91\x90\x91R\x01a\x01\xBDV[_Ta\x02\xA2\x90`\x02a\x05\x17V[_Ta\x02\xAF\x90`\x01a\x05\x17V[_\x80T`@Q\x90\x91\x7F\x1D\x18\xDE,\xD8y\x8A\x1C)\xB9%Y0\xC8\x07\xEBl\x84\xAE\n\xCB\"\x19\xAC\xBB\x11\xE0\xF6\\\xF8\x13\xE9\x91\xA4a\x01\xCDa\x04\xEEV[_Ta\x02\xEF\x90`\x01a\x05\x17V[_\x80T`@Q\x90\x91\x7F\xA6\xBA\xF1M\x8F\x11\xD7\xA4Ip\x89\xBB?\xCA\n\xDF\xC3H7\xCF\xB1\xF4\xAA7\x064\xD3n\xF00[F\x91\xA3a\x01\xCDa\x04\xEEV[`@Q~\xF7\xC7O\x053\xAA\x15\xE5\xAC|\xAF\xA9\xF9&\x1D\x14\xDA\x1Ex\x83\r\xEB\xA7\x11\x0F\xBCy\0\x1E\xD1^\x90_\x90\xA1V[_T\x7F\x16\x87\x18\xC0\xB1\xEBk\xFD{\x0E\xDE\xCE\xA5\xC6\xFCe\x02sz\xD7:L\x9FR\xFF\xA7\xE5S\xC8\xEB\x9FSa\x03y\x82`\x01a\x05\x17V[`@Q\x90\x81R` \x01a\x02\x1BV[_T\x7F/\xA6\x15\x17\xDD\xF9\xDC\x7F/=\\\xA7$\x14\xA0\x1C\x83M\x9C[\xB7\xC36\xC9wB<\x85\tK\xBAa\x90a\x03\xB6\x81`\x01a\x05\x17V[`@\x80Q\x92\x83R` \x83\x01\x91\x90\x91R\x01[`@Q\x80\x91\x03\x90\xA1a\x01\xCDa\x04\xEEV[_Ta\x03\xE4\x90`\x01a\x05\x17V[_T\x7F;\xB2\xD63x\x82\xFA\xA5Rl\xF8\x06\xC9v9\x04\xA9\x0F3cY\r\xD48i\x13\xE3\xFC\xD8\xA2\xE1\xD1a\x04\x12\x82`\x02a\x05\x17V[`@Q\x90\x81R` \x01[`@Q\x80\x91\x03\x90\xA3a\x01\xCDa\x04\xEEV[_\x80T`@Q\x90\x91\x7F\xC2\x80\x9A\x1A/\xB9]\x84\xCF\xDCH\x8C\xDB2\n\x14L\x15\x8F\x8DD\x83l\x9C-K\xAD\xBA\x08+\xFD\xFA\x91\xA2a\x01\xCDa\x04\xEEV[_Ta\x04l\x90`\x01a\x05\x17V[_T\x7FK\x92\"\x9A\xBE 
J0\xD7\xB0\x88\xD8\x11\x02\x91v\t4\xD6[<\x96\x06\x80\xAD\x94\xE0_R\xA8\x86\x05a\x04\x9A\x82`\x02a\x05\x17V[_Ta\x04\xA7\x90`\x03a\x05\x17V[`@\x80Q\x92\x83R` \x83\x01\x91\x90\x91R\x01a\x04\x1CV[\x7F\x04\xF7\xFB(\x9EQ\xEA\x99\x96\xEC\x98\xE6/\xF4\xB6Q\xBE\xCF\xA6\xE5?;\x85\x0B\xE2\t\xB6\x97A\xC6o$_T`@Qa\x03\xC7\x91\x81R` \x01\x90V[_\x80T\x90\x80a\x04\xFC\x83a\x050V[\x91\x90PUPV[cNH{q`\xE0\x1B_R`\x11`\x04R`$_\xFD[\x80\x82\x01\x80\x82\x11\x15a\x05*Wa\x05*a\x05\x03V[\x92\x91PPV[_`\x01\x82\x01a\x05AWa\x05Aa\x05\x03V[P`\x01\x01\x90V\xFE\xA2dipfsX\"\x12 \xB4\xCC-\xF5\xEE\xD0oS\x8A1\x15~\xDF\xAE\xEE\xE5\x91\xA5q\x9D5\xFBG\xF3\xB6\xCE]1\xC1\xFF\xE2\xF9dsolcC\0\x08\x18\x003", + ); + /**Event with signature `noIOneD(uint256)` and selector `0x04f7fb289e51ea9996ec98e62ff4b651becfa6e53f3b850be209b69741c66f24`. + ```solidity + event noIOneD(uint256 num); + ```*/ + #[allow( + non_camel_case_types, + non_snake_case, + clippy::pub_underscore_fields, + clippy::style + )] + #[derive(Clone)] + pub struct noIOneD { + #[allow(missing_docs)] + pub num: alloy::sol_types::private::primitives::aliases::U256, + } + #[allow( + non_camel_case_types, + non_snake_case, + clippy::pub_underscore_fields, + clippy::style + )] + const _: () = { + use alloy::sol_types as alloy_sol_types; + #[automatically_derived] + impl alloy_sol_types::SolEvent for noIOneD { + type DataTuple<'a> = (alloy::sol_types::sol_data::Uint<256>,); + type DataToken<'a> = as alloy_sol_types::SolType>::Token<'a>; + type TopicList = (alloy_sol_types::sol_data::FixedBytes<32>,); + const SIGNATURE: &'static str = "noIOneD(uint256)"; + const SIGNATURE_HASH: alloy_sol_types::private::B256 = + alloy_sol_types::private::B256::new([ + 4u8, 247u8, 251u8, 40u8, 158u8, 81u8, 234u8, 153u8, 150u8, 236u8, 152u8, 230u8, + 47u8, 244u8, 182u8, 81u8, 190u8, 207u8, 166u8, 229u8, 63u8, 59u8, 133u8, 11u8, + 226u8, 9u8, 182u8, 151u8, 65u8, 198u8, 111u8, 36u8, + ]); + const ANONYMOUS: bool = false; + #[allow(unused_variables)] + #[inline] + fn new( + 
topics: ::RustType, + data: as alloy_sol_types::SolType>::RustType, + ) -> Self { + Self { num: data.0 } + } + #[inline] + fn check_signature( + topics: &::RustType, + ) -> alloy_sol_types::Result<()> { + if topics.0 != Self::SIGNATURE_HASH { + return Err(alloy_sol_types::Error::invalid_event_signature_hash( + Self::SIGNATURE, + topics.0, + Self::SIGNATURE_HASH, + )); + } + Ok(()) + } + #[inline] + fn tokenize_body(&self) -> Self::DataToken<'_> { + ( + as alloy_sol_types::SolType>::tokenize( + &self.num, + ), + ) + } + #[inline] + fn topics(&self) -> ::RustType { + (Self::SIGNATURE_HASH.into(),) + } + #[inline] + fn encode_topics_raw( + &self, + out: &mut [alloy_sol_types::abi::token::WordToken], + ) -> alloy_sol_types::Result<()> { + if out.len() < ::COUNT { + return Err(alloy_sol_types::Error::Overrun); + } + out[0usize] = alloy_sol_types::abi::token::WordToken(Self::SIGNATURE_HASH); + Ok(()) + } + } + #[automatically_derived] + impl alloy_sol_types::private::IntoLogData for noIOneD { + fn to_log_data(&self) -> alloy_sol_types::private::LogData { + From::from(self) + } + fn into_log_data(self) -> alloy_sol_types::private::LogData { + From::from(&self) + } + } + #[automatically_derived] + impl From<&noIOneD> for alloy_sol_types::private::LogData { + #[inline] + fn from(this: &noIOneD) -> alloy_sol_types::private::LogData { + alloy_sol_types::SolEvent::encode_log_data(this) + } + } + }; + /**Event with signature `noITwoD(uint256,uint256)` and selector `0x2fa61517ddf9dc7f2f3d5ca72414a01c834d9c5bb7c336c977423c85094bba61`. 
+ ```solidity + event noITwoD(uint256 num, uint256 numTwo); + ```*/ + #[allow( + non_camel_case_types, + non_snake_case, + clippy::pub_underscore_fields, + clippy::style + )] + #[derive(Clone)] + pub struct noITwoD { + #[allow(missing_docs)] + pub num: alloy::sol_types::private::primitives::aliases::U256, + #[allow(missing_docs)] + pub numTwo: alloy::sol_types::private::primitives::aliases::U256, + } + #[allow( + non_camel_case_types, + non_snake_case, + clippy::pub_underscore_fields, + clippy::style + )] + const _: () = { + use alloy::sol_types as alloy_sol_types; + #[automatically_derived] + impl alloy_sol_types::SolEvent for noITwoD { + type DataTuple<'a> = ( + alloy::sol_types::sol_data::Uint<256>, + alloy::sol_types::sol_data::Uint<256>, + ); + type DataToken<'a> = as alloy_sol_types::SolType>::Token<'a>; + type TopicList = (alloy_sol_types::sol_data::FixedBytes<32>,); + const SIGNATURE: &'static str = "noITwoD(uint256,uint256)"; + const SIGNATURE_HASH: alloy_sol_types::private::B256 = + alloy_sol_types::private::B256::new([ + 47u8, 166u8, 21u8, 23u8, 221u8, 249u8, 220u8, 127u8, 47u8, 61u8, 92u8, 167u8, + 36u8, 20u8, 160u8, 28u8, 131u8, 77u8, 156u8, 91u8, 183u8, 195u8, 54u8, 201u8, + 119u8, 66u8, 60u8, 133u8, 9u8, 75u8, 186u8, 97u8, + ]); + const ANONYMOUS: bool = false; + #[allow(unused_variables)] + #[inline] + fn new( + topics: ::RustType, + data: as alloy_sol_types::SolType>::RustType, + ) -> Self { + Self { + num: data.0, + numTwo: data.1, + } + } + #[inline] + fn check_signature( + topics: &::RustType, + ) -> alloy_sol_types::Result<()> { + if topics.0 != Self::SIGNATURE_HASH { + return Err(alloy_sol_types::Error::invalid_event_signature_hash( + Self::SIGNATURE, + topics.0, + Self::SIGNATURE_HASH, + )); + } + Ok(()) + } + #[inline] + fn tokenize_body(&self) -> Self::DataToken<'_> { + ( + as alloy_sol_types::SolType>::tokenize( + &self.num, + ), + as alloy_sol_types::SolType>::tokenize( + &self.numTwo, + ), + ) + } + #[inline] + fn topics(&self) -> 
::RustType { + (Self::SIGNATURE_HASH.into(),) + } + #[inline] + fn encode_topics_raw( + &self, + out: &mut [alloy_sol_types::abi::token::WordToken], + ) -> alloy_sol_types::Result<()> { + if out.len() < ::COUNT { + return Err(alloy_sol_types::Error::Overrun); + } + out[0usize] = alloy_sol_types::abi::token::WordToken(Self::SIGNATURE_HASH); + Ok(()) + } + } + #[automatically_derived] + impl alloy_sol_types::private::IntoLogData for noITwoD { + fn to_log_data(&self) -> alloy_sol_types::private::LogData { + From::from(self) + } + fn into_log_data(self) -> alloy_sol_types::private::LogData { + From::from(&self) + } + } + #[automatically_derived] + impl From<&noITwoD> for alloy_sol_types::private::LogData { + #[inline] + fn from(this: &noITwoD) -> alloy_sol_types::private::LogData { + alloy_sol_types::SolEvent::encode_log_data(this) + } + } + }; + /**Event with signature `noIndexed()` and selector `0x00f7c74f0533aa15e5ac7cafa9f9261d14da1e78830deba7110fbc79001ed15e`. + ```solidity + event noIndexed(); + ```*/ + #[allow( + non_camel_case_types, + non_snake_case, + clippy::pub_underscore_fields, + clippy::style + )] + #[derive(Clone)] + pub struct noIndexed {} + #[allow( + non_camel_case_types, + non_snake_case, + clippy::pub_underscore_fields, + clippy::style + )] + const _: () = { + use alloy::sol_types as alloy_sol_types; + #[automatically_derived] + impl alloy_sol_types::SolEvent for noIndexed { + type DataTuple<'a> = (); + type DataToken<'a> = as alloy_sol_types::SolType>::Token<'a>; + type TopicList = (alloy_sol_types::sol_data::FixedBytes<32>,); + const SIGNATURE: &'static str = "noIndexed()"; + const SIGNATURE_HASH: alloy_sol_types::private::B256 = + alloy_sol_types::private::B256::new([ + 0u8, 247u8, 199u8, 79u8, 5u8, 51u8, 170u8, 21u8, 229u8, 172u8, 124u8, 175u8, + 169u8, 249u8, 38u8, 29u8, 20u8, 218u8, 30u8, 120u8, 131u8, 13u8, 235u8, 167u8, + 17u8, 15u8, 188u8, 121u8, 0u8, 30u8, 209u8, 94u8, + ]); + const ANONYMOUS: bool = false; + #[allow(unused_variables)] + 
#[inline] + fn new( + topics: ::RustType, + data: as alloy_sol_types::SolType>::RustType, + ) -> Self { + Self {} + } + #[inline] + fn check_signature( + topics: &::RustType, + ) -> alloy_sol_types::Result<()> { + if topics.0 != Self::SIGNATURE_HASH { + return Err(alloy_sol_types::Error::invalid_event_signature_hash( + Self::SIGNATURE, + topics.0, + Self::SIGNATURE_HASH, + )); + } + Ok(()) + } + #[inline] + fn tokenize_body(&self) -> Self::DataToken<'_> { + () + } + #[inline] + fn topics(&self) -> ::RustType { + (Self::SIGNATURE_HASH.into(),) + } + #[inline] + fn encode_topics_raw( + &self, + out: &mut [alloy_sol_types::abi::token::WordToken], + ) -> alloy_sol_types::Result<()> { + if out.len() < ::COUNT { + return Err(alloy_sol_types::Error::Overrun); + } + out[0usize] = alloy_sol_types::abi::token::WordToken(Self::SIGNATURE_HASH); + Ok(()) + } + } + #[automatically_derived] + impl alloy_sol_types::private::IntoLogData for noIndexed { + fn to_log_data(&self) -> alloy_sol_types::private::LogData { + From::from(self) + } + fn into_log_data(self) -> alloy_sol_types::private::LogData { + From::from(&self) + } + } + #[automatically_derived] + impl From<&noIndexed> for alloy_sol_types::private::LogData { + #[inline] + fn from(this: &noIndexed) -> alloy_sol_types::private::LogData { + alloy_sol_types::SolEvent::encode_log_data(this) + } + } + }; + /**Event with signature `oneData(uint256,uint256,uint256,uint256)` and selector `0xf57f433eb9493cf4d9cb5763c12221d9b095804644d4ee006a78c72076cff947`. 
+ ```solidity + event oneData(uint256 indexed num, uint256 indexed numTwo, uint256 indexed numThree, uint256 numFour); + ```*/ + #[allow( + non_camel_case_types, + non_snake_case, + clippy::pub_underscore_fields, + clippy::style + )] + #[derive(Clone)] + pub struct oneData { + #[allow(missing_docs)] + pub num: alloy::sol_types::private::primitives::aliases::U256, + #[allow(missing_docs)] + pub numTwo: alloy::sol_types::private::primitives::aliases::U256, + #[allow(missing_docs)] + pub numThree: alloy::sol_types::private::primitives::aliases::U256, + #[allow(missing_docs)] + pub numFour: alloy::sol_types::private::primitives::aliases::U256, + } + #[allow( + non_camel_case_types, + non_snake_case, + clippy::pub_underscore_fields, + clippy::style + )] + const _: () = { + use alloy::sol_types as alloy_sol_types; + #[automatically_derived] + impl alloy_sol_types::SolEvent for oneData { + type DataTuple<'a> = (alloy::sol_types::sol_data::Uint<256>,); + type DataToken<'a> = as alloy_sol_types::SolType>::Token<'a>; + type TopicList = ( + alloy_sol_types::sol_data::FixedBytes<32>, + alloy::sol_types::sol_data::Uint<256>, + alloy::sol_types::sol_data::Uint<256>, + alloy::sol_types::sol_data::Uint<256>, + ); + const SIGNATURE: &'static str = "oneData(uint256,uint256,uint256,uint256)"; + const SIGNATURE_HASH: alloy_sol_types::private::B256 = + alloy_sol_types::private::B256::new([ + 245u8, 127u8, 67u8, 62u8, 185u8, 73u8, 60u8, 244u8, 217u8, 203u8, 87u8, 99u8, + 193u8, 34u8, 33u8, 217u8, 176u8, 149u8, 128u8, 70u8, 68u8, 212u8, 238u8, 0u8, + 106u8, 120u8, 199u8, 32u8, 118u8, 207u8, 249u8, 71u8, + ]); + const ANONYMOUS: bool = false; + #[allow(unused_variables)] + #[inline] + fn new( + topics: ::RustType, + data: as alloy_sol_types::SolType>::RustType, + ) -> Self { + Self { + num: topics.1, + numTwo: topics.2, + numThree: topics.3, + numFour: data.0, + } + } + #[inline] + fn check_signature( + topics: &::RustType, + ) -> alloy_sol_types::Result<()> { + if topics.0 != 
Self::SIGNATURE_HASH { + return Err(alloy_sol_types::Error::invalid_event_signature_hash( + Self::SIGNATURE, + topics.0, + Self::SIGNATURE_HASH, + )); + } + Ok(()) + } + #[inline] + fn tokenize_body(&self) -> Self::DataToken<'_> { + ( + as alloy_sol_types::SolType>::tokenize( + &self.numFour, + ), + ) + } + #[inline] + fn topics(&self) -> ::RustType { + ( + Self::SIGNATURE_HASH.into(), + self.num.clone(), + self.numTwo.clone(), + self.numThree.clone(), + ) + } + #[inline] + fn encode_topics_raw( + &self, + out: &mut [alloy_sol_types::abi::token::WordToken], + ) -> alloy_sol_types::Result<()> { + if out.len() < ::COUNT { + return Err(alloy_sol_types::Error::Overrun); + } + out[0usize] = alloy_sol_types::abi::token::WordToken(Self::SIGNATURE_HASH); + out[1usize] = as alloy_sol_types::EventTopic>::encode_topic(&self.num); + out[2usize] = as alloy_sol_types::EventTopic>::encode_topic(&self.numTwo); + out[3usize] = as alloy_sol_types::EventTopic>::encode_topic(&self.numThree); + Ok(()) + } + } + #[automatically_derived] + impl alloy_sol_types::private::IntoLogData for oneData { + fn to_log_data(&self) -> alloy_sol_types::private::LogData { + From::from(self) + } + fn into_log_data(self) -> alloy_sol_types::private::LogData { + From::from(&self) + } + } + #[automatically_derived] + impl From<&oneData> for alloy_sol_types::private::LogData { + #[inline] + fn from(this: &oneData) -> alloy_sol_types::private::LogData { + alloy_sol_types::SolEvent::encode_log_data(this) + } + } + }; + /**Event with signature `oneIOneD(uint256,uint256)` and selector `0x168718c0b1eb6bfd7b0edecea5c6fc6502737ad73a4c9f52ffa7e553c8eb9f53`. 
+ ```solidity + event oneIOneD(uint256 indexed num, uint256 numTwo); + ```*/ + #[allow( + non_camel_case_types, + non_snake_case, + clippy::pub_underscore_fields, + clippy::style + )] + #[derive(Clone)] + pub struct oneIOneD { + #[allow(missing_docs)] + pub num: alloy::sol_types::private::primitives::aliases::U256, + #[allow(missing_docs)] + pub numTwo: alloy::sol_types::private::primitives::aliases::U256, + } + #[allow( + non_camel_case_types, + non_snake_case, + clippy::pub_underscore_fields, + clippy::style + )] + const _: () = { + use alloy::sol_types as alloy_sol_types; + #[automatically_derived] + impl alloy_sol_types::SolEvent for oneIOneD { + type DataTuple<'a> = (alloy::sol_types::sol_data::Uint<256>,); + type DataToken<'a> = as alloy_sol_types::SolType>::Token<'a>; + type TopicList = ( + alloy_sol_types::sol_data::FixedBytes<32>, + alloy::sol_types::sol_data::Uint<256>, + ); + const SIGNATURE: &'static str = "oneIOneD(uint256,uint256)"; + const SIGNATURE_HASH: alloy_sol_types::private::B256 = + alloy_sol_types::private::B256::new([ + 22u8, 135u8, 24u8, 192u8, 177u8, 235u8, 107u8, 253u8, 123u8, 14u8, 222u8, + 206u8, 165u8, 198u8, 252u8, 101u8, 2u8, 115u8, 122u8, 215u8, 58u8, 76u8, 159u8, + 82u8, 255u8, 167u8, 229u8, 83u8, 200u8, 235u8, 159u8, 83u8, + ]); + const ANONYMOUS: bool = false; + #[allow(unused_variables)] + #[inline] + fn new( + topics: ::RustType, + data: as alloy_sol_types::SolType>::RustType, + ) -> Self { + Self { + num: topics.1, + numTwo: data.0, + } + } + #[inline] + fn check_signature( + topics: &::RustType, + ) -> alloy_sol_types::Result<()> { + if topics.0 != Self::SIGNATURE_HASH { + return Err(alloy_sol_types::Error::invalid_event_signature_hash( + Self::SIGNATURE, + topics.0, + Self::SIGNATURE_HASH, + )); + } + Ok(()) + } + #[inline] + fn tokenize_body(&self) -> Self::DataToken<'_> { + ( + as alloy_sol_types::SolType>::tokenize( + &self.numTwo, + ), + ) + } + #[inline] + fn topics(&self) -> ::RustType { + (Self::SIGNATURE_HASH.into(), 
self.num.clone()) + } + #[inline] + fn encode_topics_raw( + &self, + out: &mut [alloy_sol_types::abi::token::WordToken], + ) -> alloy_sol_types::Result<()> { + if out.len() < ::COUNT { + return Err(alloy_sol_types::Error::Overrun); + } + out[0usize] = alloy_sol_types::abi::token::WordToken(Self::SIGNATURE_HASH); + out[1usize] = as alloy_sol_types::EventTopic>::encode_topic(&self.num); + Ok(()) + } + } + #[automatically_derived] + impl alloy_sol_types::private::IntoLogData for oneIOneD { + fn to_log_data(&self) -> alloy_sol_types::private::LogData { + From::from(self) + } + fn into_log_data(self) -> alloy_sol_types::private::LogData { + From::from(&self) + } + } + #[automatically_derived] + impl From<&oneIOneD> for alloy_sol_types::private::LogData { + #[inline] + fn from(this: &oneIOneD) -> alloy_sol_types::private::LogData { + alloy_sol_types::SolEvent::encode_log_data(this) + } + } + }; + /**Event with signature `oneITwoD(uint256,uint256,uint256)` and selector `0xef4c88193498df237f039055d1212ac2a3b93ed8aea88c814312e50f6a32592d`. 
+ ```solidity + event oneITwoD(uint256 indexed num, uint256 numTwo, uint256 numThree); + ```*/ + #[allow( + non_camel_case_types, + non_snake_case, + clippy::pub_underscore_fields, + clippy::style + )] + #[derive(Clone)] + pub struct oneITwoD { + #[allow(missing_docs)] + pub num: alloy::sol_types::private::primitives::aliases::U256, + #[allow(missing_docs)] + pub numTwo: alloy::sol_types::private::primitives::aliases::U256, + #[allow(missing_docs)] + pub numThree: alloy::sol_types::private::primitives::aliases::U256, + } + #[allow( + non_camel_case_types, + non_snake_case, + clippy::pub_underscore_fields, + clippy::style + )] + const _: () = { + use alloy::sol_types as alloy_sol_types; + #[automatically_derived] + impl alloy_sol_types::SolEvent for oneITwoD { + type DataTuple<'a> = ( + alloy::sol_types::sol_data::Uint<256>, + alloy::sol_types::sol_data::Uint<256>, + ); + type DataToken<'a> = as alloy_sol_types::SolType>::Token<'a>; + type TopicList = ( + alloy_sol_types::sol_data::FixedBytes<32>, + alloy::sol_types::sol_data::Uint<256>, + ); + const SIGNATURE: &'static str = "oneITwoD(uint256,uint256,uint256)"; + const SIGNATURE_HASH: alloy_sol_types::private::B256 = + alloy_sol_types::private::B256::new([ + 239u8, 76u8, 136u8, 25u8, 52u8, 152u8, 223u8, 35u8, 127u8, 3u8, 144u8, 85u8, + 209u8, 33u8, 42u8, 194u8, 163u8, 185u8, 62u8, 216u8, 174u8, 168u8, 140u8, + 129u8, 67u8, 18u8, 229u8, 15u8, 106u8, 50u8, 89u8, 45u8, + ]); + const ANONYMOUS: bool = false; + #[allow(unused_variables)] + #[inline] + fn new( + topics: ::RustType, + data: as alloy_sol_types::SolType>::RustType, + ) -> Self { + Self { + num: topics.1, + numTwo: data.0, + numThree: data.1, + } + } + #[inline] + fn check_signature( + topics: &::RustType, + ) -> alloy_sol_types::Result<()> { + if topics.0 != Self::SIGNATURE_HASH { + return Err(alloy_sol_types::Error::invalid_event_signature_hash( + Self::SIGNATURE, + topics.0, + Self::SIGNATURE_HASH, + )); + } + Ok(()) + } + #[inline] + fn 
tokenize_body(&self) -> Self::DataToken<'_> { + ( + as alloy_sol_types::SolType>::tokenize( + &self.numTwo, + ), + as alloy_sol_types::SolType>::tokenize( + &self.numThree, + ), + ) + } + #[inline] + fn topics(&self) -> ::RustType { + (Self::SIGNATURE_HASH.into(), self.num.clone()) + } + #[inline] + fn encode_topics_raw( + &self, + out: &mut [alloy_sol_types::abi::token::WordToken], + ) -> alloy_sol_types::Result<()> { + if out.len() < ::COUNT { + return Err(alloy_sol_types::Error::Overrun); + } + out[0usize] = alloy_sol_types::abi::token::WordToken(Self::SIGNATURE_HASH); + out[1usize] = as alloy_sol_types::EventTopic>::encode_topic(&self.num); + Ok(()) + } + } + #[automatically_derived] + impl alloy_sol_types::private::IntoLogData for oneITwoD { + fn to_log_data(&self) -> alloy_sol_types::private::LogData { + From::from(self) + } + fn into_log_data(self) -> alloy_sol_types::private::LogData { + From::from(&self) + } + } + #[automatically_derived] + impl From<&oneITwoD> for alloy_sol_types::private::LogData { + #[inline] + fn from(this: &oneITwoD) -> alloy_sol_types::private::LogData { + alloy_sol_types::SolEvent::encode_log_data(this) + } + } + }; + /**Event with signature `oneIndexed(uint256)` and selector `0xc2809a1a2fb95d84cfdc488cdb320a144c158f8d44836c9c2d4badba082bfdfa`. 
+ ```solidity + event oneIndexed(uint256 indexed num); + ```*/ + #[allow( + non_camel_case_types, + non_snake_case, + clippy::pub_underscore_fields, + clippy::style + )] + #[derive(Clone)] + pub struct oneIndexed { + #[allow(missing_docs)] + pub num: alloy::sol_types::private::primitives::aliases::U256, + } + #[allow( + non_camel_case_types, + non_snake_case, + clippy::pub_underscore_fields, + clippy::style + )] + const _: () = { + use alloy::sol_types as alloy_sol_types; + #[automatically_derived] + impl alloy_sol_types::SolEvent for oneIndexed { + type DataTuple<'a> = (); + type DataToken<'a> = as alloy_sol_types::SolType>::Token<'a>; + type TopicList = ( + alloy_sol_types::sol_data::FixedBytes<32>, + alloy::sol_types::sol_data::Uint<256>, + ); + const SIGNATURE: &'static str = "oneIndexed(uint256)"; + const SIGNATURE_HASH: alloy_sol_types::private::B256 = + alloy_sol_types::private::B256::new([ + 194u8, 128u8, 154u8, 26u8, 47u8, 185u8, 93u8, 132u8, 207u8, 220u8, 72u8, 140u8, + 219u8, 50u8, 10u8, 20u8, 76u8, 21u8, 143u8, 141u8, 68u8, 131u8, 108u8, 156u8, + 45u8, 75u8, 173u8, 186u8, 8u8, 43u8, 253u8, 250u8, + ]); + const ANONYMOUS: bool = false; + #[allow(unused_variables)] + #[inline] + fn new( + topics: ::RustType, + data: as alloy_sol_types::SolType>::RustType, + ) -> Self { + Self { num: topics.1 } + } + #[inline] + fn check_signature( + topics: &::RustType, + ) -> alloy_sol_types::Result<()> { + if topics.0 != Self::SIGNATURE_HASH { + return Err(alloy_sol_types::Error::invalid_event_signature_hash( + Self::SIGNATURE, + topics.0, + Self::SIGNATURE_HASH, + )); + } + Ok(()) + } + #[inline] + fn tokenize_body(&self) -> Self::DataToken<'_> { + () + } + #[inline] + fn topics(&self) -> ::RustType { + (Self::SIGNATURE_HASH.into(), self.num.clone()) + } + #[inline] + fn encode_topics_raw( + &self, + out: &mut [alloy_sol_types::abi::token::WordToken], + ) -> alloy_sol_types::Result<()> { + if out.len() < ::COUNT { + return Err(alloy_sol_types::Error::Overrun); + } + 
out[0usize] = alloy_sol_types::abi::token::WordToken(Self::SIGNATURE_HASH); + out[1usize] = as alloy_sol_types::EventTopic>::encode_topic(&self.num); + Ok(()) + } + } + #[automatically_derived] + impl alloy_sol_types::private::IntoLogData for oneIndexed { + fn to_log_data(&self) -> alloy_sol_types::private::LogData { + From::from(self) + } + fn into_log_data(self) -> alloy_sol_types::private::LogData { + From::from(&self) + } + } + #[automatically_derived] + impl From<&oneIndexed> for alloy_sol_types::private::LogData { + #[inline] + fn from(this: &oneIndexed) -> alloy_sol_types::private::LogData { + alloy_sol_types::SolEvent::encode_log_data(this) + } + } + }; + /**Event with signature `threeIndexed(uint256,uint256,uint256)` and selector `0x1d18de2cd8798a1c29b9255930c807eb6c84ae0acb2219acbb11e0f65cf813e9`. + ```solidity + event threeIndexed(uint256 indexed num, uint256 indexed numTwo, uint256 indexed numThree); + ```*/ + #[allow( + non_camel_case_types, + non_snake_case, + clippy::pub_underscore_fields, + clippy::style + )] + #[derive(Clone)] + pub struct threeIndexed { + #[allow(missing_docs)] + pub num: alloy::sol_types::private::primitives::aliases::U256, + #[allow(missing_docs)] + pub numTwo: alloy::sol_types::private::primitives::aliases::U256, + #[allow(missing_docs)] + pub numThree: alloy::sol_types::private::primitives::aliases::U256, + } + #[allow( + non_camel_case_types, + non_snake_case, + clippy::pub_underscore_fields, + clippy::style + )] + const _: () = { + use alloy::sol_types as alloy_sol_types; + #[automatically_derived] + impl alloy_sol_types::SolEvent for threeIndexed { + type DataTuple<'a> = (); + type DataToken<'a> = as alloy_sol_types::SolType>::Token<'a>; + type TopicList = ( + alloy_sol_types::sol_data::FixedBytes<32>, + alloy::sol_types::sol_data::Uint<256>, + alloy::sol_types::sol_data::Uint<256>, + alloy::sol_types::sol_data::Uint<256>, + ); + const SIGNATURE: &'static str = "threeIndexed(uint256,uint256,uint256)"; + const 
SIGNATURE_HASH: alloy_sol_types::private::B256 = + alloy_sol_types::private::B256::new([ + 29u8, 24u8, 222u8, 44u8, 216u8, 121u8, 138u8, 28u8, 41u8, 185u8, 37u8, 89u8, + 48u8, 200u8, 7u8, 235u8, 108u8, 132u8, 174u8, 10u8, 203u8, 34u8, 25u8, 172u8, + 187u8, 17u8, 224u8, 246u8, 92u8, 248u8, 19u8, 233u8, + ]); + const ANONYMOUS: bool = false; + #[allow(unused_variables)] + #[inline] + fn new( + topics: ::RustType, + data: as alloy_sol_types::SolType>::RustType, + ) -> Self { + Self { + num: topics.1, + numTwo: topics.2, + numThree: topics.3, + } + } + #[inline] + fn check_signature( + topics: &::RustType, + ) -> alloy_sol_types::Result<()> { + if topics.0 != Self::SIGNATURE_HASH { + return Err(alloy_sol_types::Error::invalid_event_signature_hash( + Self::SIGNATURE, + topics.0, + Self::SIGNATURE_HASH, + )); + } + Ok(()) + } + #[inline] + fn tokenize_body(&self) -> Self::DataToken<'_> { + () + } + #[inline] + fn topics(&self) -> ::RustType { + ( + Self::SIGNATURE_HASH.into(), + self.num.clone(), + self.numTwo.clone(), + self.numThree.clone(), + ) + } + #[inline] + fn encode_topics_raw( + &self, + out: &mut [alloy_sol_types::abi::token::WordToken], + ) -> alloy_sol_types::Result<()> { + if out.len() < ::COUNT { + return Err(alloy_sol_types::Error::Overrun); + } + out[0usize] = alloy_sol_types::abi::token::WordToken(Self::SIGNATURE_HASH); + out[1usize] = as alloy_sol_types::EventTopic>::encode_topic(&self.num); + out[2usize] = as alloy_sol_types::EventTopic>::encode_topic(&self.numTwo); + out[3usize] = as alloy_sol_types::EventTopic>::encode_topic(&self.numThree); + Ok(()) + } + } + #[automatically_derived] + impl alloy_sol_types::private::IntoLogData for threeIndexed { + fn to_log_data(&self) -> alloy_sol_types::private::LogData { + From::from(self) + } + fn into_log_data(self) -> alloy_sol_types::private::LogData { + From::from(&self) + } + } + #[automatically_derived] + impl From<&threeIndexed> for alloy_sol_types::private::LogData { + #[inline] + fn from(this: 
&threeIndexed) -> alloy_sol_types::private::LogData { + alloy_sol_types::SolEvent::encode_log_data(this) + } + } + }; + /**Event with signature `twoData(uint256,uint256,uint256,uint256,uint256)` and selector `0xf03d29753fbd5ac209bab88a99b396bcc25c3e72530d02c81aea4d324ab3d742`. + ```solidity + event twoData(uint256 indexed num, uint256 indexed numTwo, uint256 indexed numThree, uint256 numFour, uint256 numFive); + ```*/ + #[allow( + non_camel_case_types, + non_snake_case, + clippy::pub_underscore_fields, + clippy::style + )] + #[derive(Clone)] + pub struct twoData { + #[allow(missing_docs)] + pub num: alloy::sol_types::private::primitives::aliases::U256, + #[allow(missing_docs)] + pub numTwo: alloy::sol_types::private::primitives::aliases::U256, + #[allow(missing_docs)] + pub numThree: alloy::sol_types::private::primitives::aliases::U256, + #[allow(missing_docs)] + pub numFour: alloy::sol_types::private::primitives::aliases::U256, + #[allow(missing_docs)] + pub numFive: alloy::sol_types::private::primitives::aliases::U256, + } + #[allow( + non_camel_case_types, + non_snake_case, + clippy::pub_underscore_fields, + clippy::style + )] + const _: () = { + use alloy::sol_types as alloy_sol_types; + #[automatically_derived] + impl alloy_sol_types::SolEvent for twoData { + type DataTuple<'a> = ( + alloy::sol_types::sol_data::Uint<256>, + alloy::sol_types::sol_data::Uint<256>, + ); + type DataToken<'a> = as alloy_sol_types::SolType>::Token<'a>; + type TopicList = ( + alloy_sol_types::sol_data::FixedBytes<32>, + alloy::sol_types::sol_data::Uint<256>, + alloy::sol_types::sol_data::Uint<256>, + alloy::sol_types::sol_data::Uint<256>, + ); + const SIGNATURE: &'static str = "twoData(uint256,uint256,uint256,uint256,uint256)"; + const SIGNATURE_HASH: alloy_sol_types::private::B256 = + alloy_sol_types::private::B256::new([ + 240u8, 61u8, 41u8, 117u8, 63u8, 189u8, 90u8, 194u8, 9u8, 186u8, 184u8, 138u8, + 153u8, 179u8, 150u8, 188u8, 194u8, 92u8, 62u8, 114u8, 83u8, 13u8, 2u8, 200u8, + 
26u8, 234u8, 77u8, 50u8, 74u8, 179u8, 215u8, 66u8, + ]); + const ANONYMOUS: bool = false; + #[allow(unused_variables)] + #[inline] + fn new( + topics: ::RustType, + data: as alloy_sol_types::SolType>::RustType, + ) -> Self { + Self { + num: topics.1, + numTwo: topics.2, + numThree: topics.3, + numFour: data.0, + numFive: data.1, + } + } + #[inline] + fn check_signature( + topics: &::RustType, + ) -> alloy_sol_types::Result<()> { + if topics.0 != Self::SIGNATURE_HASH { + return Err(alloy_sol_types::Error::invalid_event_signature_hash( + Self::SIGNATURE, + topics.0, + Self::SIGNATURE_HASH, + )); + } + Ok(()) + } + #[inline] + fn tokenize_body(&self) -> Self::DataToken<'_> { + ( + as alloy_sol_types::SolType>::tokenize( + &self.numFour, + ), + as alloy_sol_types::SolType>::tokenize( + &self.numFive, + ), + ) + } + #[inline] + fn topics(&self) -> ::RustType { + ( + Self::SIGNATURE_HASH.into(), + self.num.clone(), + self.numTwo.clone(), + self.numThree.clone(), + ) + } + #[inline] + fn encode_topics_raw( + &self, + out: &mut [alloy_sol_types::abi::token::WordToken], + ) -> alloy_sol_types::Result<()> { + if out.len() < ::COUNT { + return Err(alloy_sol_types::Error::Overrun); + } + out[0usize] = alloy_sol_types::abi::token::WordToken(Self::SIGNATURE_HASH); + out[1usize] = as alloy_sol_types::EventTopic>::encode_topic(&self.num); + out[2usize] = as alloy_sol_types::EventTopic>::encode_topic(&self.numTwo); + out[3usize] = as alloy_sol_types::EventTopic>::encode_topic(&self.numThree); + Ok(()) + } + } + #[automatically_derived] + impl alloy_sol_types::private::IntoLogData for twoData { + fn to_log_data(&self) -> alloy_sol_types::private::LogData { + From::from(self) + } + fn into_log_data(self) -> alloy_sol_types::private::LogData { + From::from(&self) + } + } + #[automatically_derived] + impl From<&twoData> for alloy_sol_types::private::LogData { + #[inline] + fn from(this: &twoData) -> alloy_sol_types::private::LogData { + alloy_sol_types::SolEvent::encode_log_data(this) 
+ } + } + }; + /**Event with signature `twoIOneD(uint256,uint256,uint256)` and selector `0x3bb2d6337882faa5526cf806c9763904a90f3363590dd4386913e3fcd8a2e1d1`. + ```solidity + event twoIOneD(uint256 indexed num, uint256 indexed numTwo, uint256 numThree); + ```*/ + #[allow( + non_camel_case_types, + non_snake_case, + clippy::pub_underscore_fields, + clippy::style + )] + #[derive(Clone)] + pub struct twoIOneD { + #[allow(missing_docs)] + pub num: alloy::sol_types::private::primitives::aliases::U256, + #[allow(missing_docs)] + pub numTwo: alloy::sol_types::private::primitives::aliases::U256, + #[allow(missing_docs)] + pub numThree: alloy::sol_types::private::primitives::aliases::U256, + } + #[allow( + non_camel_case_types, + non_snake_case, + clippy::pub_underscore_fields, + clippy::style + )] + const _: () = { + use alloy::sol_types as alloy_sol_types; + #[automatically_derived] + impl alloy_sol_types::SolEvent for twoIOneD { + type DataTuple<'a> = (alloy::sol_types::sol_data::Uint<256>,); + type DataToken<'a> = as alloy_sol_types::SolType>::Token<'a>; + type TopicList = ( + alloy_sol_types::sol_data::FixedBytes<32>, + alloy::sol_types::sol_data::Uint<256>, + alloy::sol_types::sol_data::Uint<256>, + ); + const SIGNATURE: &'static str = "twoIOneD(uint256,uint256,uint256)"; + const SIGNATURE_HASH: alloy_sol_types::private::B256 = + alloy_sol_types::private::B256::new([ + 59u8, 178u8, 214u8, 51u8, 120u8, 130u8, 250u8, 165u8, 82u8, 108u8, 248u8, 6u8, + 201u8, 118u8, 57u8, 4u8, 169u8, 15u8, 51u8, 99u8, 89u8, 13u8, 212u8, 56u8, + 105u8, 19u8, 227u8, 252u8, 216u8, 162u8, 225u8, 209u8, + ]); + const ANONYMOUS: bool = false; + #[allow(unused_variables)] + #[inline] + fn new( + topics: ::RustType, + data: as alloy_sol_types::SolType>::RustType, + ) -> Self { + Self { + num: topics.1, + numTwo: topics.2, + numThree: data.0, + } + } + #[inline] + fn check_signature( + topics: &::RustType, + ) -> alloy_sol_types::Result<()> { + if topics.0 != Self::SIGNATURE_HASH { + return 
Err(alloy_sol_types::Error::invalid_event_signature_hash( + Self::SIGNATURE, + topics.0, + Self::SIGNATURE_HASH, + )); + } + Ok(()) + } + #[inline] + fn tokenize_body(&self) -> Self::DataToken<'_> { + ( + as alloy_sol_types::SolType>::tokenize( + &self.numThree, + ), + ) + } + #[inline] + fn topics(&self) -> ::RustType { + ( + Self::SIGNATURE_HASH.into(), + self.num.clone(), + self.numTwo.clone(), + ) + } + #[inline] + fn encode_topics_raw( + &self, + out: &mut [alloy_sol_types::abi::token::WordToken], + ) -> alloy_sol_types::Result<()> { + if out.len() < ::COUNT { + return Err(alloy_sol_types::Error::Overrun); + } + out[0usize] = alloy_sol_types::abi::token::WordToken(Self::SIGNATURE_HASH); + out[1usize] = as alloy_sol_types::EventTopic>::encode_topic(&self.num); + out[2usize] = as alloy_sol_types::EventTopic>::encode_topic(&self.numTwo); + Ok(()) + } + } + #[automatically_derived] + impl alloy_sol_types::private::IntoLogData for twoIOneD { + fn to_log_data(&self) -> alloy_sol_types::private::LogData { + From::from(self) + } + fn into_log_data(self) -> alloy_sol_types::private::LogData { + From::from(&self) + } + } + #[automatically_derived] + impl From<&twoIOneD> for alloy_sol_types::private::LogData { + #[inline] + fn from(this: &twoIOneD) -> alloy_sol_types::private::LogData { + alloy_sol_types::SolEvent::encode_log_data(this) + } + } + }; + /**Event with signature `twoITwoD(uint256,uint256,uint256,uint256)` and selector `0x4b92229abe204a30d7b088d8110291760934d65b3c960680ad94e05f52a88605`. 
+ ```solidity + event twoITwoD(uint256 indexed num, uint256 indexed numTwo, uint256 numThree, uint256 numFour); + ```*/ + #[allow( + non_camel_case_types, + non_snake_case, + clippy::pub_underscore_fields, + clippy::style + )] + #[derive(Clone)] + pub struct twoITwoD { + #[allow(missing_docs)] + pub num: alloy::sol_types::private::primitives::aliases::U256, + #[allow(missing_docs)] + pub numTwo: alloy::sol_types::private::primitives::aliases::U256, + #[allow(missing_docs)] + pub numThree: alloy::sol_types::private::primitives::aliases::U256, + #[allow(missing_docs)] + pub numFour: alloy::sol_types::private::primitives::aliases::U256, + } + #[allow( + non_camel_case_types, + non_snake_case, + clippy::pub_underscore_fields, + clippy::style + )] + const _: () = { + use alloy::sol_types as alloy_sol_types; + #[automatically_derived] + impl alloy_sol_types::SolEvent for twoITwoD { + type DataTuple<'a> = ( + alloy::sol_types::sol_data::Uint<256>, + alloy::sol_types::sol_data::Uint<256>, + ); + type DataToken<'a> = as alloy_sol_types::SolType>::Token<'a>; + type TopicList = ( + alloy_sol_types::sol_data::FixedBytes<32>, + alloy::sol_types::sol_data::Uint<256>, + alloy::sol_types::sol_data::Uint<256>, + ); + const SIGNATURE: &'static str = "twoITwoD(uint256,uint256,uint256,uint256)"; + const SIGNATURE_HASH: alloy_sol_types::private::B256 = + alloy_sol_types::private::B256::new([ + 75u8, 146u8, 34u8, 154u8, 190u8, 32u8, 74u8, 48u8, 215u8, 176u8, 136u8, 216u8, + 17u8, 2u8, 145u8, 118u8, 9u8, 52u8, 214u8, 91u8, 60u8, 150u8, 6u8, 128u8, + 173u8, 148u8, 224u8, 95u8, 82u8, 168u8, 134u8, 5u8, + ]); + const ANONYMOUS: bool = false; + #[allow(unused_variables)] + #[inline] + fn new( + topics: ::RustType, + data: as alloy_sol_types::SolType>::RustType, + ) -> Self { + Self { + num: topics.1, + numTwo: topics.2, + numThree: data.0, + numFour: data.1, + } + } + #[inline] + fn check_signature( + topics: &::RustType, + ) -> alloy_sol_types::Result<()> { + if topics.0 != 
Self::SIGNATURE_HASH { + return Err(alloy_sol_types::Error::invalid_event_signature_hash( + Self::SIGNATURE, + topics.0, + Self::SIGNATURE_HASH, + )); + } + Ok(()) + } + #[inline] + fn tokenize_body(&self) -> Self::DataToken<'_> { + ( + as alloy_sol_types::SolType>::tokenize( + &self.numThree, + ), + as alloy_sol_types::SolType>::tokenize( + &self.numFour, + ), + ) + } + #[inline] + fn topics(&self) -> ::RustType { + ( + Self::SIGNATURE_HASH.into(), + self.num.clone(), + self.numTwo.clone(), + ) + } + #[inline] + fn encode_topics_raw( + &self, + out: &mut [alloy_sol_types::abi::token::WordToken], + ) -> alloy_sol_types::Result<()> { + if out.len() < ::COUNT { + return Err(alloy_sol_types::Error::Overrun); + } + out[0usize] = alloy_sol_types::abi::token::WordToken(Self::SIGNATURE_HASH); + out[1usize] = as alloy_sol_types::EventTopic>::encode_topic(&self.num); + out[2usize] = as alloy_sol_types::EventTopic>::encode_topic(&self.numTwo); + Ok(()) + } + } + #[automatically_derived] + impl alloy_sol_types::private::IntoLogData for twoITwoD { + fn to_log_data(&self) -> alloy_sol_types::private::LogData { + From::from(self) + } + fn into_log_data(self) -> alloy_sol_types::private::LogData { + From::from(&self) + } + } + #[automatically_derived] + impl From<&twoITwoD> for alloy_sol_types::private::LogData { + #[inline] + fn from(this: &twoITwoD) -> alloy_sol_types::private::LogData { + alloy_sol_types::SolEvent::encode_log_data(this) + } + } + }; + /**Event with signature `twoIndexed(uint256,uint256)` and selector `0xa6baf14d8f11d7a4497089bb3fca0adfc34837cfb1f4aa370634d36ef0305b46`. 
+ ```solidity + event twoIndexed(uint256 indexed num, uint256 indexed numTwo); + ```*/ + #[allow( + non_camel_case_types, + non_snake_case, + clippy::pub_underscore_fields, + clippy::style + )] + #[derive(Clone)] + pub struct twoIndexed { + #[allow(missing_docs)] + pub num: alloy::sol_types::private::primitives::aliases::U256, + #[allow(missing_docs)] + pub numTwo: alloy::sol_types::private::primitives::aliases::U256, + } + #[allow( + non_camel_case_types, + non_snake_case, + clippy::pub_underscore_fields, + clippy::style + )] + const _: () = { + use alloy::sol_types as alloy_sol_types; + #[automatically_derived] + impl alloy_sol_types::SolEvent for twoIndexed { + type DataTuple<'a> = (); + type DataToken<'a> = as alloy_sol_types::SolType>::Token<'a>; + type TopicList = ( + alloy_sol_types::sol_data::FixedBytes<32>, + alloy::sol_types::sol_data::Uint<256>, + alloy::sol_types::sol_data::Uint<256>, + ); + const SIGNATURE: &'static str = "twoIndexed(uint256,uint256)"; + const SIGNATURE_HASH: alloy_sol_types::private::B256 = + alloy_sol_types::private::B256::new([ + 166u8, 186u8, 241u8, 77u8, 143u8, 17u8, 215u8, 164u8, 73u8, 112u8, 137u8, + 187u8, 63u8, 202u8, 10u8, 223u8, 195u8, 72u8, 55u8, 207u8, 177u8, 244u8, 170u8, + 55u8, 6u8, 52u8, 211u8, 110u8, 240u8, 48u8, 91u8, 70u8, + ]); + const ANONYMOUS: bool = false; + #[allow(unused_variables)] + #[inline] + fn new( + topics: ::RustType, + data: as alloy_sol_types::SolType>::RustType, + ) -> Self { + Self { + num: topics.1, + numTwo: topics.2, + } + } + #[inline] + fn check_signature( + topics: &::RustType, + ) -> alloy_sol_types::Result<()> { + if topics.0 != Self::SIGNATURE_HASH { + return Err(alloy_sol_types::Error::invalid_event_signature_hash( + Self::SIGNATURE, + topics.0, + Self::SIGNATURE_HASH, + )); + } + Ok(()) + } + #[inline] + fn tokenize_body(&self) -> Self::DataToken<'_> { + () + } + #[inline] + fn topics(&self) -> ::RustType { + ( + Self::SIGNATURE_HASH.into(), + self.num.clone(), + self.numTwo.clone(), + 
) + } + #[inline] + fn encode_topics_raw( + &self, + out: &mut [alloy_sol_types::abi::token::WordToken], + ) -> alloy_sol_types::Result<()> { + if out.len() < ::COUNT { + return Err(alloy_sol_types::Error::Overrun); + } + out[0usize] = alloy_sol_types::abi::token::WordToken(Self::SIGNATURE_HASH); + out[1usize] = as alloy_sol_types::EventTopic>::encode_topic(&self.num); + out[2usize] = as alloy_sol_types::EventTopic>::encode_topic(&self.numTwo); + Ok(()) + } + } + #[automatically_derived] + impl alloy_sol_types::private::IntoLogData for twoIndexed { + fn to_log_data(&self) -> alloy_sol_types::private::LogData { + From::from(self) + } + fn into_log_data(self) -> alloy_sol_types::private::LogData { + From::from(&self) + } + } + #[automatically_derived] + impl From<&twoIndexed> for alloy_sol_types::private::LogData { + #[inline] + fn from(this: &twoIndexed) -> alloy_sol_types::private::LogData { + alloy_sol_types::SolEvent::encode_log_data(this) + } + } + }; + /**Function with signature `increment()` and selector `0xd09de08a`. + ```solidity + function increment() external; + ```*/ + #[allow(non_camel_case_types, non_snake_case, clippy::pub_underscore_fields)] + #[derive(Clone)] + pub struct incrementCall {} + ///Container type for the return parameters of the [`increment()`](incrementCall) function. 
+ #[allow(non_camel_case_types, non_snake_case, clippy::pub_underscore_fields)] + #[derive(Clone)] + pub struct incrementReturn {} + #[allow( + non_camel_case_types, + non_snake_case, + clippy::pub_underscore_fields, + clippy::style + )] + const _: () = { + use alloy::sol_types as alloy_sol_types; + { + #[doc(hidden)] + type UnderlyingSolTuple<'a> = (); + #[doc(hidden)] + type UnderlyingRustTuple<'a> = (); + #[cfg(test)] + #[allow(dead_code, unreachable_patterns)] + fn _type_assertion(_t: alloy_sol_types::private::AssertTypeEq) { + match _t { + alloy_sol_types::private::AssertTypeEq::< + ::RustType, + >(_) => {} + } + } + #[automatically_derived] + #[doc(hidden)] + impl ::core::convert::From for UnderlyingRustTuple<'_> { + fn from(value: incrementCall) -> Self { + () + } + } + #[automatically_derived] + #[doc(hidden)] + impl ::core::convert::From> for incrementCall { + fn from(tuple: UnderlyingRustTuple<'_>) -> Self { + Self {} + } + } + } + { + #[doc(hidden)] + type UnderlyingSolTuple<'a> = (); + #[doc(hidden)] + type UnderlyingRustTuple<'a> = (); + #[cfg(test)] + #[allow(dead_code, unreachable_patterns)] + fn _type_assertion(_t: alloy_sol_types::private::AssertTypeEq) { + match _t { + alloy_sol_types::private::AssertTypeEq::< + ::RustType, + >(_) => {} + } + } + #[automatically_derived] + #[doc(hidden)] + impl ::core::convert::From for UnderlyingRustTuple<'_> { + fn from(value: incrementReturn) -> Self { + () + } + } + #[automatically_derived] + #[doc(hidden)] + impl ::core::convert::From> for incrementReturn { + fn from(tuple: UnderlyingRustTuple<'_>) -> Self { + Self {} + } + } + } + #[automatically_derived] + impl alloy_sol_types::SolCall for incrementCall { + type Parameters<'a> = (); + type Token<'a> = as alloy_sol_types::SolType>::Token<'a>; + type Return = incrementReturn; + type ReturnTuple<'a> = (); + type ReturnToken<'a> = as alloy_sol_types::SolType>::Token<'a>; + const SIGNATURE: &'static str = "increment()"; + const SELECTOR: [u8; 4] = [208u8, 157u8, 
224u8, 138u8]; + #[inline] + fn new<'a>( + tuple: as alloy_sol_types::SolType>::RustType, + ) -> Self { + tuple.into() + } + #[inline] + fn tokenize(&self) -> Self::Token<'_> { + () + } + #[inline] + fn abi_decode_returns( + data: &[u8], + validate: bool, + ) -> alloy_sol_types::Result { + as alloy_sol_types::SolType>::abi_decode_sequence( + data, validate, + ) + .map(Into::into) + } + } + }; + /**Function with signature `number()` and selector `0x8381f58a`. + ```solidity + function number() external view returns (uint256); + ```*/ + #[allow(non_camel_case_types, non_snake_case, clippy::pub_underscore_fields)] + #[derive(Clone)] + pub struct numberCall {} + ///Container type for the return parameters of the [`number()`](numberCall) function. + #[allow(non_camel_case_types, non_snake_case, clippy::pub_underscore_fields)] + #[derive(Clone)] + pub struct numberReturn { + pub _0: alloy::sol_types::private::primitives::aliases::U256, + } + #[allow( + non_camel_case_types, + non_snake_case, + clippy::pub_underscore_fields, + clippy::style + )] + const _: () = { + use alloy::sol_types as alloy_sol_types; + { + #[doc(hidden)] + type UnderlyingSolTuple<'a> = (); + #[doc(hidden)] + type UnderlyingRustTuple<'a> = (); + #[cfg(test)] + #[allow(dead_code, unreachable_patterns)] + fn _type_assertion(_t: alloy_sol_types::private::AssertTypeEq) { + match _t { + alloy_sol_types::private::AssertTypeEq::< + ::RustType, + >(_) => {} + } + } + #[automatically_derived] + #[doc(hidden)] + impl ::core::convert::From for UnderlyingRustTuple<'_> { + fn from(value: numberCall) -> Self { + () + } + } + #[automatically_derived] + #[doc(hidden)] + impl ::core::convert::From> for numberCall { + fn from(tuple: UnderlyingRustTuple<'_>) -> Self { + Self {} + } + } + } + { + #[doc(hidden)] + type UnderlyingSolTuple<'a> = (alloy::sol_types::sol_data::Uint<256>,); + #[doc(hidden)] + type UnderlyingRustTuple<'a> = (alloy::sol_types::private::primitives::aliases::U256,); + #[cfg(test)] + 
#[allow(dead_code, unreachable_patterns)] + fn _type_assertion(_t: alloy_sol_types::private::AssertTypeEq) { + match _t { + alloy_sol_types::private::AssertTypeEq::< + ::RustType, + >(_) => {} + } + } + #[automatically_derived] + #[doc(hidden)] + impl ::core::convert::From for UnderlyingRustTuple<'_> { + fn from(value: numberReturn) -> Self { + (value._0,) + } + } + #[automatically_derived] + #[doc(hidden)] + impl ::core::convert::From> for numberReturn { + fn from(tuple: UnderlyingRustTuple<'_>) -> Self { + Self { _0: tuple.0 } + } + } + } + #[automatically_derived] + impl alloy_sol_types::SolCall for numberCall { + type Parameters<'a> = (); + type Token<'a> = as alloy_sol_types::SolType>::Token<'a>; + type Return = numberReturn; + type ReturnTuple<'a> = (alloy::sol_types::sol_data::Uint<256>,); + type ReturnToken<'a> = as alloy_sol_types::SolType>::Token<'a>; + const SIGNATURE: &'static str = "number()"; + const SELECTOR: [u8; 4] = [131u8, 129u8, 245u8, 138u8]; + #[inline] + fn new<'a>( + tuple: as alloy_sol_types::SolType>::RustType, + ) -> Self { + tuple.into() + } + #[inline] + fn tokenize(&self) -> Self::Token<'_> { + () + } + #[inline] + fn abi_decode_returns( + data: &[u8], + validate: bool, + ) -> alloy_sol_types::Result { + as alloy_sol_types::SolType>::abi_decode_sequence( + data, validate, + ) + .map(Into::into) + } + } + }; + /**Function with signature `testNoIOneD()` and selector `0xc0242000`. + ```solidity + function testNoIOneD() external; + ```*/ + #[allow(non_camel_case_types, non_snake_case, clippy::pub_underscore_fields)] + #[derive(Clone)] + pub struct testNoIOneDCall {} + ///Container type for the return parameters of the [`testNoIOneD()`](testNoIOneDCall) function. 
+ #[allow(non_camel_case_types, non_snake_case, clippy::pub_underscore_fields)] + #[derive(Clone)] + pub struct testNoIOneDReturn {} + #[allow( + non_camel_case_types, + non_snake_case, + clippy::pub_underscore_fields, + clippy::style + )] + const _: () = { + use alloy::sol_types as alloy_sol_types; + { + #[doc(hidden)] + type UnderlyingSolTuple<'a> = (); + #[doc(hidden)] + type UnderlyingRustTuple<'a> = (); + #[cfg(test)] + #[allow(dead_code, unreachable_patterns)] + fn _type_assertion(_t: alloy_sol_types::private::AssertTypeEq) { + match _t { + alloy_sol_types::private::AssertTypeEq::< + ::RustType, + >(_) => {} + } + } + #[automatically_derived] + #[doc(hidden)] + impl ::core::convert::From for UnderlyingRustTuple<'_> { + fn from(value: testNoIOneDCall) -> Self { + () + } + } + #[automatically_derived] + #[doc(hidden)] + impl ::core::convert::From> for testNoIOneDCall { + fn from(tuple: UnderlyingRustTuple<'_>) -> Self { + Self {} + } + } + } + { + #[doc(hidden)] + type UnderlyingSolTuple<'a> = (); + #[doc(hidden)] + type UnderlyingRustTuple<'a> = (); + #[cfg(test)] + #[allow(dead_code, unreachable_patterns)] + fn _type_assertion(_t: alloy_sol_types::private::AssertTypeEq) { + match _t { + alloy_sol_types::private::AssertTypeEq::< + ::RustType, + >(_) => {} + } + } + #[automatically_derived] + #[doc(hidden)] + impl ::core::convert::From for UnderlyingRustTuple<'_> { + fn from(value: testNoIOneDReturn) -> Self { + () + } + } + #[automatically_derived] + #[doc(hidden)] + impl ::core::convert::From> for testNoIOneDReturn { + fn from(tuple: UnderlyingRustTuple<'_>) -> Self { + Self {} + } + } + } + #[automatically_derived] + impl alloy_sol_types::SolCall for testNoIOneDCall { + type Parameters<'a> = (); + type Token<'a> = as alloy_sol_types::SolType>::Token<'a>; + type Return = testNoIOneDReturn; + type ReturnTuple<'a> = (); + type ReturnToken<'a> = as alloy_sol_types::SolType>::Token<'a>; + const SIGNATURE: &'static str = "testNoIOneD()"; + const SELECTOR: [u8; 4] 
= [192u8, 36u8, 32u8, 0u8]; + #[inline] + fn new<'a>( + tuple: as alloy_sol_types::SolType>::RustType, + ) -> Self { + tuple.into() + } + #[inline] + fn tokenize(&self) -> Self::Token<'_> { + () + } + #[inline] + fn abi_decode_returns( + data: &[u8], + validate: bool, + ) -> alloy_sol_types::Result { + as alloy_sol_types::SolType>::abi_decode_sequence( + data, validate, + ) + .map(Into::into) + } + } + }; + /**Function with signature `testNoITwoD()` and selector `0x46d6a7b5`. + ```solidity + function testNoITwoD() external; + ```*/ + #[allow(non_camel_case_types, non_snake_case, clippy::pub_underscore_fields)] + #[derive(Clone)] + pub struct testNoITwoDCall {} + ///Container type for the return parameters of the [`testNoITwoD()`](testNoITwoDCall) function. + #[allow(non_camel_case_types, non_snake_case, clippy::pub_underscore_fields)] + #[derive(Clone)] + pub struct testNoITwoDReturn {} + #[allow( + non_camel_case_types, + non_snake_case, + clippy::pub_underscore_fields, + clippy::style + )] + const _: () = { + use alloy::sol_types as alloy_sol_types; + { + #[doc(hidden)] + type UnderlyingSolTuple<'a> = (); + #[doc(hidden)] + type UnderlyingRustTuple<'a> = (); + #[cfg(test)] + #[allow(dead_code, unreachable_patterns)] + fn _type_assertion(_t: alloy_sol_types::private::AssertTypeEq) { + match _t { + alloy_sol_types::private::AssertTypeEq::< + ::RustType, + >(_) => {} + } + } + #[automatically_derived] + #[doc(hidden)] + impl ::core::convert::From for UnderlyingRustTuple<'_> { + fn from(value: testNoITwoDCall) -> Self { + () + } + } + #[automatically_derived] + #[doc(hidden)] + impl ::core::convert::From> for testNoITwoDCall { + fn from(tuple: UnderlyingRustTuple<'_>) -> Self { + Self {} + } + } + } + { + #[doc(hidden)] + type UnderlyingSolTuple<'a> = (); + #[doc(hidden)] + type UnderlyingRustTuple<'a> = (); + #[cfg(test)] + #[allow(dead_code, unreachable_patterns)] + fn _type_assertion(_t: alloy_sol_types::private::AssertTypeEq) { + match _t { + 
alloy_sol_types::private::AssertTypeEq::< + ::RustType, + >(_) => {} + } + } + #[automatically_derived] + #[doc(hidden)] + impl ::core::convert::From for UnderlyingRustTuple<'_> { + fn from(value: testNoITwoDReturn) -> Self { + () + } + } + #[automatically_derived] + #[doc(hidden)] + impl ::core::convert::From> for testNoITwoDReturn { + fn from(tuple: UnderlyingRustTuple<'_>) -> Self { + Self {} + } + } + } + #[automatically_derived] + impl alloy_sol_types::SolCall for testNoITwoDCall { + type Parameters<'a> = (); + type Token<'a> = as alloy_sol_types::SolType>::Token<'a>; + type Return = testNoITwoDReturn; + type ReturnTuple<'a> = (); + type ReturnToken<'a> = as alloy_sol_types::SolType>::Token<'a>; + const SIGNATURE: &'static str = "testNoITwoD()"; + const SELECTOR: [u8; 4] = [70u8, 214u8, 167u8, 181u8]; + #[inline] + fn new<'a>( + tuple: as alloy_sol_types::SolType>::RustType, + ) -> Self { + tuple.into() + } + #[inline] + fn tokenize(&self) -> Self::Token<'_> { + () + } + #[inline] + fn abi_decode_returns( + data: &[u8], + validate: bool, + ) -> alloy_sol_types::Result { + as alloy_sol_types::SolType>::abi_decode_sequence( + data, validate, + ) + .map(Into::into) + } + } + }; + /**Function with signature `testNoIndexed()` and selector `0x4282ed58`. + ```solidity + function testNoIndexed() external; + ```*/ + #[allow(non_camel_case_types, non_snake_case, clippy::pub_underscore_fields)] + #[derive(Clone)] + pub struct testNoIndexedCall {} + ///Container type for the return parameters of the [`testNoIndexed()`](testNoIndexedCall) function. 
+ #[allow(non_camel_case_types, non_snake_case, clippy::pub_underscore_fields)] + #[derive(Clone)] + pub struct testNoIndexedReturn {} + #[allow( + non_camel_case_types, + non_snake_case, + clippy::pub_underscore_fields, + clippy::style + )] + const _: () = { + use alloy::sol_types as alloy_sol_types; + { + #[doc(hidden)] + type UnderlyingSolTuple<'a> = (); + #[doc(hidden)] + type UnderlyingRustTuple<'a> = (); + #[cfg(test)] + #[allow(dead_code, unreachable_patterns)] + fn _type_assertion(_t: alloy_sol_types::private::AssertTypeEq) { + match _t { + alloy_sol_types::private::AssertTypeEq::< + ::RustType, + >(_) => {} + } + } + #[automatically_derived] + #[doc(hidden)] + impl ::core::convert::From for UnderlyingRustTuple<'_> { + fn from(value: testNoIndexedCall) -> Self { + () + } + } + #[automatically_derived] + #[doc(hidden)] + impl ::core::convert::From> for testNoIndexedCall { + fn from(tuple: UnderlyingRustTuple<'_>) -> Self { + Self {} + } + } + } + { + #[doc(hidden)] + type UnderlyingSolTuple<'a> = (); + #[doc(hidden)] + type UnderlyingRustTuple<'a> = (); + #[cfg(test)] + #[allow(dead_code, unreachable_patterns)] + fn _type_assertion(_t: alloy_sol_types::private::AssertTypeEq) { + match _t { + alloy_sol_types::private::AssertTypeEq::< + ::RustType, + >(_) => {} + } + } + #[automatically_derived] + #[doc(hidden)] + impl ::core::convert::From for UnderlyingRustTuple<'_> { + fn from(value: testNoIndexedReturn) -> Self { + () + } + } + #[automatically_derived] + #[doc(hidden)] + impl ::core::convert::From> for testNoIndexedReturn { + fn from(tuple: UnderlyingRustTuple<'_>) -> Self { + Self {} + } + } + } + #[automatically_derived] + impl alloy_sol_types::SolCall for testNoIndexedCall { + type Parameters<'a> = (); + type Token<'a> = as alloy_sol_types::SolType>::Token<'a>; + type Return = testNoIndexedReturn; + type ReturnTuple<'a> = (); + type ReturnToken<'a> = as alloy_sol_types::SolType>::Token<'a>; + const SIGNATURE: &'static str = "testNoIndexed()"; + const 
SELECTOR: [u8; 4] = [66u8, 130u8, 237u8, 88u8]; + #[inline] + fn new<'a>( + tuple: as alloy_sol_types::SolType>::RustType, + ) -> Self { + tuple.into() + } + #[inline] + fn tokenize(&self) -> Self::Token<'_> { + () + } + #[inline] + fn abi_decode_returns( + data: &[u8], + validate: bool, + ) -> alloy_sol_types::Result { + as alloy_sol_types::SolType>::abi_decode_sequence( + data, validate, + ) + .map(Into::into) + } + } + }; + /**Function with signature `testOneData()` and selector `0x003c7e56`. + ```solidity + function testOneData() external; + ```*/ + #[allow(non_camel_case_types, non_snake_case, clippy::pub_underscore_fields)] + #[derive(Clone)] + pub struct testOneDataCall {} + ///Container type for the return parameters of the [`testOneData()`](testOneDataCall) function. + #[allow(non_camel_case_types, non_snake_case, clippy::pub_underscore_fields)] + #[derive(Clone)] + pub struct testOneDataReturn {} + #[allow( + non_camel_case_types, + non_snake_case, + clippy::pub_underscore_fields, + clippy::style + )] + const _: () = { + use alloy::sol_types as alloy_sol_types; + { + #[doc(hidden)] + type UnderlyingSolTuple<'a> = (); + #[doc(hidden)] + type UnderlyingRustTuple<'a> = (); + #[cfg(test)] + #[allow(dead_code, unreachable_patterns)] + fn _type_assertion(_t: alloy_sol_types::private::AssertTypeEq) { + match _t { + alloy_sol_types::private::AssertTypeEq::< + ::RustType, + >(_) => {} + } + } + #[automatically_derived] + #[doc(hidden)] + impl ::core::convert::From for UnderlyingRustTuple<'_> { + fn from(value: testOneDataCall) -> Self { + () + } + } + #[automatically_derived] + #[doc(hidden)] + impl ::core::convert::From> for testOneDataCall { + fn from(tuple: UnderlyingRustTuple<'_>) -> Self { + Self {} + } + } + } + { + #[doc(hidden)] + type UnderlyingSolTuple<'a> = (); + #[doc(hidden)] + type UnderlyingRustTuple<'a> = (); + #[cfg(test)] + #[allow(dead_code, unreachable_patterns)] + fn _type_assertion(_t: alloy_sol_types::private::AssertTypeEq) { + match _t { + 
alloy_sol_types::private::AssertTypeEq::< + ::RustType, + >(_) => {} + } + } + #[automatically_derived] + #[doc(hidden)] + impl ::core::convert::From for UnderlyingRustTuple<'_> { + fn from(value: testOneDataReturn) -> Self { + () + } + } + #[automatically_derived] + #[doc(hidden)] + impl ::core::convert::From> for testOneDataReturn { + fn from(tuple: UnderlyingRustTuple<'_>) -> Self { + Self {} + } + } + } + #[automatically_derived] + impl alloy_sol_types::SolCall for testOneDataCall { + type Parameters<'a> = (); + type Token<'a> = as alloy_sol_types::SolType>::Token<'a>; + type Return = testOneDataReturn; + type ReturnTuple<'a> = (); + type ReturnToken<'a> = as alloy_sol_types::SolType>::Token<'a>; + const SIGNATURE: &'static str = "testOneData()"; + const SELECTOR: [u8; 4] = [0u8, 60u8, 126u8, 86u8]; + #[inline] + fn new<'a>( + tuple: as alloy_sol_types::SolType>::RustType, + ) -> Self { + tuple.into() + } + #[inline] + fn tokenize(&self) -> Self::Token<'_> { + () + } + #[inline] + fn abi_decode_returns( + data: &[u8], + validate: bool, + ) -> alloy_sol_types::Result { + as alloy_sol_types::SolType>::abi_decode_sequence( + data, validate, + ) + .map(Into::into) + } + } + }; + /**Function with signature `testOneIOneD()` and selector `0x4369f728`. + ```solidity + function testOneIOneD() external; + ```*/ + #[allow(non_camel_case_types, non_snake_case, clippy::pub_underscore_fields)] + #[derive(Clone)] + pub struct testOneIOneDCall {} + ///Container type for the return parameters of the [`testOneIOneD()`](testOneIOneDCall) function. 
+ #[allow(non_camel_case_types, non_snake_case, clippy::pub_underscore_fields)] + #[derive(Clone)] + pub struct testOneIOneDReturn {} + #[allow( + non_camel_case_types, + non_snake_case, + clippy::pub_underscore_fields, + clippy::style + )] + const _: () = { + use alloy::sol_types as alloy_sol_types; + { + #[doc(hidden)] + type UnderlyingSolTuple<'a> = (); + #[doc(hidden)] + type UnderlyingRustTuple<'a> = (); + #[cfg(test)] + #[allow(dead_code, unreachable_patterns)] + fn _type_assertion(_t: alloy_sol_types::private::AssertTypeEq) { + match _t { + alloy_sol_types::private::AssertTypeEq::< + ::RustType, + >(_) => {} + } + } + #[automatically_derived] + #[doc(hidden)] + impl ::core::convert::From for UnderlyingRustTuple<'_> { + fn from(value: testOneIOneDCall) -> Self { + () + } + } + #[automatically_derived] + #[doc(hidden)] + impl ::core::convert::From> for testOneIOneDCall { + fn from(tuple: UnderlyingRustTuple<'_>) -> Self { + Self {} + } + } + } + { + #[doc(hidden)] + type UnderlyingSolTuple<'a> = (); + #[doc(hidden)] + type UnderlyingRustTuple<'a> = (); + #[cfg(test)] + #[allow(dead_code, unreachable_patterns)] + fn _type_assertion(_t: alloy_sol_types::private::AssertTypeEq) { + match _t { + alloy_sol_types::private::AssertTypeEq::< + ::RustType, + >(_) => {} + } + } + #[automatically_derived] + #[doc(hidden)] + impl ::core::convert::From for UnderlyingRustTuple<'_> { + fn from(value: testOneIOneDReturn) -> Self { + () + } + } + #[automatically_derived] + #[doc(hidden)] + impl ::core::convert::From> for testOneIOneDReturn { + fn from(tuple: UnderlyingRustTuple<'_>) -> Self { + Self {} + } + } + } + #[automatically_derived] + impl alloy_sol_types::SolCall for testOneIOneDCall { + type Parameters<'a> = (); + type Token<'a> = as alloy_sol_types::SolType>::Token<'a>; + type Return = testOneIOneDReturn; + type ReturnTuple<'a> = (); + type ReturnToken<'a> = as alloy_sol_types::SolType>::Token<'a>; + const SIGNATURE: &'static str = "testOneIOneD()"; + const SELECTOR: 
[u8; 4] = [67u8, 105u8, 247u8, 40u8]; + #[inline] + fn new<'a>( + tuple: as alloy_sol_types::SolType>::RustType, + ) -> Self { + tuple.into() + } + #[inline] + fn tokenize(&self) -> Self::Token<'_> { + () + } + #[inline] + fn abi_decode_returns( + data: &[u8], + validate: bool, + ) -> alloy_sol_types::Result { + as alloy_sol_types::SolType>::abi_decode_sequence( + data, validate, + ) + .map(Into::into) + } + } + }; + /**Function with signature `testOneITwoD()` and selector `0x00d83b55`. + ```solidity + function testOneITwoD() external; + ```*/ + #[allow(non_camel_case_types, non_snake_case, clippy::pub_underscore_fields)] + #[derive(Clone)] + pub struct testOneITwoDCall {} + ///Container type for the return parameters of the [`testOneITwoD()`](testOneITwoDCall) function. + #[allow(non_camel_case_types, non_snake_case, clippy::pub_underscore_fields)] + #[derive(Clone)] + pub struct testOneITwoDReturn {} + #[allow( + non_camel_case_types, + non_snake_case, + clippy::pub_underscore_fields, + clippy::style + )] + const _: () = { + use alloy::sol_types as alloy_sol_types; + { + #[doc(hidden)] + type UnderlyingSolTuple<'a> = (); + #[doc(hidden)] + type UnderlyingRustTuple<'a> = (); + #[cfg(test)] + #[allow(dead_code, unreachable_patterns)] + fn _type_assertion(_t: alloy_sol_types::private::AssertTypeEq) { + match _t { + alloy_sol_types::private::AssertTypeEq::< + ::RustType, + >(_) => {} + } + } + #[automatically_derived] + #[doc(hidden)] + impl ::core::convert::From for UnderlyingRustTuple<'_> { + fn from(value: testOneITwoDCall) -> Self { + () + } + } + #[automatically_derived] + #[doc(hidden)] + impl ::core::convert::From> for testOneITwoDCall { + fn from(tuple: UnderlyingRustTuple<'_>) -> Self { + Self {} + } + } + } + { + #[doc(hidden)] + type UnderlyingSolTuple<'a> = (); + #[doc(hidden)] + type UnderlyingRustTuple<'a> = (); + #[cfg(test)] + #[allow(dead_code, unreachable_patterns)] + fn _type_assertion(_t: alloy_sol_types::private::AssertTypeEq) { + match _t { + 
alloy_sol_types::private::AssertTypeEq::< + ::RustType, + >(_) => {} + } + } + #[automatically_derived] + #[doc(hidden)] + impl ::core::convert::From for UnderlyingRustTuple<'_> { + fn from(value: testOneITwoDReturn) -> Self { + () + } + } + #[automatically_derived] + #[doc(hidden)] + impl ::core::convert::From> for testOneITwoDReturn { + fn from(tuple: UnderlyingRustTuple<'_>) -> Self { + Self {} + } + } + } + #[automatically_derived] + impl alloy_sol_types::SolCall for testOneITwoDCall { + type Parameters<'a> = (); + type Token<'a> = as alloy_sol_types::SolType>::Token<'a>; + type Return = testOneITwoDReturn; + type ReturnTuple<'a> = (); + type ReturnToken<'a> = as alloy_sol_types::SolType>::Token<'a>; + const SIGNATURE: &'static str = "testOneITwoD()"; + const SELECTOR: [u8; 4] = [0u8, 216u8, 59u8, 85u8]; + #[inline] + fn new<'a>( + tuple: as alloy_sol_types::SolType>::RustType, + ) -> Self { + tuple.into() + } + #[inline] + fn tokenize(&self) -> Self::Token<'_> { + () + } + #[inline] + fn abi_decode_returns( + data: &[u8], + validate: bool, + ) -> alloy_sol_types::Result { + as alloy_sol_types::SolType>::abi_decode_sequence( + data, validate, + ) + .map(Into::into) + } + } + }; + /**Function with signature `testOneIndexed()` and selector `0x729d4520`. + ```solidity + function testOneIndexed() external; + ```*/ + #[allow(non_camel_case_types, non_snake_case, clippy::pub_underscore_fields)] + #[derive(Clone)] + pub struct testOneIndexedCall {} + ///Container type for the return parameters of the [`testOneIndexed()`](testOneIndexedCall) function. 
+ #[allow(non_camel_case_types, non_snake_case, clippy::pub_underscore_fields)] + #[derive(Clone)] + pub struct testOneIndexedReturn {} + #[allow( + non_camel_case_types, + non_snake_case, + clippy::pub_underscore_fields, + clippy::style + )] + const _: () = { + use alloy::sol_types as alloy_sol_types; + { + #[doc(hidden)] + type UnderlyingSolTuple<'a> = (); + #[doc(hidden)] + type UnderlyingRustTuple<'a> = (); + #[cfg(test)] + #[allow(dead_code, unreachable_patterns)] + fn _type_assertion(_t: alloy_sol_types::private::AssertTypeEq) { + match _t { + alloy_sol_types::private::AssertTypeEq::< + ::RustType, + >(_) => {} + } + } + #[automatically_derived] + #[doc(hidden)] + impl ::core::convert::From for UnderlyingRustTuple<'_> { + fn from(value: testOneIndexedCall) -> Self { + () + } + } + #[automatically_derived] + #[doc(hidden)] + impl ::core::convert::From> for testOneIndexedCall { + fn from(tuple: UnderlyingRustTuple<'_>) -> Self { + Self {} + } + } + } + { + #[doc(hidden)] + type UnderlyingSolTuple<'a> = (); + #[doc(hidden)] + type UnderlyingRustTuple<'a> = (); + #[cfg(test)] + #[allow(dead_code, unreachable_patterns)] + fn _type_assertion(_t: alloy_sol_types::private::AssertTypeEq) { + match _t { + alloy_sol_types::private::AssertTypeEq::< + ::RustType, + >(_) => {} + } + } + #[automatically_derived] + #[doc(hidden)] + impl ::core::convert::From for UnderlyingRustTuple<'_> { + fn from(value: testOneIndexedReturn) -> Self { + () + } + } + #[automatically_derived] + #[doc(hidden)] + impl ::core::convert::From> for testOneIndexedReturn { + fn from(tuple: UnderlyingRustTuple<'_>) -> Self { + Self {} + } + } + } + #[automatically_derived] + impl alloy_sol_types::SolCall for testOneIndexedCall { + type Parameters<'a> = (); + type Token<'a> = as alloy_sol_types::SolType>::Token<'a>; + type Return = testOneIndexedReturn; + type ReturnTuple<'a> = (); + type ReturnToken<'a> = as alloy_sol_types::SolType>::Token<'a>; + const SIGNATURE: &'static str = "testOneIndexed()"; + 
const SELECTOR: [u8; 4] = [114u8, 157u8, 69u8, 32u8]; + #[inline] + fn new<'a>( + tuple: as alloy_sol_types::SolType>::RustType, + ) -> Self { + tuple.into() + } + #[inline] + fn tokenize(&self) -> Self::Token<'_> { + () + } + #[inline] + fn abi_decode_returns( + data: &[u8], + validate: bool, + ) -> alloy_sol_types::Result { + as alloy_sol_types::SolType>::abi_decode_sequence( + data, validate, + ) + .map(Into::into) + } + } + }; + /**Function with signature `testThreeIndexed()` and selector `0x31c1c63b`. + ```solidity + function testThreeIndexed() external; + ```*/ + #[allow(non_camel_case_types, non_snake_case, clippy::pub_underscore_fields)] + #[derive(Clone)] + pub struct testThreeIndexedCall {} + ///Container type for the return parameters of the [`testThreeIndexed()`](testThreeIndexedCall) function. + #[allow(non_camel_case_types, non_snake_case, clippy::pub_underscore_fields)] + #[derive(Clone)] + pub struct testThreeIndexedReturn {} + #[allow( + non_camel_case_types, + non_snake_case, + clippy::pub_underscore_fields, + clippy::style + )] + const _: () = { + use alloy::sol_types as alloy_sol_types; + { + #[doc(hidden)] + type UnderlyingSolTuple<'a> = (); + #[doc(hidden)] + type UnderlyingRustTuple<'a> = (); + #[cfg(test)] + #[allow(dead_code, unreachable_patterns)] + fn _type_assertion(_t: alloy_sol_types::private::AssertTypeEq) { + match _t { + alloy_sol_types::private::AssertTypeEq::< + ::RustType, + >(_) => {} + } + } + #[automatically_derived] + #[doc(hidden)] + impl ::core::convert::From for UnderlyingRustTuple<'_> { + fn from(value: testThreeIndexedCall) -> Self { + () + } + } + #[automatically_derived] + #[doc(hidden)] + impl ::core::convert::From> for testThreeIndexedCall { + fn from(tuple: UnderlyingRustTuple<'_>) -> Self { + Self {} + } + } + } + { + #[doc(hidden)] + type UnderlyingSolTuple<'a> = (); + #[doc(hidden)] + type UnderlyingRustTuple<'a> = (); + #[cfg(test)] + #[allow(dead_code, unreachable_patterns)] + fn _type_assertion(_t: 
alloy_sol_types::private::AssertTypeEq) { + match _t { + alloy_sol_types::private::AssertTypeEq::< + ::RustType, + >(_) => {} + } + } + #[automatically_derived] + #[doc(hidden)] + impl ::core::convert::From for UnderlyingRustTuple<'_> { + fn from(value: testThreeIndexedReturn) -> Self { + () + } + } + #[automatically_derived] + #[doc(hidden)] + impl ::core::convert::From> for testThreeIndexedReturn { + fn from(tuple: UnderlyingRustTuple<'_>) -> Self { + Self {} + } + } + } + #[automatically_derived] + impl alloy_sol_types::SolCall for testThreeIndexedCall { + type Parameters<'a> = (); + type Token<'a> = as alloy_sol_types::SolType>::Token<'a>; + type Return = testThreeIndexedReturn; + type ReturnTuple<'a> = (); + type ReturnToken<'a> = as alloy_sol_types::SolType>::Token<'a>; + const SIGNATURE: &'static str = "testThreeIndexed()"; + const SELECTOR: [u8; 4] = [49u8, 193u8, 198u8, 59u8]; + #[inline] + fn new<'a>( + tuple: as alloy_sol_types::SolType>::RustType, + ) -> Self { + tuple.into() + } + #[inline] + fn tokenize(&self) -> Self::Token<'_> { + () + } + #[inline] + fn abi_decode_returns( + data: &[u8], + validate: bool, + ) -> alloy_sol_types::Result { + as alloy_sol_types::SolType>::abi_decode_sequence( + data, validate, + ) + .map(Into::into) + } + } + }; + /**Function with signature `testTwoData()` and selector `0x2dc34764`. + ```solidity + function testTwoData() external; + ```*/ + #[allow(non_camel_case_types, non_snake_case, clippy::pub_underscore_fields)] + #[derive(Clone)] + pub struct testTwoDataCall {} + ///Container type for the return parameters of the [`testTwoData()`](testTwoDataCall) function. 
+ #[allow(non_camel_case_types, non_snake_case, clippy::pub_underscore_fields)] + #[derive(Clone)] + pub struct testTwoDataReturn {} + #[allow( + non_camel_case_types, + non_snake_case, + clippy::pub_underscore_fields, + clippy::style + )] + const _: () = { + use alloy::sol_types as alloy_sol_types; + { + #[doc(hidden)] + type UnderlyingSolTuple<'a> = (); + #[doc(hidden)] + type UnderlyingRustTuple<'a> = (); + #[cfg(test)] + #[allow(dead_code, unreachable_patterns)] + fn _type_assertion(_t: alloy_sol_types::private::AssertTypeEq) { + match _t { + alloy_sol_types::private::AssertTypeEq::< + ::RustType, + >(_) => {} + } + } + #[automatically_derived] + #[doc(hidden)] + impl ::core::convert::From for UnderlyingRustTuple<'_> { + fn from(value: testTwoDataCall) -> Self { + () + } + } + #[automatically_derived] + #[doc(hidden)] + impl ::core::convert::From> for testTwoDataCall { + fn from(tuple: UnderlyingRustTuple<'_>) -> Self { + Self {} + } + } + } + { + #[doc(hidden)] + type UnderlyingSolTuple<'a> = (); + #[doc(hidden)] + type UnderlyingRustTuple<'a> = (); + #[cfg(test)] + #[allow(dead_code, unreachable_patterns)] + fn _type_assertion(_t: alloy_sol_types::private::AssertTypeEq) { + match _t { + alloy_sol_types::private::AssertTypeEq::< + ::RustType, + >(_) => {} + } + } + #[automatically_derived] + #[doc(hidden)] + impl ::core::convert::From for UnderlyingRustTuple<'_> { + fn from(value: testTwoDataReturn) -> Self { + () + } + } + #[automatically_derived] + #[doc(hidden)] + impl ::core::convert::From> for testTwoDataReturn { + fn from(tuple: UnderlyingRustTuple<'_>) -> Self { + Self {} + } + } + } + #[automatically_derived] + impl alloy_sol_types::SolCall for testTwoDataCall { + type Parameters<'a> = (); + type Token<'a> = as alloy_sol_types::SolType>::Token<'a>; + type Return = testTwoDataReturn; + type ReturnTuple<'a> = (); + type ReturnToken<'a> = as alloy_sol_types::SolType>::Token<'a>; + const SIGNATURE: &'static str = "testTwoData()"; + const SELECTOR: [u8; 4] 
= [45u8, 195u8, 71u8, 100u8]; + #[inline] + fn new<'a>( + tuple: as alloy_sol_types::SolType>::RustType, + ) -> Self { + tuple.into() + } + #[inline] + fn tokenize(&self) -> Self::Token<'_> { + () + } + #[inline] + fn abi_decode_returns( + data: &[u8], + validate: bool, + ) -> alloy_sol_types::Result { + as alloy_sol_types::SolType>::abi_decode_sequence( + data, validate, + ) + .map(Into::into) + } + } + }; + /**Function with signature `testTwoIOneD()` and selector `0x63eb70f0`. + ```solidity + function testTwoIOneD() external; + ```*/ + #[allow(non_camel_case_types, non_snake_case, clippy::pub_underscore_fields)] + #[derive(Clone)] + pub struct testTwoIOneDCall {} + ///Container type for the return parameters of the [`testTwoIOneD()`](testTwoIOneDCall) function. + #[allow(non_camel_case_types, non_snake_case, clippy::pub_underscore_fields)] + #[derive(Clone)] + pub struct testTwoIOneDReturn {} + #[allow( + non_camel_case_types, + non_snake_case, + clippy::pub_underscore_fields, + clippy::style + )] + const _: () = { + use alloy::sol_types as alloy_sol_types; + { + #[doc(hidden)] + type UnderlyingSolTuple<'a> = (); + #[doc(hidden)] + type UnderlyingRustTuple<'a> = (); + #[cfg(test)] + #[allow(dead_code, unreachable_patterns)] + fn _type_assertion(_t: alloy_sol_types::private::AssertTypeEq) { + match _t { + alloy_sol_types::private::AssertTypeEq::< + ::RustType, + >(_) => {} + } + } + #[automatically_derived] + #[doc(hidden)] + impl ::core::convert::From for UnderlyingRustTuple<'_> { + fn from(value: testTwoIOneDCall) -> Self { + () + } + } + #[automatically_derived] + #[doc(hidden)] + impl ::core::convert::From> for testTwoIOneDCall { + fn from(tuple: UnderlyingRustTuple<'_>) -> Self { + Self {} + } + } + } + { + #[doc(hidden)] + type UnderlyingSolTuple<'a> = (); + #[doc(hidden)] + type UnderlyingRustTuple<'a> = (); + #[cfg(test)] + #[allow(dead_code, unreachable_patterns)] + fn _type_assertion(_t: alloy_sol_types::private::AssertTypeEq) { + match _t { + 
alloy_sol_types::private::AssertTypeEq::< + ::RustType, + >(_) => {} + } + } + #[automatically_derived] + #[doc(hidden)] + impl ::core::convert::From for UnderlyingRustTuple<'_> { + fn from(value: testTwoIOneDReturn) -> Self { + () + } + } + #[automatically_derived] + #[doc(hidden)] + impl ::core::convert::From> for testTwoIOneDReturn { + fn from(tuple: UnderlyingRustTuple<'_>) -> Self { + Self {} + } + } + } + #[automatically_derived] + impl alloy_sol_types::SolCall for testTwoIOneDCall { + type Parameters<'a> = (); + type Token<'a> = as alloy_sol_types::SolType>::Token<'a>; + type Return = testTwoIOneDReturn; + type ReturnTuple<'a> = (); + type ReturnToken<'a> = as alloy_sol_types::SolType>::Token<'a>; + const SIGNATURE: &'static str = "testTwoIOneD()"; + const SELECTOR: [u8; 4] = [99u8, 235u8, 112u8, 240u8]; + #[inline] + fn new<'a>( + tuple: as alloy_sol_types::SolType>::RustType, + ) -> Self { + tuple.into() + } + #[inline] + fn tokenize(&self) -> Self::Token<'_> { + () + } + #[inline] + fn abi_decode_returns( + data: &[u8], + validate: bool, + ) -> alloy_sol_types::Result { + as alloy_sol_types::SolType>::abi_decode_sequence( + data, validate, + ) + .map(Into::into) + } + } + }; + /**Function with signature `testTwoITwoD()` and selector `0xb1e057a9`. + ```solidity + function testTwoITwoD() external; + ```*/ + #[allow(non_camel_case_types, non_snake_case, clippy::pub_underscore_fields)] + #[derive(Clone)] + pub struct testTwoITwoDCall {} + ///Container type for the return parameters of the [`testTwoITwoD()`](testTwoITwoDCall) function. 
+ #[allow(non_camel_case_types, non_snake_case, clippy::pub_underscore_fields)] + #[derive(Clone)] + pub struct testTwoITwoDReturn {} + #[allow( + non_camel_case_types, + non_snake_case, + clippy::pub_underscore_fields, + clippy::style + )] + const _: () = { + use alloy::sol_types as alloy_sol_types; + { + #[doc(hidden)] + type UnderlyingSolTuple<'a> = (); + #[doc(hidden)] + type UnderlyingRustTuple<'a> = (); + #[cfg(test)] + #[allow(dead_code, unreachable_patterns)] + fn _type_assertion(_t: alloy_sol_types::private::AssertTypeEq) { + match _t { + alloy_sol_types::private::AssertTypeEq::< + ::RustType, + >(_) => {} + } + } + #[automatically_derived] + #[doc(hidden)] + impl ::core::convert::From for UnderlyingRustTuple<'_> { + fn from(value: testTwoITwoDCall) -> Self { + () + } + } + #[automatically_derived] + #[doc(hidden)] + impl ::core::convert::From> for testTwoITwoDCall { + fn from(tuple: UnderlyingRustTuple<'_>) -> Self { + Self {} + } + } + } + { + #[doc(hidden)] + type UnderlyingSolTuple<'a> = (); + #[doc(hidden)] + type UnderlyingRustTuple<'a> = (); + #[cfg(test)] + #[allow(dead_code, unreachable_patterns)] + fn _type_assertion(_t: alloy_sol_types::private::AssertTypeEq) { + match _t { + alloy_sol_types::private::AssertTypeEq::< + ::RustType, + >(_) => {} + } + } + #[automatically_derived] + #[doc(hidden)] + impl ::core::convert::From for UnderlyingRustTuple<'_> { + fn from(value: testTwoITwoDReturn) -> Self { + () + } + } + #[automatically_derived] + #[doc(hidden)] + impl ::core::convert::From> for testTwoITwoDReturn { + fn from(tuple: UnderlyingRustTuple<'_>) -> Self { + Self {} + } + } + } + #[automatically_derived] + impl alloy_sol_types::SolCall for testTwoITwoDCall { + type Parameters<'a> = (); + type Token<'a> = as alloy_sol_types::SolType>::Token<'a>; + type Return = testTwoITwoDReturn; + type ReturnTuple<'a> = (); + type ReturnToken<'a> = as alloy_sol_types::SolType>::Token<'a>; + const SIGNATURE: &'static str = "testTwoITwoD()"; + const SELECTOR: 
[u8; 4] = [177u8, 224u8, 87u8, 169u8]; + #[inline] + fn new<'a>( + tuple: as alloy_sol_types::SolType>::RustType, + ) -> Self { + tuple.into() + } + #[inline] + fn tokenize(&self) -> Self::Token<'_> { + () + } + #[inline] + fn abi_decode_returns( + data: &[u8], + validate: bool, + ) -> alloy_sol_types::Result { + as alloy_sol_types::SolType>::abi_decode_sequence( + data, validate, + ) + .map(Into::into) + } + } + }; + /**Function with signature `testTwoIndexed()` and selector `0x338b538a`. + ```solidity + function testTwoIndexed() external; + ```*/ + #[allow(non_camel_case_types, non_snake_case, clippy::pub_underscore_fields)] + #[derive(Clone)] + pub struct testTwoIndexedCall {} + ///Container type for the return parameters of the [`testTwoIndexed()`](testTwoIndexedCall) function. + #[allow(non_camel_case_types, non_snake_case, clippy::pub_underscore_fields)] + #[derive(Clone)] + pub struct testTwoIndexedReturn {} + #[allow( + non_camel_case_types, + non_snake_case, + clippy::pub_underscore_fields, + clippy::style + )] + const _: () = { + use alloy::sol_types as alloy_sol_types; + { + #[doc(hidden)] + type UnderlyingSolTuple<'a> = (); + #[doc(hidden)] + type UnderlyingRustTuple<'a> = (); + #[cfg(test)] + #[allow(dead_code, unreachable_patterns)] + fn _type_assertion(_t: alloy_sol_types::private::AssertTypeEq) { + match _t { + alloy_sol_types::private::AssertTypeEq::< + ::RustType, + >(_) => {} + } + } + #[automatically_derived] + #[doc(hidden)] + impl ::core::convert::From for UnderlyingRustTuple<'_> { + fn from(value: testTwoIndexedCall) -> Self { + () + } + } + #[automatically_derived] + #[doc(hidden)] + impl ::core::convert::From> for testTwoIndexedCall { + fn from(tuple: UnderlyingRustTuple<'_>) -> Self { + Self {} + } + } + } + { + #[doc(hidden)] + type UnderlyingSolTuple<'a> = (); + #[doc(hidden)] + type UnderlyingRustTuple<'a> = (); + #[cfg(test)] + #[allow(dead_code, unreachable_patterns)] + fn _type_assertion(_t: alloy_sol_types::private::AssertTypeEq) { 
+ match _t { + alloy_sol_types::private::AssertTypeEq::< + ::RustType, + >(_) => {} + } + } + #[automatically_derived] + #[doc(hidden)] + impl ::core::convert::From for UnderlyingRustTuple<'_> { + fn from(value: testTwoIndexedReturn) -> Self { + () + } + } + #[automatically_derived] + #[doc(hidden)] + impl ::core::convert::From> for testTwoIndexedReturn { + fn from(tuple: UnderlyingRustTuple<'_>) -> Self { + Self {} + } + } + } + #[automatically_derived] + impl alloy_sol_types::SolCall for testTwoIndexedCall { + type Parameters<'a> = (); + type Token<'a> = as alloy_sol_types::SolType>::Token<'a>; + type Return = testTwoIndexedReturn; + type ReturnTuple<'a> = (); + type ReturnToken<'a> = as alloy_sol_types::SolType>::Token<'a>; + const SIGNATURE: &'static str = "testTwoIndexed()"; + const SELECTOR: [u8; 4] = [51u8, 139u8, 83u8, 138u8]; + #[inline] + fn new<'a>( + tuple: as alloy_sol_types::SolType>::RustType, + ) -> Self { + tuple.into() + } + #[inline] + fn tokenize(&self) -> Self::Token<'_> { + () + } + #[inline] + fn abi_decode_returns( + data: &[u8], + validate: bool, + ) -> alloy_sol_types::Result { + as alloy_sol_types::SolType>::abi_decode_sequence( + data, validate, + ) + .map(Into::into) + } + } + }; + ///Container for all the [`EventEmitter`](self) function calls. + pub enum EventEmitterCalls { + increment(incrementCall), + number(numberCall), + testNoIOneD(testNoIOneDCall), + testNoITwoD(testNoITwoDCall), + testNoIndexed(testNoIndexedCall), + testOneData(testOneDataCall), + testOneIOneD(testOneIOneDCall), + testOneITwoD(testOneITwoDCall), + testOneIndexed(testOneIndexedCall), + testThreeIndexed(testThreeIndexedCall), + testTwoData(testTwoDataCall), + testTwoIOneD(testTwoIOneDCall), + testTwoITwoD(testTwoITwoDCall), + testTwoIndexed(testTwoIndexedCall), + } + #[automatically_derived] + impl EventEmitterCalls { + /// All the selectors of this enum. + /// + /// Note that the selectors might not be in the same order as the variants. 
+ /// No guarantees are made about the order of the selectors. + /// + /// Prefer using `SolInterface` methods instead. + pub const SELECTORS: &'static [[u8; 4usize]] = &[ + [0u8, 60u8, 126u8, 86u8], + [0u8, 216u8, 59u8, 85u8], + [45u8, 195u8, 71u8, 100u8], + [49u8, 193u8, 198u8, 59u8], + [51u8, 139u8, 83u8, 138u8], + [66u8, 130u8, 237u8, 88u8], + [67u8, 105u8, 247u8, 40u8], + [70u8, 214u8, 167u8, 181u8], + [99u8, 235u8, 112u8, 240u8], + [114u8, 157u8, 69u8, 32u8], + [131u8, 129u8, 245u8, 138u8], + [177u8, 224u8, 87u8, 169u8], + [192u8, 36u8, 32u8, 0u8], + [208u8, 157u8, 224u8, 138u8], + ]; + } + #[automatically_derived] + impl alloy_sol_types::SolInterface for EventEmitterCalls { + const NAME: &'static str = "EventEmitterCalls"; + const MIN_DATA_LENGTH: usize = 0usize; + const COUNT: usize = 14usize; + #[inline] + fn selector(&self) -> [u8; 4] { + match self { + Self::increment(_) => ::SELECTOR, + Self::number(_) => ::SELECTOR, + Self::testNoIOneD(_) => ::SELECTOR, + Self::testNoITwoD(_) => ::SELECTOR, + Self::testNoIndexed(_) => ::SELECTOR, + Self::testOneData(_) => ::SELECTOR, + Self::testOneIOneD(_) => ::SELECTOR, + Self::testOneITwoD(_) => ::SELECTOR, + Self::testOneIndexed(_) => { + ::SELECTOR + } + Self::testThreeIndexed(_) => { + ::SELECTOR + } + Self::testTwoData(_) => ::SELECTOR, + Self::testTwoIOneD(_) => ::SELECTOR, + Self::testTwoITwoD(_) => ::SELECTOR, + Self::testTwoIndexed(_) => { + ::SELECTOR + } + } + } + #[inline] + fn selector_at(i: usize) -> ::core::option::Option<[u8; 4]> { + Self::SELECTORS.get(i).copied() + } + #[inline] + fn valid_selector(selector: [u8; 4]) -> bool { + Self::SELECTORS.binary_search(&selector).is_ok() + } + #[inline] + #[allow(non_snake_case)] + fn abi_decode_raw( + selector: [u8; 4], + data: &[u8], + validate: bool, + ) -> alloy_sol_types::Result { + static DECODE_SHIMS: &[fn( + &[u8], + bool, + ) + -> alloy_sol_types::Result] = &[ + { + fn testOneData( + data: &[u8], + validate: bool, + ) -> alloy_sol_types::Result { + 
::abi_decode_raw( + data, validate, + ) + .map(EventEmitterCalls::testOneData) + } + testOneData + }, + { + fn testOneITwoD( + data: &[u8], + validate: bool, + ) -> alloy_sol_types::Result { + ::abi_decode_raw( + data, validate, + ) + .map(EventEmitterCalls::testOneITwoD) + } + testOneITwoD + }, + { + fn testTwoData( + data: &[u8], + validate: bool, + ) -> alloy_sol_types::Result { + ::abi_decode_raw( + data, validate, + ) + .map(EventEmitterCalls::testTwoData) + } + testTwoData + }, + { + fn testThreeIndexed( + data: &[u8], + validate: bool, + ) -> alloy_sol_types::Result { + ::abi_decode_raw( + data, validate, + ) + .map(EventEmitterCalls::testThreeIndexed) + } + testThreeIndexed + }, + { + fn testTwoIndexed( + data: &[u8], + validate: bool, + ) -> alloy_sol_types::Result { + ::abi_decode_raw( + data, validate, + ) + .map(EventEmitterCalls::testTwoIndexed) + } + testTwoIndexed + }, + { + fn testNoIndexed( + data: &[u8], + validate: bool, + ) -> alloy_sol_types::Result { + ::abi_decode_raw( + data, validate, + ) + .map(EventEmitterCalls::testNoIndexed) + } + testNoIndexed + }, + { + fn testOneIOneD( + data: &[u8], + validate: bool, + ) -> alloy_sol_types::Result { + ::abi_decode_raw( + data, validate, + ) + .map(EventEmitterCalls::testOneIOneD) + } + testOneIOneD + }, + { + fn testNoITwoD( + data: &[u8], + validate: bool, + ) -> alloy_sol_types::Result { + ::abi_decode_raw( + data, validate, + ) + .map(EventEmitterCalls::testNoITwoD) + } + testNoITwoD + }, + { + fn testTwoIOneD( + data: &[u8], + validate: bool, + ) -> alloy_sol_types::Result { + ::abi_decode_raw( + data, validate, + ) + .map(EventEmitterCalls::testTwoIOneD) + } + testTwoIOneD + }, + { + fn testOneIndexed( + data: &[u8], + validate: bool, + ) -> alloy_sol_types::Result { + ::abi_decode_raw( + data, validate, + ) + .map(EventEmitterCalls::testOneIndexed) + } + testOneIndexed + }, + { + fn number( + data: &[u8], + validate: bool, + ) -> alloy_sol_types::Result { + ::abi_decode_raw(data, validate) + 
.map(EventEmitterCalls::number) + } + number + }, + { + fn testTwoITwoD( + data: &[u8], + validate: bool, + ) -> alloy_sol_types::Result { + ::abi_decode_raw( + data, validate, + ) + .map(EventEmitterCalls::testTwoITwoD) + } + testTwoITwoD + }, + { + fn testNoIOneD( + data: &[u8], + validate: bool, + ) -> alloy_sol_types::Result { + ::abi_decode_raw( + data, validate, + ) + .map(EventEmitterCalls::testNoIOneD) + } + testNoIOneD + }, + { + fn increment( + data: &[u8], + validate: bool, + ) -> alloy_sol_types::Result { + ::abi_decode_raw(data, validate) + .map(EventEmitterCalls::increment) + } + increment + }, + ]; + let Ok(idx) = Self::SELECTORS.binary_search(&selector) else { + return Err(alloy_sol_types::Error::unknown_selector( + ::NAME, + selector, + )); + }; + DECODE_SHIMS[idx](data, validate) + } + #[inline] + fn abi_encoded_size(&self) -> usize { + match self { + Self::increment(inner) => { + ::abi_encoded_size(inner) + } + Self::number(inner) => { + ::abi_encoded_size(inner) + } + Self::testNoIOneD(inner) => { + ::abi_encoded_size(inner) + } + Self::testNoITwoD(inner) => { + ::abi_encoded_size(inner) + } + Self::testNoIndexed(inner) => { + ::abi_encoded_size(inner) + } + Self::testOneData(inner) => { + ::abi_encoded_size(inner) + } + Self::testOneIOneD(inner) => { + ::abi_encoded_size(inner) + } + Self::testOneITwoD(inner) => { + ::abi_encoded_size(inner) + } + Self::testOneIndexed(inner) => { + ::abi_encoded_size(inner) + } + Self::testThreeIndexed(inner) => { + ::abi_encoded_size(inner) + } + Self::testTwoData(inner) => { + ::abi_encoded_size(inner) + } + Self::testTwoIOneD(inner) => { + ::abi_encoded_size(inner) + } + Self::testTwoITwoD(inner) => { + ::abi_encoded_size(inner) + } + Self::testTwoIndexed(inner) => { + ::abi_encoded_size(inner) + } + } + } + #[inline] + fn abi_encode_raw(&self, out: &mut alloy_sol_types::private::Vec) { + match self { + Self::increment(inner) => { + ::abi_encode_raw(inner, out) + } + Self::number(inner) => { + 
::abi_encode_raw(inner, out) + } + Self::testNoIOneD(inner) => { + ::abi_encode_raw(inner, out) + } + Self::testNoITwoD(inner) => { + ::abi_encode_raw(inner, out) + } + Self::testNoIndexed(inner) => { + ::abi_encode_raw(inner, out) + } + Self::testOneData(inner) => { + ::abi_encode_raw(inner, out) + } + Self::testOneIOneD(inner) => { + ::abi_encode_raw(inner, out) + } + Self::testOneITwoD(inner) => { + ::abi_encode_raw(inner, out) + } + Self::testOneIndexed(inner) => { + ::abi_encode_raw(inner, out) + } + Self::testThreeIndexed(inner) => { + ::abi_encode_raw(inner, out) + } + Self::testTwoData(inner) => { + ::abi_encode_raw(inner, out) + } + Self::testTwoIOneD(inner) => { + ::abi_encode_raw(inner, out) + } + Self::testTwoITwoD(inner) => { + ::abi_encode_raw(inner, out) + } + Self::testTwoIndexed(inner) => { + ::abi_encode_raw(inner, out) + } + } + } + } + ///Container for all the [`EventEmitter`](self) events. + pub enum EventEmitterEvents { + noIOneD(noIOneD), + noITwoD(noITwoD), + noIndexed(noIndexed), + oneData(oneData), + oneIOneD(oneIOneD), + oneITwoD(oneITwoD), + oneIndexed(oneIndexed), + threeIndexed(threeIndexed), + twoData(twoData), + twoIOneD(twoIOneD), + twoITwoD(twoITwoD), + twoIndexed(twoIndexed), + } + #[automatically_derived] + impl EventEmitterEvents { + /// All the selectors of this enum. + /// + /// Note that the selectors might not be in the same order as the variants. + /// No guarantees are made about the order of the selectors. + /// + /// Prefer using `SolInterface` methods instead. 
+ pub const SELECTORS: &'static [[u8; 32usize]] = &[ + [ + 0u8, 247u8, 199u8, 79u8, 5u8, 51u8, 170u8, 21u8, 229u8, 172u8, 124u8, 175u8, 169u8, + 249u8, 38u8, 29u8, 20u8, 218u8, 30u8, 120u8, 131u8, 13u8, 235u8, 167u8, 17u8, 15u8, + 188u8, 121u8, 0u8, 30u8, 209u8, 94u8, + ], + [ + 4u8, 247u8, 251u8, 40u8, 158u8, 81u8, 234u8, 153u8, 150u8, 236u8, 152u8, 230u8, + 47u8, 244u8, 182u8, 81u8, 190u8, 207u8, 166u8, 229u8, 63u8, 59u8, 133u8, 11u8, + 226u8, 9u8, 182u8, 151u8, 65u8, 198u8, 111u8, 36u8, + ], + [ + 22u8, 135u8, 24u8, 192u8, 177u8, 235u8, 107u8, 253u8, 123u8, 14u8, 222u8, 206u8, + 165u8, 198u8, 252u8, 101u8, 2u8, 115u8, 122u8, 215u8, 58u8, 76u8, 159u8, 82u8, + 255u8, 167u8, 229u8, 83u8, 200u8, 235u8, 159u8, 83u8, + ], + [ + 29u8, 24u8, 222u8, 44u8, 216u8, 121u8, 138u8, 28u8, 41u8, 185u8, 37u8, 89u8, 48u8, + 200u8, 7u8, 235u8, 108u8, 132u8, 174u8, 10u8, 203u8, 34u8, 25u8, 172u8, 187u8, + 17u8, 224u8, 246u8, 92u8, 248u8, 19u8, 233u8, + ], + [ + 47u8, 166u8, 21u8, 23u8, 221u8, 249u8, 220u8, 127u8, 47u8, 61u8, 92u8, 167u8, 36u8, + 20u8, 160u8, 28u8, 131u8, 77u8, 156u8, 91u8, 183u8, 195u8, 54u8, 201u8, 119u8, + 66u8, 60u8, 133u8, 9u8, 75u8, 186u8, 97u8, + ], + [ + 59u8, 178u8, 214u8, 51u8, 120u8, 130u8, 250u8, 165u8, 82u8, 108u8, 248u8, 6u8, + 201u8, 118u8, 57u8, 4u8, 169u8, 15u8, 51u8, 99u8, 89u8, 13u8, 212u8, 56u8, 105u8, + 19u8, 227u8, 252u8, 216u8, 162u8, 225u8, 209u8, + ], + [ + 75u8, 146u8, 34u8, 154u8, 190u8, 32u8, 74u8, 48u8, 215u8, 176u8, 136u8, 216u8, + 17u8, 2u8, 145u8, 118u8, 9u8, 52u8, 214u8, 91u8, 60u8, 150u8, 6u8, 128u8, 173u8, + 148u8, 224u8, 95u8, 82u8, 168u8, 134u8, 5u8, + ], + [ + 166u8, 186u8, 241u8, 77u8, 143u8, 17u8, 215u8, 164u8, 73u8, 112u8, 137u8, 187u8, + 63u8, 202u8, 10u8, 223u8, 195u8, 72u8, 55u8, 207u8, 177u8, 244u8, 170u8, 55u8, 6u8, + 52u8, 211u8, 110u8, 240u8, 48u8, 91u8, 70u8, + ], + [ + 194u8, 128u8, 154u8, 26u8, 47u8, 185u8, 93u8, 132u8, 207u8, 220u8, 72u8, 140u8, + 219u8, 50u8, 10u8, 20u8, 76u8, 21u8, 143u8, 141u8, 68u8, 131u8, 
108u8, 156u8, 45u8, + 75u8, 173u8, 186u8, 8u8, 43u8, 253u8, 250u8, + ], + [ + 239u8, 76u8, 136u8, 25u8, 52u8, 152u8, 223u8, 35u8, 127u8, 3u8, 144u8, 85u8, 209u8, + 33u8, 42u8, 194u8, 163u8, 185u8, 62u8, 216u8, 174u8, 168u8, 140u8, 129u8, 67u8, + 18u8, 229u8, 15u8, 106u8, 50u8, 89u8, 45u8, + ], + [ + 240u8, 61u8, 41u8, 117u8, 63u8, 189u8, 90u8, 194u8, 9u8, 186u8, 184u8, 138u8, + 153u8, 179u8, 150u8, 188u8, 194u8, 92u8, 62u8, 114u8, 83u8, 13u8, 2u8, 200u8, 26u8, + 234u8, 77u8, 50u8, 74u8, 179u8, 215u8, 66u8, + ], + [ + 245u8, 127u8, 67u8, 62u8, 185u8, 73u8, 60u8, 244u8, 217u8, 203u8, 87u8, 99u8, + 193u8, 34u8, 33u8, 217u8, 176u8, 149u8, 128u8, 70u8, 68u8, 212u8, 238u8, 0u8, + 106u8, 120u8, 199u8, 32u8, 118u8, 207u8, 249u8, 71u8, + ], + ]; + } + #[automatically_derived] + impl alloy_sol_types::SolEventInterface for EventEmitterEvents { + const NAME: &'static str = "EventEmitterEvents"; + const COUNT: usize = 12usize; + fn decode_raw_log( + topics: &[alloy_sol_types::Word], + data: &[u8], + validate: bool, + ) -> alloy_sol_types::Result { + match topics.first().copied() { + Some(::SIGNATURE_HASH) => { + ::decode_raw_log(topics, data, validate) + .map(Self::noIOneD) + } + Some(::SIGNATURE_HASH) => { + ::decode_raw_log(topics, data, validate) + .map(Self::noITwoD) + } + Some(::SIGNATURE_HASH) => { + ::decode_raw_log(topics, data, validate) + .map(Self::noIndexed) + } + Some(::SIGNATURE_HASH) => { + ::decode_raw_log(topics, data, validate) + .map(Self::oneData) + } + Some(::SIGNATURE_HASH) => { + ::decode_raw_log(topics, data, validate) + .map(Self::oneIOneD) + } + Some(::SIGNATURE_HASH) => { + ::decode_raw_log(topics, data, validate) + .map(Self::oneITwoD) + } + Some(::SIGNATURE_HASH) => { + ::decode_raw_log( + topics, data, validate, + ) + .map(Self::oneIndexed) + } + Some(::SIGNATURE_HASH) => { + ::decode_raw_log( + topics, data, validate, + ) + .map(Self::threeIndexed) + } + Some(::SIGNATURE_HASH) => { + ::decode_raw_log(topics, data, validate) + .map(Self::twoData) + 
} + Some(::SIGNATURE_HASH) => { + ::decode_raw_log(topics, data, validate) + .map(Self::twoIOneD) + } + Some(::SIGNATURE_HASH) => { + ::decode_raw_log(topics, data, validate) + .map(Self::twoITwoD) + } + Some(::SIGNATURE_HASH) => { + ::decode_raw_log( + topics, data, validate, + ) + .map(Self::twoIndexed) + } + _ => alloy_sol_types::private::Err(alloy_sol_types::Error::InvalidLog { + name: ::NAME, + log: alloy_sol_types::private::Box::new( + alloy_sol_types::private::LogData::new_unchecked( + topics.to_vec(), + data.to_vec().into(), + ), + ), + }), + } + } + } + #[automatically_derived] + impl alloy_sol_types::private::IntoLogData for EventEmitterEvents { + fn to_log_data(&self) -> alloy_sol_types::private::LogData { + match self { + Self::noIOneD(inner) => alloy_sol_types::private::IntoLogData::to_log_data(inner), + Self::noITwoD(inner) => alloy_sol_types::private::IntoLogData::to_log_data(inner), + Self::noIndexed(inner) => alloy_sol_types::private::IntoLogData::to_log_data(inner), + Self::oneData(inner) => alloy_sol_types::private::IntoLogData::to_log_data(inner), + Self::oneIOneD(inner) => alloy_sol_types::private::IntoLogData::to_log_data(inner), + Self::oneITwoD(inner) => alloy_sol_types::private::IntoLogData::to_log_data(inner), + Self::oneIndexed(inner) => { + alloy_sol_types::private::IntoLogData::to_log_data(inner) + } + Self::threeIndexed(inner) => { + alloy_sol_types::private::IntoLogData::to_log_data(inner) + } + Self::twoData(inner) => alloy_sol_types::private::IntoLogData::to_log_data(inner), + Self::twoIOneD(inner) => alloy_sol_types::private::IntoLogData::to_log_data(inner), + Self::twoITwoD(inner) => alloy_sol_types::private::IntoLogData::to_log_data(inner), + Self::twoIndexed(inner) => { + alloy_sol_types::private::IntoLogData::to_log_data(inner) + } + } + } + fn into_log_data(self) -> alloy_sol_types::private::LogData { + match self { + Self::noIOneD(inner) => alloy_sol_types::private::IntoLogData::into_log_data(inner), + Self::noITwoD(inner) => 
alloy_sol_types::private::IntoLogData::into_log_data(inner), + Self::noIndexed(inner) => { + alloy_sol_types::private::IntoLogData::into_log_data(inner) + } + Self::oneData(inner) => alloy_sol_types::private::IntoLogData::into_log_data(inner), + Self::oneIOneD(inner) => { + alloy_sol_types::private::IntoLogData::into_log_data(inner) + } + Self::oneITwoD(inner) => { + alloy_sol_types::private::IntoLogData::into_log_data(inner) + } + Self::oneIndexed(inner) => { + alloy_sol_types::private::IntoLogData::into_log_data(inner) + } + Self::threeIndexed(inner) => { + alloy_sol_types::private::IntoLogData::into_log_data(inner) + } + Self::twoData(inner) => alloy_sol_types::private::IntoLogData::into_log_data(inner), + Self::twoIOneD(inner) => { + alloy_sol_types::private::IntoLogData::into_log_data(inner) + } + Self::twoITwoD(inner) => { + alloy_sol_types::private::IntoLogData::into_log_data(inner) + } + Self::twoIndexed(inner) => { + alloy_sol_types::private::IntoLogData::into_log_data(inner) + } + } + } + } + use alloy::contract as alloy_contract; + /**Creates a new wrapper around an on-chain [`EventEmitter`](self) contract instance. + + See the [wrapper's documentation](`EventEmitterInstance`) for more details.*/ + #[inline] + pub const fn new< + T: alloy_contract::private::Transport + ::core::clone::Clone, + P: alloy_contract::private::Provider, + N: alloy_contract::private::Network, + >( + address: alloy_sol_types::private::Address, + provider: P, + ) -> EventEmitterInstance { + EventEmitterInstance::::new(address, provider) + } + /**Deploys this contract using the given `provider` and constructor arguments, if any. + + Returns a new instance of the contract, if the deployment was successful. 
+ + For more fine-grained control over the deployment process, use [`deploy_builder`] instead.*/ + #[inline] + pub fn deploy< + T: alloy_contract::private::Transport + ::core::clone::Clone, + P: alloy_contract::private::Provider, + N: alloy_contract::private::Network, + >( + provider: P, + ) -> impl ::core::future::Future>> + { + EventEmitterInstance::::deploy(provider) + } + /**Creates a `RawCallBuilder` for deploying this contract using the given `provider` + and constructor arguments, if any. + + This is a simple wrapper around creating a `RawCallBuilder` with the data set to + the bytecode concatenated with the constructor's ABI-encoded arguments.*/ + #[inline] + pub fn deploy_builder< + T: alloy_contract::private::Transport + ::core::clone::Clone, + P: alloy_contract::private::Provider, + N: alloy_contract::private::Network, + >( + provider: P, + ) -> alloy_contract::RawCallBuilder { + EventEmitterInstance::::deploy_builder(provider) + } + /**A [`EventEmitter`](self) instance. + + Contains type-safe methods for interacting with an on-chain instance of the + [`EventEmitter`](self) contract located at a given `address`, using a given + provider `P`. + + If the contract bytecode is available (see the [`sol!`](alloy_sol_types::sol!) + documentation on how to provide it), the `deploy` and `deploy_builder` methods can + be used to deploy a new instance of the contract. + + See the [module-level documentation](self) for all the available methods.*/ + #[derive(Clone)] + pub struct EventEmitterInstance { + address: alloy_sol_types::private::Address, + provider: P, + _network_transport: ::core::marker::PhantomData<(N, T)>, + } + #[automatically_derived] + impl ::core::fmt::Debug for EventEmitterInstance { + #[inline] + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + f.debug_tuple("EventEmitterInstance") + .field(&self.address) + .finish() + } + } + /// Instantiation and getters/setters. 
+ #[automatically_derived] + impl< + T: alloy_contract::private::Transport + ::core::clone::Clone, + P: alloy_contract::private::Provider, + N: alloy_contract::private::Network, + > EventEmitterInstance + { + /**Creates a new wrapper around an on-chain [`EventEmitter`](self) contract instance. + + See the [wrapper's documentation](`EventEmitterInstance`) for more details.*/ + #[inline] + pub const fn new(address: alloy_sol_types::private::Address, provider: P) -> Self { + Self { + address, + provider, + _network_transport: ::core::marker::PhantomData, + } + } + /**Deploys this contract using the given `provider` and constructor arguments, if any. + + Returns a new instance of the contract, if the deployment was successful. + + For more fine-grained control over the deployment process, use [`deploy_builder`] instead.*/ + #[inline] + pub async fn deploy(provider: P) -> alloy_contract::Result> { + let call_builder = Self::deploy_builder(provider); + let contract_address = call_builder.deploy().await?; + Ok(Self::new(contract_address, call_builder.provider)) + } + /**Creates a `RawCallBuilder` for deploying this contract using the given `provider` + and constructor arguments, if any. + + This is a simple wrapper around creating a `RawCallBuilder` with the data set to + the bytecode concatenated with the constructor's ABI-encoded arguments.*/ + #[inline] + pub fn deploy_builder(provider: P) -> alloy_contract::RawCallBuilder { + alloy_contract::RawCallBuilder::new_raw_deploy( + provider, + ::core::clone::Clone::clone(&BYTECODE), + ) + } + /// Returns a reference to the address. + #[inline] + pub const fn address(&self) -> &alloy_sol_types::private::Address { + &self.address + } + /// Sets the address. + #[inline] + pub fn set_address(&mut self, address: alloy_sol_types::private::Address) { + self.address = address; + } + /// Sets the address and returns `self`. 
+ pub fn at(mut self, address: alloy_sol_types::private::Address) -> Self { + self.set_address(address); + self + } + /// Returns a reference to the provider. + #[inline] + pub const fn provider(&self) -> &P { + &self.provider + } + } + impl EventEmitterInstance { + /// Clones the provider and returns a new instance with the cloned provider. + #[inline] + pub fn with_cloned_provider(self) -> EventEmitterInstance { + EventEmitterInstance { + address: self.address, + provider: ::core::clone::Clone::clone(&self.provider), + _network_transport: ::core::marker::PhantomData, + } + } + } + /// Function calls. + #[automatically_derived] + impl< + T: alloy_contract::private::Transport + ::core::clone::Clone, + P: alloy_contract::private::Provider, + N: alloy_contract::private::Network, + > EventEmitterInstance + { + /// Creates a new call builder using this contract instance's provider and address. + /// + /// Note that the call can be any function call, not just those defined in this + /// contract. Prefer using the other methods for building type-safe contract calls. + pub fn call_builder( + &self, + call: &C, + ) -> alloy_contract::SolCallBuilder { + alloy_contract::SolCallBuilder::new_sol(&self.provider, &self.address, call) + } + ///Creates a new call builder for the [`increment`] function. + pub fn increment(&self) -> alloy_contract::SolCallBuilder { + self.call_builder(&incrementCall {}) + } + ///Creates a new call builder for the [`number`] function. + pub fn number(&self) -> alloy_contract::SolCallBuilder { + self.call_builder(&numberCall {}) + } + ///Creates a new call builder for the [`testNoIOneD`] function. + pub fn testNoIOneD(&self) -> alloy_contract::SolCallBuilder { + self.call_builder(&testNoIOneDCall {}) + } + ///Creates a new call builder for the [`testNoITwoD`] function. + pub fn testNoITwoD(&self) -> alloy_contract::SolCallBuilder { + self.call_builder(&testNoITwoDCall {}) + } + ///Creates a new call builder for the [`testNoIndexed`] function. 
+ pub fn testNoIndexed(&self) -> alloy_contract::SolCallBuilder { + self.call_builder(&testNoIndexedCall {}) + } + ///Creates a new call builder for the [`testOneData`] function. + pub fn testOneData(&self) -> alloy_contract::SolCallBuilder { + self.call_builder(&testOneDataCall {}) + } + ///Creates a new call builder for the [`testOneIOneD`] function. + pub fn testOneIOneD(&self) -> alloy_contract::SolCallBuilder { + self.call_builder(&testOneIOneDCall {}) + } + ///Creates a new call builder for the [`testOneITwoD`] function. + pub fn testOneITwoD(&self) -> alloy_contract::SolCallBuilder { + self.call_builder(&testOneITwoDCall {}) + } + ///Creates a new call builder for the [`testOneIndexed`] function. + pub fn testOneIndexed( + &self, + ) -> alloy_contract::SolCallBuilder { + self.call_builder(&testOneIndexedCall {}) + } + ///Creates a new call builder for the [`testThreeIndexed`] function. + pub fn testThreeIndexed( + &self, + ) -> alloy_contract::SolCallBuilder { + self.call_builder(&testThreeIndexedCall {}) + } + ///Creates a new call builder for the [`testTwoData`] function. + pub fn testTwoData(&self) -> alloy_contract::SolCallBuilder { + self.call_builder(&testTwoDataCall {}) + } + ///Creates a new call builder for the [`testTwoIOneD`] function. + pub fn testTwoIOneD(&self) -> alloy_contract::SolCallBuilder { + self.call_builder(&testTwoIOneDCall {}) + } + ///Creates a new call builder for the [`testTwoITwoD`] function. + pub fn testTwoITwoD(&self) -> alloy_contract::SolCallBuilder { + self.call_builder(&testTwoITwoDCall {}) + } + ///Creates a new call builder for the [`testTwoIndexed`] function. + pub fn testTwoIndexed( + &self, + ) -> alloy_contract::SolCallBuilder { + self.call_builder(&testTwoIndexedCall {}) + } + } + /// Event filters. 
+ #[automatically_derived] + impl< + T: alloy_contract::private::Transport + ::core::clone::Clone, + P: alloy_contract::private::Provider, + N: alloy_contract::private::Network, + > EventEmitterInstance + { + /// Creates a new event filter using this contract instance's provider and address. + /// + /// Note that the type can be any event, not just those defined in this contract. + /// Prefer using the other methods for building type-safe event filters. + pub fn event_filter( + &self, + ) -> alloy_contract::Event { + alloy_contract::Event::new_sol(&self.provider, &self.address) + } + ///Creates a new event filter for the [`noIOneD`] event. + pub fn noIOneD_filter(&self) -> alloy_contract::Event { + self.event_filter::() + } + ///Creates a new event filter for the [`noITwoD`] event. + pub fn noITwoD_filter(&self) -> alloy_contract::Event { + self.event_filter::() + } + ///Creates a new event filter for the [`noIndexed`] event. + pub fn noIndexed_filter(&self) -> alloy_contract::Event { + self.event_filter::() + } + ///Creates a new event filter for the [`oneData`] event. + pub fn oneData_filter(&self) -> alloy_contract::Event { + self.event_filter::() + } + ///Creates a new event filter for the [`oneIOneD`] event. + pub fn oneIOneD_filter(&self) -> alloy_contract::Event { + self.event_filter::() + } + ///Creates a new event filter for the [`oneITwoD`] event. + pub fn oneITwoD_filter(&self) -> alloy_contract::Event { + self.event_filter::() + } + ///Creates a new event filter for the [`oneIndexed`] event. + pub fn oneIndexed_filter(&self) -> alloy_contract::Event { + self.event_filter::() + } + ///Creates a new event filter for the [`threeIndexed`] event. + pub fn threeIndexed_filter(&self) -> alloy_contract::Event { + self.event_filter::() + } + ///Creates a new event filter for the [`twoData`] event. + pub fn twoData_filter(&self) -> alloy_contract::Event { + self.event_filter::() + } + ///Creates a new event filter for the [`twoIOneD`] event. 
+ pub fn twoIOneD_filter(&self) -> alloy_contract::Event { + self.event_filter::() + } + ///Creates a new event filter for the [`twoITwoD`] event. + pub fn twoITwoD_filter(&self) -> alloy_contract::Event { + self.event_filter::() + } + ///Creates a new event filter for the [`twoIndexed`] event. + pub fn twoIndexed_filter(&self) -> alloy_contract::Event { + self.event_filter::() + } + } +} diff --git a/mp2-v1/tests/common/bindings/mod.rs b/mp2-v1/tests/common/bindings/mod.rs index c8e26af41..6539eb0db 100644 --- a/mp2-v1/tests/common/bindings/mod.rs +++ b/mp2-v1/tests/common/bindings/mod.rs @@ -3,4 +3,5 @@ //! This is autogenerated code. //! Do not manually edit these files. //! These files may be overwritten by the codegen system at any time. +pub mod eventemitter; pub mod simple; diff --git a/mp2-v1/tests/common/bindings/simple.rs b/mp2-v1/tests/common/bindings/simple.rs index 1b1bcae41..e6c1de19b 100644 --- a/mp2-v1/tests/common/bindings/simple.rs +++ b/mp2-v1/tests/common/bindings/simple.rs @@ -590,7 +590,13 @@ interface Simple { } ] ```*/ -#[allow(non_camel_case_types, non_snake_case, clippy::style)] +#[allow( + non_camel_case_types, + non_snake_case, + clippy::pub_underscore_fields, + clippy::style, + clippy::empty_structs_with_brackets +)] pub mod Simple { use super::*; use alloy::sol_types as alloy_sol_types; @@ -614,7 +620,7 @@ pub mod Simple { pub static DEPLOYED_BYTECODE: alloy_sol_types::private::Bytes = alloy_sol_types::private::Bytes::from_static( 
b"`\x80`@R4\x80\x15a\0\x0FW_\x80\xFD[P`\x046\x10a\x01=W_5`\xE0\x1C\x80c\x88\xDF\xDD\xC6\x11a\0\xB4W\x80c\xC7\xBFM\xB5\x11a\0yW\x80c\xC7\xBFM\xB5\x14a\x03yW\x80c\xC8\xAF:\xA6\x14a\x03\x8CW\x80c\xD1^\xC8Q\x14a\x03\x9FW\x80c\xEA\xD1\x84\0\x14a\x03\xE1W\x80c\xF2]T\xF5\x14a\x04\x03W\x80c\xFBXl}\x14a\x04\x16W_\x80\xFD[\x80c\x88\xDF\xDD\xC6\x14a\x02\xE4W\x80c\x96\xDC\x9AA\x14a\x03\x1EW\x80c\xA3\x14\x15\x0F\x14a\x03HW\x80c\xA5\xD6f\xA9\x14a\x03QW\x80c\xC6\xA7\xF0\xFE\x14a\x03fW_\x80\xFD[\x80c.\xB5\xCF\xD8\x11a\x01\x05W\x80c.\xB5\xCF\xD8\x14a\x01\xEEW\x80cL\xF5\xA9J\x14a\x02\x01W\x80ci\x87\xB1\xFB\x14a\x02*W\x80cl\xC0\x14\xDE\x14a\x02KW\x80c\x80&\xDE1\x14a\x02gW\x80c\x85\xB6H\x9F\x14a\x02zW_\x80\xFD[\x80c\x02\0\"\\\x14a\x01AW\x80c\x0C\x16\x16\xC9\x14a\x01VW\x80c\x14\x17\xA4\xF0\x14a\x01iW\x80c\x1C\x13C\x15\x14a\x01\x96W\x80c*\xE4&\x86\x14a\x01\xA9W[_\x80\xFD[a\x01Ta\x01O6`\x04a\x0C\xE8V[a\x04)V[\0[a\x01Ta\x01d6`\x04a\r\xD8V[a\x04mV[a\x01Ta\x01w6`\x04a\x0E\xB8V[`\x06\x92\x90\x92U`\x01`\x01`\x80\x1B\x03\x91\x82\x16`\x01`\x80\x1B\x02\x91\x16\x17`\x07UV[a\x01Ta\x01\xA46`\x04a\x0E\xF1V[a\x05\xAEV[a\x01\xD1a\x01\xB76`\x04a\x0F\x1BV[`\x04` R_\x90\x81R`@\x90 T`\x01`\x01`\xA0\x1B\x03\x16\x81V[`@Q`\x01`\x01`\xA0\x1B\x03\x90\x91\x16\x81R` \x01[`@Q\x80\x91\x03\x90\xF3[a\x01Ta\x01\xFC6`\x04a\x0F2V[a\x05\xDBV[a\x01Ta\x02\x0F6`\x04a\x0F\xFDV[_\x92\x83R`\t` \x90\x81R`@\x80\x85 \x93\x85R\x92\x90R\x91 UV[a\x02=a\x0286`\x04a\x0F\x1BV[a\x07kV[`@Q\x90\x81R` \x01a\x01\xE5V[_Ta\x02W\x90`\xFF\x16\x81V[`@Q\x90\x15\x15\x81R` \x01a\x01\xE5V[a\x01Ta\x02u6`\x04a\x10&V[a\x07\x8AV[a\x02\xBFa\x02\x886`\x04a\x10^V[`\n` \x90\x81R_\x92\x83R`@\x80\x84 \x90\x91R\x90\x82R\x90 \x80T`\x01\x90\x91\x01T`\x01`\x01`\x80\x1B\x03\x80\x82\x16\x91`\x01`\x80\x1B\x90\x04\x16\x83V[`@\x80Q\x93\x84R`\x01`\x01`\x80\x1B\x03\x92\x83\x16` \x85\x01R\x91\x16\x90\x82\x01R``\x01a\x01\xE5V[a\x02\xBFa\x02\xF26`\x04a\x0F\x1BV[`\x08` R_\x90\x81R`@\x90 
\x80T`\x01\x90\x91\x01T`\x01`\x01`\x80\x1B\x03\x80\x82\x16\x91`\x01`\x80\x1B\x90\x04\x16\x83V[a\x02=a\x03,6`\x04a\x10^V[`\t` \x90\x81R_\x92\x83R`@\x80\x84 \x90\x91R\x90\x82R\x90 T\x81V[a\x02=`\x01T\x81V[a\x03Ya\x07\xDCV[`@Qa\x01\xE5\x91\x90a\x10~V[a\x01Ta\x03t6`\x04a\x10\xCAV[a\x08hV[a\x01Ta\x03\x876`\x04a\x11\x16V[a\x08\xC3V[`\x03Ta\x01\xD1\x90`\x01`\x01`\xA0\x1B\x03\x16\x81V[a\x01Ta\x03\xAD6`\x04a\x0F\x1BV[`\x05\x80T`\x01\x81\x01\x82U_\x91\x90\x91R\x7F\x03kc\x84\xB5\xEC\xA7\x91\xC6'a\x15-\x0Cy\xBB\x06\x04\xC1\x04\xA5\xFBoN\xB0p?1T\xBB=\xB0\x01UV[`\x06T`\x07Ta\x02\xBF\x91\x90`\x01`\x01`\x80\x1B\x03\x80\x82\x16\x91`\x01`\x80\x1B\x90\x04\x16\x83V[a\x01Ta\x04\x116`\x04a\x0F\x1BV[`\x01UV[a\x01Ta\x04$6`\x04a\x11\xEAV[a\n7V[_\x80T`\xFF\x19\x16\x85\x15\x15\x17\x90U`\x01\x83\x90U`\x02a\x04G\x83\x82a\x13LV[P`\x03\x80T`\x01`\x01`\xA0\x1B\x03\x19\x16`\x01`\x01`\xA0\x1B\x03\x92\x90\x92\x16\x91\x90\x91\x17\x90UPPPV[_[\x81Q\x81\x10\x15a\x05\xAAW_\x82\x82\x81Q\x81\x10a\x04\x8BWa\x04\x8Ba\x14 V[` \x02` \x01\x01Q`@\x01Q`\x02\x81\x11\x15a\x04\xA8Wa\x04\xA8a\x14\x0CV[\x03a\x04\xEFW`\x04_\x83\x83\x81Q\x81\x10a\x04\xC2Wa\x04\xC2a\x14 V[` \x90\x81\x02\x91\x90\x91\x01\x81\x01QQ\x82R\x81\x01\x91\x90\x91R`@\x01_ \x80T`\x01`\x01`\xA0\x1B\x03\x19\x16\x90Ua\x05\xA2V[`\x02\x82\x82\x81Q\x81\x10a\x05\x03Wa\x05\x03a\x14 V[` \x02` \x01\x01Q`@\x01Q`\x02\x81\x11\x15a\x05 Wa\x05 a\x14\x0CV[\x14\x80a\x05ZWP`\x01\x82\x82\x81Q\x81\x10a\x05;Wa\x05;a\x14 V[` \x02` \x01\x01Q`@\x01Q`\x02\x81\x11\x15a\x05XWa\x05Xa\x14\x0CV[\x14[\x15a\x05\xA2Wa\x05\xA2\x82\x82\x81Q\x81\x10a\x05tWa\x05ta\x14 V[` \x02` \x01\x01Q_\x01Q\x83\x83\x81Q\x81\x10a\x05\x91Wa\x05\x91a\x14 V[` \x02` \x01\x01Q` \x01Qa\x05\xAEV[`\x01\x01a\x04oV[PPV[_\x91\x82R`\x04` R`@\x90\x91 \x80T`\x01`\x01`\xA0\x1B\x03\x19\x16`\x01`\x01`\xA0\x1B\x03\x90\x92\x16\x91\x90\x91\x17\x90UV[_[\x81Q\x81\x10\x15a\x05\xAAW_\x82\x82\x81Q\x81\x10a\x05\xF9Wa\x05\xF9a\x14 V[` \x02` 
\x01\x01Q``\x01Q`\x02\x81\x11\x15a\x06\x16Wa\x06\x16a\x14\x0CV[\x03a\x06|W`\t_\x83\x83\x81Q\x81\x10a\x060Wa\x060a\x14 V[` \x02` \x01\x01Q_\x01Q\x81R` \x01\x90\x81R` \x01_ _\x83\x83\x81Q\x81\x10a\x06[Wa\x06[a\x14 V[` \x02` \x01\x01Q` \x01Q\x81R` \x01\x90\x81R` \x01_ _\x90Ua\x07cV[`\x02\x82\x82\x81Q\x81\x10a\x06\x90Wa\x06\x90a\x14 V[` \x02` \x01\x01Q``\x01Q`\x02\x81\x11\x15a\x06\xADWa\x06\xADa\x14\x0CV[\x14\x80a\x06\xE7WP`\x01\x82\x82\x81Q\x81\x10a\x06\xC8Wa\x06\xC8a\x14 V[` \x02` \x01\x01Q``\x01Q`\x02\x81\x11\x15a\x06\xE5Wa\x06\xE5a\x14\x0CV[\x14[\x15a\x07cWa\x07c\x82\x82\x81Q\x81\x10a\x07\x01Wa\x07\x01a\x14 V[` \x02` \x01\x01Q_\x01Q\x83\x83\x81Q\x81\x10a\x07\x1EWa\x07\x1Ea\x14 V[` \x02` \x01\x01Q` \x01Q\x84\x84\x81Q\x81\x10a\x07\x986\x0C\x90?h\xC0\xDC\x0B$sC\xEEs\xBF\xF76\x8B\x1C.\xF1A-l\xB2\xDCdsolcC\0\x08\x18\x003", ); - #[allow(non_camel_case_types, non_snake_case)] + #[allow(non_camel_case_types, non_snake_case, clippy::pub_underscore_fields)] #[derive(Clone)] pub struct MappingOperation(u8); const _: () = { @@ -726,14 +732,19 @@ pub mod Simple { /**```solidity struct MappingChange { uint256 key; address value; MappingOperation operation; } ```*/ - #[allow(non_camel_case_types, non_snake_case)] + #[allow(non_camel_case_types, non_snake_case, clippy::pub_underscore_fields)] #[derive(Clone)] pub struct MappingChange { - pub key: alloy::sol_types::private::U256, + pub key: alloy::sol_types::private::primitives::aliases::U256, pub value: alloy::sol_types::private::Address, pub operation: ::RustType, } - #[allow(non_camel_case_types, non_snake_case, clippy::style)] + #[allow( + non_camel_case_types, + non_snake_case, + clippy::pub_underscore_fields, + clippy::style + )] const _: () = { use alloy::sol_types as alloy_sol_types; #[doc(hidden)] @@ -744,7 +755,7 @@ pub mod Simple { ); #[doc(hidden)] type UnderlyingRustTuple<'a> = ( - alloy::sol_types::private::U256, + alloy::sol_types::private::primitives::aliases::U256, alloy::sol_types::private::Address, ::RustType, ); 
@@ -1696,23 +1707,28 @@ pub mod Simple { ```solidity function addToArray(uint256 value) external; ```*/ - #[allow(non_camel_case_types, non_snake_case)] + #[allow(non_camel_case_types, non_snake_case, clippy::pub_underscore_fields)] #[derive(Clone)] pub struct addToArrayCall { - pub value: alloy::sol_types::private::U256, + pub value: alloy::sol_types::private::primitives::aliases::U256, } ///Container type for the return parameters of the [`addToArray(uint256)`](addToArrayCall) function. - #[allow(non_camel_case_types, non_snake_case)] + #[allow(non_camel_case_types, non_snake_case, clippy::pub_underscore_fields)] #[derive(Clone)] pub struct addToArrayReturn {} - #[allow(non_camel_case_types, non_snake_case, clippy::style)] + #[allow( + non_camel_case_types, + non_snake_case, + clippy::pub_underscore_fields, + clippy::style + )] const _: () = { use alloy::sol_types as alloy_sol_types; { #[doc(hidden)] type UnderlyingSolTuple<'a> = (alloy::sol_types::sol_data::Uint<256>,); #[doc(hidden)] - type UnderlyingRustTuple<'a> = (alloy::sol_types::private::U256,); + type UnderlyingRustTuple<'a> = (alloy::sol_types::private::primitives::aliases::U256,); #[cfg(test)] #[allow(dead_code, unreachable_patterns)] fn _type_assertion(_t: alloy_sol_types::private::AssertTypeEq) { @@ -1805,25 +1821,30 @@ pub mod Simple { ```solidity function arr1(uint256) external view returns (uint256); ```*/ - #[allow(non_camel_case_types, non_snake_case)] + #[allow(non_camel_case_types, non_snake_case, clippy::pub_underscore_fields)] #[derive(Clone)] pub struct arr1Call { - pub _0: alloy::sol_types::private::U256, + pub _0: alloy::sol_types::private::primitives::aliases::U256, } ///Container type for the return parameters of the [`arr1(uint256)`](arr1Call) function. 
- #[allow(non_camel_case_types, non_snake_case)] + #[allow(non_camel_case_types, non_snake_case, clippy::pub_underscore_fields)] #[derive(Clone)] pub struct arr1Return { - pub _0: alloy::sol_types::private::U256, + pub _0: alloy::sol_types::private::primitives::aliases::U256, } - #[allow(non_camel_case_types, non_snake_case, clippy::style)] + #[allow( + non_camel_case_types, + non_snake_case, + clippy::pub_underscore_fields, + clippy::style + )] const _: () = { use alloy::sol_types as alloy_sol_types; { #[doc(hidden)] type UnderlyingSolTuple<'a> = (alloy::sol_types::sol_data::Uint<256>,); #[doc(hidden)] - type UnderlyingRustTuple<'a> = (alloy::sol_types::private::U256,); + type UnderlyingRustTuple<'a> = (alloy::sol_types::private::primitives::aliases::U256,); #[cfg(test)] #[allow(dead_code, unreachable_patterns)] fn _type_assertion(_t: alloy_sol_types::private::AssertTypeEq) { @@ -1852,7 +1873,7 @@ pub mod Simple { #[doc(hidden)] type UnderlyingSolTuple<'a> = (alloy::sol_types::sol_data::Uint<256>,); #[doc(hidden)] - type UnderlyingRustTuple<'a> = (alloy::sol_types::private::U256,); + type UnderlyingRustTuple<'a> = (alloy::sol_types::private::primitives::aliases::U256,); #[cfg(test)] #[allow(dead_code, unreachable_patterns)] fn _type_assertion(_t: alloy_sol_types::private::AssertTypeEq) { @@ -1916,17 +1937,22 @@ pub mod Simple { ```solidity function changeMapping(MappingChange[] memory changes) external; ```*/ - #[allow(non_camel_case_types, non_snake_case)] + #[allow(non_camel_case_types, non_snake_case, clippy::pub_underscore_fields)] #[derive(Clone)] pub struct changeMappingCall { pub changes: alloy::sol_types::private::Vec<::RustType>, } ///Container type for the return parameters of the [`changeMapping((uint256,address,uint8)[])`](changeMappingCall) function. 
- #[allow(non_camel_case_types, non_snake_case)] + #[allow(non_camel_case_types, non_snake_case, clippy::pub_underscore_fields)] #[derive(Clone)] pub struct changeMappingReturn {} - #[allow(non_camel_case_types, non_snake_case, clippy::style)] + #[allow( + non_camel_case_types, + non_snake_case, + clippy::pub_underscore_fields, + clippy::style + )] const _: () = { use alloy::sol_types as alloy_sol_types; { @@ -2382,25 +2408,30 @@ pub mod Simple { ```solidity function m1(uint256) external view returns (address); ```*/ - #[allow(non_camel_case_types, non_snake_case)] + #[allow(non_camel_case_types, non_snake_case, clippy::pub_underscore_fields)] #[derive(Clone)] pub struct m1Call { - pub _0: alloy::sol_types::private::U256, + pub _0: alloy::sol_types::private::primitives::aliases::U256, } ///Container type for the return parameters of the [`m1(uint256)`](m1Call) function. - #[allow(non_camel_case_types, non_snake_case)] + #[allow(non_camel_case_types, non_snake_case, clippy::pub_underscore_fields)] #[derive(Clone)] pub struct m1Return { pub _0: alloy::sol_types::private::Address, } - #[allow(non_camel_case_types, non_snake_case, clippy::style)] + #[allow( + non_camel_case_types, + non_snake_case, + clippy::pub_underscore_fields, + clippy::style + )] const _: () = { use alloy::sol_types as alloy_sol_types; { #[doc(hidden)] type UnderlyingSolTuple<'a> = (alloy::sol_types::sol_data::Uint<256>,); #[doc(hidden)] - type UnderlyingRustTuple<'a> = (alloy::sol_types::private::U256,); + type UnderlyingRustTuple<'a> = (alloy::sol_types::private::primitives::aliases::U256,); #[cfg(test)] #[allow(dead_code, unreachable_patterns)] fn _type_assertion(_t: alloy_sol_types::private::AssertTypeEq) { @@ -2761,16 +2792,21 @@ pub mod Simple { ```solidity function s1() external view returns (bool); ```*/ - #[allow(non_camel_case_types, non_snake_case)] + #[allow(non_camel_case_types, non_snake_case, clippy::pub_underscore_fields)] #[derive(Clone)] pub struct s1Call {} ///Container type for 
the return parameters of the [`s1()`](s1Call) function. - #[allow(non_camel_case_types, non_snake_case)] + #[allow(non_camel_case_types, non_snake_case, clippy::pub_underscore_fields)] #[derive(Clone)] pub struct s1Return { pub _0: bool, } - #[allow(non_camel_case_types, non_snake_case, clippy::style)] + #[allow( + non_camel_case_types, + non_snake_case, + clippy::pub_underscore_fields, + clippy::style + )] const _: () = { use alloy::sol_types as alloy_sol_types; { @@ -2866,16 +2902,21 @@ pub mod Simple { ```solidity function s2() external view returns (uint256); ```*/ - #[allow(non_camel_case_types, non_snake_case)] + #[allow(non_camel_case_types, non_snake_case, clippy::pub_underscore_fields)] #[derive(Clone)] pub struct s2Call {} ///Container type for the return parameters of the [`s2()`](s2Call) function. - #[allow(non_camel_case_types, non_snake_case)] + #[allow(non_camel_case_types, non_snake_case, clippy::pub_underscore_fields)] #[derive(Clone)] pub struct s2Return { - pub _0: alloy::sol_types::private::U256, + pub _0: alloy::sol_types::private::primitives::aliases::U256, } - #[allow(non_camel_case_types, non_snake_case, clippy::style)] + #[allow( + non_camel_case_types, + non_snake_case, + clippy::pub_underscore_fields, + clippy::style + )] const _: () = { use alloy::sol_types as alloy_sol_types; { @@ -2911,7 +2952,7 @@ pub mod Simple { #[doc(hidden)] type UnderlyingSolTuple<'a> = (alloy::sol_types::sol_data::Uint<256>,); #[doc(hidden)] - type UnderlyingRustTuple<'a> = (alloy::sol_types::private::U256,); + type UnderlyingRustTuple<'a> = (alloy::sol_types::private::primitives::aliases::U256,); #[cfg(test)] #[allow(dead_code, unreachable_patterns)] fn _type_assertion(_t: alloy_sol_types::private::AssertTypeEq) { @@ -2971,16 +3012,21 @@ pub mod Simple { ```solidity function s3() external view returns (string memory); ```*/ - #[allow(non_camel_case_types, non_snake_case)] + #[allow(non_camel_case_types, non_snake_case, clippy::pub_underscore_fields)] 
#[derive(Clone)] pub struct s3Call {} ///Container type for the return parameters of the [`s3()`](s3Call) function. - #[allow(non_camel_case_types, non_snake_case)] + #[allow(non_camel_case_types, non_snake_case, clippy::pub_underscore_fields)] #[derive(Clone)] pub struct s3Return { pub _0: alloy::sol_types::private::String, } - #[allow(non_camel_case_types, non_snake_case, clippy::style)] + #[allow( + non_camel_case_types, + non_snake_case, + clippy::pub_underscore_fields, + clippy::style + )] const _: () = { use alloy::sol_types as alloy_sol_types; { @@ -3076,16 +3122,21 @@ pub mod Simple { ```solidity function s4() external view returns (address); ```*/ - #[allow(non_camel_case_types, non_snake_case)] + #[allow(non_camel_case_types, non_snake_case, clippy::pub_underscore_fields)] #[derive(Clone)] pub struct s4Call {} ///Container type for the return parameters of the [`s4()`](s4Call) function. - #[allow(non_camel_case_types, non_snake_case)] + #[allow(non_camel_case_types, non_snake_case, clippy::pub_underscore_fields)] #[derive(Clone)] pub struct s4Return { pub _0: alloy::sol_types::private::Address, } - #[allow(non_camel_case_types, non_snake_case, clippy::style)] + #[allow( + non_camel_case_types, + non_snake_case, + clippy::pub_underscore_fields, + clippy::style + )] const _: () = { use alloy::sol_types as alloy_sol_types; { @@ -3181,17 +3232,22 @@ pub mod Simple { ```solidity function setMapping(uint256 key, address value) external; ```*/ - #[allow(non_camel_case_types, non_snake_case)] + #[allow(non_camel_case_types, non_snake_case, clippy::pub_underscore_fields)] #[derive(Clone)] pub struct setMappingCall { - pub key: alloy::sol_types::private::U256, + pub key: alloy::sol_types::private::primitives::aliases::U256, pub value: alloy::sol_types::private::Address, } ///Container type for the return parameters of the [`setMapping(uint256,address)`](setMappingCall) function. 
- #[allow(non_camel_case_types, non_snake_case)] + #[allow(non_camel_case_types, non_snake_case, clippy::pub_underscore_fields)] #[derive(Clone)] pub struct setMappingReturn {} - #[allow(non_camel_case_types, non_snake_case, clippy::style)] + #[allow( + non_camel_case_types, + non_snake_case, + clippy::pub_underscore_fields, + clippy::style + )] const _: () = { use alloy::sol_types as alloy_sol_types; { @@ -3202,7 +3258,7 @@ pub mod Simple { ); #[doc(hidden)] type UnderlyingRustTuple<'a> = ( - alloy::sol_types::private::U256, + alloy::sol_types::private::primitives::aliases::U256, alloy::sol_types::private::Address, ); #[cfg(test)] @@ -3737,23 +3793,28 @@ pub mod Simple { ```solidity function setS2(uint256 newS2) external; ```*/ - #[allow(non_camel_case_types, non_snake_case)] + #[allow(non_camel_case_types, non_snake_case, clippy::pub_underscore_fields)] #[derive(Clone)] pub struct setS2Call { - pub newS2: alloy::sol_types::private::U256, + pub newS2: alloy::sol_types::private::primitives::aliases::U256, } ///Container type for the return parameters of the [`setS2(uint256)`](setS2Call) function. 
- #[allow(non_camel_case_types, non_snake_case)] + #[allow(non_camel_case_types, non_snake_case, clippy::pub_underscore_fields)] #[derive(Clone)] pub struct setS2Return {} - #[allow(non_camel_case_types, non_snake_case, clippy::style)] + #[allow( + non_camel_case_types, + non_snake_case, + clippy::pub_underscore_fields, + clippy::style + )] const _: () = { use alloy::sol_types as alloy_sol_types; { #[doc(hidden)] type UnderlyingSolTuple<'a> = (alloy::sol_types::sol_data::Uint<256>,); #[doc(hidden)] - type UnderlyingRustTuple<'a> = (alloy::sol_types::private::U256,); + type UnderlyingRustTuple<'a> = (alloy::sol_types::private::primitives::aliases::U256,); #[cfg(test)] #[allow(dead_code, unreachable_patterns)] fn _type_assertion(_t: alloy_sol_types::private::AssertTypeEq) { @@ -3975,19 +4036,24 @@ pub mod Simple { ```solidity function setSimples(bool newS1, uint256 newS2, string memory newS3, address newS4) external; ```*/ - #[allow(non_camel_case_types, non_snake_case)] + #[allow(non_camel_case_types, non_snake_case, clippy::pub_underscore_fields)] #[derive(Clone)] pub struct setSimplesCall { pub newS1: bool, - pub newS2: alloy::sol_types::private::U256, + pub newS2: alloy::sol_types::private::primitives::aliases::U256, pub newS3: alloy::sol_types::private::String, pub newS4: alloy::sol_types::private::Address, } ///Container type for the return parameters of the [`setSimples(bool,uint256,string,address)`](setSimplesCall) function. 
- #[allow(non_camel_case_types, non_snake_case)] + #[allow(non_camel_case_types, non_snake_case, clippy::pub_underscore_fields)] #[derive(Clone)] pub struct setSimplesReturn {} - #[allow(non_camel_case_types, non_snake_case, clippy::style)] + #[allow( + non_camel_case_types, + non_snake_case, + clippy::pub_underscore_fields, + clippy::style + )] const _: () = { use alloy::sol_types as alloy_sol_types; { @@ -4001,7 +4067,7 @@ pub mod Simple { #[doc(hidden)] type UnderlyingRustTuple<'a> = ( bool, - alloy::sol_types::private::U256, + alloy::sol_types::private::primitives::aliases::U256, alloy::sol_types::private::String, alloy::sol_types::private::Address, ); @@ -4473,7 +4539,7 @@ pub mod Simple { Self::SELECTORS.binary_search(&selector).is_ok() } #[inline] - #[allow(unsafe_code, non_snake_case)] + #[allow(non_snake_case)] fn abi_decode_raw( selector: [u8; 4], data: &[u8], @@ -4715,7 +4781,7 @@ pub mod Simple { selector, )); }; - (unsafe { DECODE_SHIMS.get_unchecked(idx) })(data, validate) + DECODE_SHIMS[idx](data, validate) } #[inline] fn abi_encoded_size(&self) -> usize { @@ -5095,14 +5161,14 @@ pub mod Simple { ///Creates a new call builder for the [`addToArray`] function. pub fn addToArray( &self, - value: alloy::sol_types::private::U256, + value: alloy::sol_types::private::primitives::aliases::U256, ) -> alloy_contract::SolCallBuilder { self.call_builder(&addToArrayCall { value }) } ///Creates a new call builder for the [`arr1`] function. pub fn arr1( &self, - _0: alloy::sol_types::private::U256, + _0: alloy::sol_types::private::primitives::aliases::U256, ) -> alloy_contract::SolCallBuilder { self.call_builder(&arr1Call { _0 }) } @@ -5146,7 +5212,7 @@ pub mod Simple { ///Creates a new call builder for the [`m1`] function. 
pub fn m1( &self, - _0: alloy::sol_types::private::U256, + _0: alloy::sol_types::private::primitives::aliases::U256, ) -> alloy_contract::SolCallBuilder { self.call_builder(&m1Call { _0 }) } @@ -5185,7 +5251,7 @@ pub mod Simple { ///Creates a new call builder for the [`setMapping`] function. pub fn setMapping( &self, - key: alloy::sol_types::private::U256, + key: alloy::sol_types::private::primitives::aliases::U256, value: alloy::sol_types::private::Address, ) -> alloy_contract::SolCallBuilder { self.call_builder(&setMappingCall { key, value }) @@ -5238,7 +5304,7 @@ pub mod Simple { ///Creates a new call builder for the [`setS2`] function. pub fn setS2( &self, - newS2: alloy::sol_types::private::U256, + newS2: alloy::sol_types::private::primitives::aliases::U256, ) -> alloy_contract::SolCallBuilder { self.call_builder(&setS2Call { newS2 }) } @@ -5259,7 +5325,7 @@ pub mod Simple { pub fn setSimples( &self, newS1: bool, - newS2: alloy::sol_types::private::U256, + newS2: alloy::sol_types::private::primitives::aliases::U256, newS3: alloy::sol_types::private::String, newS4: alloy::sol_types::private::Address, ) -> alloy_contract::SolCallBuilder { From 1cb06af2c88f6a91d63f08568ca2c5c9e97b77ab Mon Sep 17 00:00:00 2001 From: Zack Youell Date: Thu, 19 Dec 2024 12:37:27 +0000 Subject: [PATCH 18/47] Changed TableSource to be a trait --- Cargo.toml | 2 +- mp2-common/src/eth.rs | 4 +- mp2-v1/src/api.rs | 3 +- mp2-v1/src/values_extraction/api.rs | 22 +- mp2-v1/src/values_extraction/leaf_receipt.rs | 7 +- mp2-v1/src/values_extraction/mod.rs | 6 +- mp2-v1/test-contracts/src/Simple.sol | 162 ++- mp2-v1/tests/common/bindings/simple.rs | 1039 +++++++++++------ mp2-v1/tests/common/cases/contract.rs | 315 ++--- mp2-v1/tests/common/cases/indexing.rs | 184 +-- mp2-v1/tests/common/cases/mod.rs | 6 +- .../common/cases/query/aggregated_queries.rs | 49 +- mp2-v1/tests/common/cases/query/mod.rs | 28 +- .../cases/query/simple_select_queries.rs | 53 +- mp2-v1/tests/common/cases/slot_info.rs | 
377 +++++- mp2-v1/tests/common/cases/table_source.rs | 829 +++++++------ mp2-v1/tests/common/context.rs | 2 +- mp2-v1/tests/common/mod.rs | 129 +- mp2-v1/tests/integrated_tests.rs | 54 +- 19 files changed, 1978 insertions(+), 1293 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 952415d9a..fab40aaec 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -76,7 +76,7 @@ rand_chacha = "0.3.1" revm = { version = "3.5", default-features = false } rlp = "0.5" rstest = "0.23" -serde = { version = "1.0", features = ["derive"] } +serde = { version = "1.0", features = ["derive", "std"] } serde_json = "1.0" serial_test = "3.0" sha2 = "0.10" diff --git a/mp2-common/src/eth.rs b/mp2-common/src/eth.rs index e2c264ce4..1292de7de 100644 --- a/mp2-common/src/eth.rs +++ b/mp2-common/src/eth.rs @@ -4,7 +4,7 @@ use alloy::{ consensus::{ReceiptEnvelope as CRE, ReceiptWithBloom}, eips::BlockNumberOrTag, network::{eip2718::Encodable2718, BlockResponse}, - primitives::{Address, B256}, + primitives::{Address, B256, U256}, providers::{Provider, RootProvider}, rlp::{Decodable, Encodable as AlloyEncodable}, rpc::types::{ @@ -157,7 +157,7 @@ pub struct ReceiptProofInfo { } /// Contains all the information for an [`Event`] in rlp form -#[derive(Debug, Clone, Copy, Serialize, Deserialize)] +#[derive(Debug, Clone, Copy, Serialize, Deserialize, Hash, PartialEq, Eq)] pub struct EventLogInfo { /// Size in bytes of the whole log rlp encoded pub size: usize, diff --git a/mp2-v1/src/api.rs b/mp2-v1/src/api.rs index 4af635a3f..fc95241a7 100644 --- a/mp2-v1/src/api.rs +++ b/mp2-v1/src/api.rs @@ -101,7 +101,8 @@ impl /// Instantiate the circuits employed for the pre-processing stage of LPN, /// returning their corresponding parameters -pub fn build_circuits_params() -> PublicParameters { +pub fn build_circuits_params( +) -> PublicParameters { log::info!("Building contract_extraction parameters..."); let contract_extraction = contract_extraction::build_circuits_params(); log::info!("Building length_extraction 
parameters..."); diff --git a/mp2-v1/src/values_extraction/api.rs b/mp2-v1/src/values_extraction/api.rs index 8639474eb..268f1c10e 100644 --- a/mp2-v1/src/values_extraction/api.rs +++ b/mp2-v1/src/values_extraction/api.rs @@ -57,7 +57,8 @@ pub enum CircuitInput< LeafSingle(LeafSingleCircuit), LeafMapping(LeafMappingCircuit), LeafMappingOfMappings(LeafMappingOfMappingsCircuit), - LeafReceipt(ReceiptLeafCircuit), + LeafReceipt(ReceiptLeafCircuit), + Extension(ExtensionInput), Branch(BranchInput), } @@ -144,7 +145,8 @@ where query: &ReceiptQuery, ) -> Self { CircuitInput::LeafReceipt( - ReceiptLeafCircuit::new(info, query).expect("Could not construct Receipt Leaf Circuit"), + ReceiptLeafCircuit::::new::(info, query) + .expect("Could not construct Receipt Leaf Circuit"), ) } @@ -198,7 +200,7 @@ pub struct PublicParameters< 0, LeafMappingOfMappingsWires, >, - leaf_receipt: CircuitWithUniversalVerifier, + leaf_receipt: CircuitWithUniversalVerifier>, extension: CircuitWithUniversalVerifier, #[cfg(not(test))] branches: BranchCircuits, @@ -429,8 +431,7 @@ where >(()); debug!("Building leaf receipt circuit"); - let leaf_receipt = - circuit_builder.build_circuit::>(()); + let leaf_receipt = circuit_builder.build_circuit::>(()); debug!("Building extension circuit"); let extension = circuit_builder.build_circuit::(()); @@ -955,7 +956,7 @@ mod tests { // The branch case for receipts is identical to that of a mapping so we use the same api. 
println!("Proving branch..."); - let branch_input = CircuitInput::new_mapping_variable_branch( + let branch_input = CircuitInput::new_branch( second_info.mpt_proof[proof_length_1 - 2].clone(), vec![leaf_proof1, leaf_proof2], ); @@ -968,15 +969,6 @@ mod tests { ); } - fn test_circuits(is_simple_aggregation: bool, num_children: usize) { - let contract_address = Address::from_str(TEST_CONTRACT_ADDRESS).unwrap(); - let chain_id = 10; - let id = identifier_single_var_column(TEST_SLOT, &contract_address, chain_id, vec![]); - let key_id = - identifier_for_mapping_key_column(TEST_SLOT, &contract_address, chain_id, vec![]); - let value_id = - identifier_for_mapping_value_column(TEST_SLOT, &contract_address, chain_id, vec![]); - } /// Generate a leaf proof. fn prove_leaf(params: &PublicParameters, node: Vec, test_slot: StorageSlotInfo) -> Vec { // RLP(RLP(compact(partial_key_in_nibble)), RLP(value)) diff --git a/mp2-v1/src/values_extraction/leaf_receipt.rs b/mp2-v1/src/values_extraction/leaf_receipt.rs index cef724c25..2d815bc4a 100644 --- a/mp2-v1/src/values_extraction/leaf_receipt.rs +++ b/mp2-v1/src/values_extraction/leaf_receipt.rs @@ -672,10 +672,13 @@ where } /// Num of children = 0 -impl CircuitLogicWires for ReceiptLeafWires { +impl CircuitLogicWires for ReceiptLeafWires +where + [(); PAD_LEN(NODE_LEN)]:, +{ type CircuitBuilderParams = (); - type Inputs = ReceiptLeafCircuit; + type Inputs = ReceiptLeafCircuit; const NUM_PUBLIC_INPUTS: usize = PublicInputs::::TOTAL_LEN; diff --git a/mp2-v1/src/values_extraction/mod.rs b/mp2-v1/src/values_extraction/mod.rs index 2d68d7fa3..8692924ce 100644 --- a/mp2-v1/src/values_extraction/mod.rs +++ b/mp2-v1/src/values_extraction/mod.rs @@ -15,7 +15,7 @@ use mp2_common::{ eth::{left_pad32, EventLogInfo, ReceiptProofInfo, StorageSlot}, group_hashing::map_to_curve_point, poseidon::{empty_poseidon_hash, hash_to_int_value, H}, - types::{HashOutput, MAPPING_LEAF_VALUE_LEN}, + types::{GFp, HashOutput, MAPPING_LEAF_VALUE_LEN}, 
utils::{Endianness, Packer, ToFields}, F, }; @@ -26,7 +26,7 @@ use plonky2::{ }; use plonky2_ecgfp5::curve::{curve::Point as Digest, scalar_field::Scalar}; use serde::{Deserialize, Serialize}; -use std::iter::once; +use std::iter::{self, once}; pub mod api; mod branch; @@ -755,7 +755,7 @@ pub fn compute_receipt_leaf_value_digest\x986\x0C\x90?h\xC0\xDC\x0B$sC\xEEs\xBF\xF76\x8B\x1C.\xF1A-l\xB2\xDCdsolcC\0\x08\x18\x003", + b"`\x80`@R4\x80\x15a\0\x0FW_\x80\xFD[Pa\x14\x9A\x80a\0\x1D_9_\xF3\xFE`\x80`@R4\x80\x15a\0\x0FW_\x80\xFD[P`\x046\x10a\x01=W_5`\xE0\x1C\x80c\x80&\xDE1\x11a\0\xB4W\x80c\xA5\xD6f\xA9\x11a\0yW\x80c\xA5\xD6f\xA9\x14a\x03]W\x80c\xC6\xA7\xF0\xFE\x14a\x03rW\x80c\xC8\xAF:\xA6\x14a\x03\x85W\x80c\xD1^\xC8Q\x14a\x03\x98W\x80c\xEA\xD1\x84\0\x14a\x03\xDAW\x80c\xF2]T\xF5\x14a\x03\xFCW_\x80\xFD[\x80c\x80&\xDE1\x14a\x02sW\x80c\x85\xB6H\x9F\x14a\x02\x86W\x80c\x88\xDF\xDD\xC6\x14a\x02\xF0W\x80c\x96\xDC\x9AA\x14a\x03*W\x80c\xA3\x14\x15\x0F\x14a\x03TW_\x80\xFD[\x80c>p\x16n\x11a\x01\x05W\x80c>p\x16n\x14a\x01\xD4W\x80c>\x90`\xC7\x14a\x01\xE7W\x80cL\xF5\xA9J\x14a\x01\xFAW\x80cQ\x97o\xC8\x14a\x02#W\x80ci\x87\xB1\xFB\x14a\x026W\x80cl\xC0\x14\xDE\x14a\x02WW_\x80\xFD[\x80c\x02\0\"\\\x14a\x01AW\x80c\x02\xE3\0:\x14a\x01VW\x80c\x0C\x16\x16\xC9\x14a\x01iW\x80c\x1C\x13C\x15\x14a\x01|W\x80c*\xE4&\x86\x14a\x01\x8FW[_\x80\xFD[a\x01Ta\x01O6`\x04a\x0C\xE0V[a\x04\x0FV[\0[a\x01Ta\x01d6`\x04a\r\xE7V[a\x04SV[a\x01Ta\x01w6`\x04a\x0E\xDAV[a\x06\x16V[a\x01Ta\x01\x8A6`\x04a\x0F\x93V[a\x07SV[a\x01\xB7a\x01\x9D6`\x04a\x0F\xBDV[`\x04` R_\x90\x81R`@\x90 T`\x01`\x01`\xA0\x1B\x03\x16\x81V[`@Q`\x01`\x01`\xA0\x1B\x03\x90\x91\x16\x81R` \x01[`@Q\x80\x91\x03\x90\xF3[a\x01Ta\x01\xE26`\x04a\x0F\xD4V[a\x07\x80V[a\x01Ta\x01\xF56`\x04a\x0F\xEAV[a\x07\x92V[a\x01Ta\x02\x086`\x04a\x10\xB5V[_\x92\x83R`\t` \x90\x81R`@\x80\x85 \x93\x85R\x92\x90R\x91 UV[a\x01Ta\x0216`\x04a\x10\xDEV[a\t\"V[a\x02Ia\x02D6`\x04a\x0F\xBDV[a\n\x96V[`@Q\x90\x81R` \x01a\x01\xCBV[_Ta\x02c\x90`\xFF\x16\x81V[`@Q\x90\x15\x15\x81R` 
\x01a\x01\xCBV[a\x01Ta\x02\x816`\x04a\x11\xB6V[a\n\xB5V[a\x02\xCBa\x02\x946`\x04a\x11\xFDV[`\n` \x90\x81R_\x92\x83R`@\x80\x84 \x90\x91R\x90\x82R\x90 \x80T`\x01\x90\x91\x01T`\x01`\x01`\x80\x1B\x03\x80\x82\x16\x91`\x01`\x80\x1B\x90\x04\x16\x83V[`@\x80Q\x93\x84R`\x01`\x01`\x80\x1B\x03\x92\x83\x16` \x85\x01R\x91\x16\x90\x82\x01R``\x01a\x01\xCBV[a\x02\xCBa\x02\xFE6`\x04a\x0F\xBDV[`\x08` R_\x90\x81R`@\x90 \x80T`\x01\x90\x91\x01T`\x01`\x01`\x80\x1B\x03\x80\x82\x16\x91`\x01`\x80\x1B\x90\x04\x16\x83V[a\x02Ia\x0386`\x04a\x11\xFDV[`\t` \x90\x81R_\x92\x83R`@\x80\x84 \x90\x91R\x90\x82R\x90 T\x81V[a\x02I`\x01T\x81V[a\x03ea\x0B\x07V[`@Qa\x01\xCB\x91\x90a\x12\x1DV[a\x01Ta\x03\x806`\x04a\x12iV[a\x0B\x93V[`\x03Ta\x01\xB7\x90`\x01`\x01`\xA0\x1B\x03\x16\x81V[a\x01Ta\x03\xA66`\x04a\x0F\xBDV[`\x05\x80T`\x01\x81\x01\x82U_\x91\x90\x91R\x7F\x03kc\x84\xB5\xEC\xA7\x91\xC6'a\x15-\x0Cy\xBB\x06\x04\xC1\x04\xA5\xFBoN\xB0p?1T\xBB=\xB0\x01UV[`\x06T`\x07Ta\x02\xCB\x91\x90`\x01`\x01`\x80\x1B\x03\x80\x82\x16\x91`\x01`\x80\x1B\x90\x04\x16\x83V[a\x01Ta\x04\n6`\x04a\x0F\xBDV[`\x01UV[_\x80T`\xFF\x19\x16\x85\x15\x15\x17\x90U`\x01\x83\x90U`\x02a\x04-\x83\x82a\x138V[P`\x03\x80T`\x01`\x01`\xA0\x1B\x03\x19\x16`\x01`\x01`\xA0\x1B\x03\x92\x90\x92\x16\x91\x90\x91\x17\x90UPPPV[_[\x81Q\x81\x10\x15a\x06\x12W_\x82\x82\x81Q\x81\x10a\x04qWa\x04qa\x14\x0CV[` \x02` \x01\x01Q`\xA0\x01Q`\x02\x81\x11\x15a\x04\x8EWa\x04\x8Ea\x13\xF8V[\x03a\x04\xFDW`\n_\x83\x83\x81Q\x81\x10a\x04\xA8Wa\x04\xA8a\x14\x0CV[` \x02` \x01\x01Q_\x01Q\x81R` \x01\x90\x81R` \x01_ _\x83\x83\x81Q\x81\x10a\x04\xD3Wa\x04\xD3a\x14\x0CV[` \x90\x81\x02\x91\x90\x91\x01\x81\x01Q\x81\x01Q\x82R\x81\x01\x91\x90\x91R`@\x01_\x90\x81 \x81\x81U`\x01\x01Ua\x06\nV[`\x02\x82\x82\x81Q\x81\x10a\x05\x11Wa\x05\x11a\x14\x0CV[` \x02` \x01\x01Q`\xA0\x01Q`\x02\x81\x11\x15a\x05.Wa\x05.a\x13\xF8V[\x14\x80a\x05hWP`\x01\x82\x82\x81Q\x81\x10a\x05IWa\x05Ia\x14\x0CV[` \x02` 
\x01\x01Q`\xA0\x01Q`\x02\x81\x11\x15a\x05fWa\x05fa\x13\xF8V[\x14[\x15a\x06\nWa\x06\n\x82\x82\x81Q\x81\x10a\x05\x82Wa\x05\x82a\x14\x0CV[` \x02` \x01\x01Q_\x01Q\x83\x83\x81Q\x81\x10a\x05\x9FWa\x05\x9Fa\x14\x0CV[` \x02` \x01\x01Q` \x01Q\x84\x84\x81Q\x81\x10a\x05\xBDWa\x05\xBDa\x14\x0CV[` \x02` \x01\x01Q`@\x01Q\x85\x85\x81Q\x81\x10a\x05\xDBWa\x05\xDBa\x14\x0CV[` \x02` \x01\x01Q``\x01Q\x86\x86\x81Q\x81\x10a\x05\xF9Wa\x05\xF9a\x14\x0CV[` \x02` \x01\x01Q`\x80\x01Qa\x0B\x93V[`\x01\x01a\x04UV[PPV[_[\x81Q\x81\x10\x15a\x06\x12W_\x82\x82\x81Q\x81\x10a\x064Wa\x064a\x14\x0CV[` \x02` \x01\x01Q`@\x01Q`\x02\x81\x11\x15a\x06QWa\x06Qa\x13\xF8V[\x03a\x06\x98W`\x04_\x83\x83\x81Q\x81\x10a\x06kWa\x06ka\x14\x0CV[` \x90\x81\x02\x91\x90\x91\x01\x81\x01QQ\x82R\x81\x01\x91\x90\x91R`@\x01_ \x80T`\x01`\x01`\xA0\x1B\x03\x19\x16\x90Ua\x07KV[`\x02\x82\x82\x81Q\x81\x10a\x06\xACWa\x06\xACa\x14\x0CV[` \x02` \x01\x01Q`@\x01Q`\x02\x81\x11\x15a\x06\xC9Wa\x06\xC9a\x13\xF8V[\x14\x80a\x07\x03WP`\x01\x82\x82\x81Q\x81\x10a\x06\xE4Wa\x06\xE4a\x14\x0CV[` \x02` \x01\x01Q`@\x01Q`\x02\x81\x11\x15a\x07\x01Wa\x07\x01a\x13\xF8V[\x14[\x15a\x07KWa\x07K\x82\x82\x81Q\x81\x10a\x07\x1DWa\x07\x1Da\x14\x0CV[` \x02` \x01\x01Q_\x01Q\x83\x83\x81Q\x81\x10a\x07:Wa\x07:a\x14\x0CV[` \x02` \x01\x01Q` \x01Qa\x07SV[`\x01\x01a\x06\x18V[_\x91\x82R`\x04` R`@\x90\x91 \x80T`\x01`\x01`\xA0\x1B\x03\x19\x16`\x01`\x01`\xA0\x1B\x03\x90\x92\x16\x91\x90\x91\x17\x90UV[\x80`\x06a\x07\x8D\x82\x82a\x14 V[PPPV[_[\x81Q\x81\x10\x15a\x06\x12W_\x82\x82\x81Q\x81\x10a\x07\xB0Wa\x07\xB0a\x14\x0CV[` \x02` \x01\x01Q``\x01Q`\x02\x81\x11\x15a\x07\xCDWa\x07\xCDa\x13\xF8V[\x03a\x083W`\t_\x83\x83\x81Q\x81\x10a\x07\xE7Wa\x07\xE7a\x14\x0CV[` \x02` \x01\x01Q_\x01Q\x81R` \x01\x90\x81R` \x01_ _\x83\x83\x81Q\x81\x10a\x08\x12Wa\x08\x12a\x14\x0CV[` \x02` \x01\x01Q` \x01Q\x81R` \x01\x90\x81R` \x01_ _\x90Ua\t\x1AV[`\x02\x82\x82\x81Q\x81\x10a\x08GWa\x08Ga\x14\x0CV[` \x02` 
\x01\x01Q``\x01Q`\x02\x81\x11\x15a\x08dWa\x08da\x13\xF8V[\x14\x80a\x08\x9EWP`\x01\x82\x82\x81Q\x81\x10a\x08\x7FWa\x08\x7Fa\x14\x0CV[` \x02` \x01\x01Q``\x01Q`\x02\x81\x11\x15a\x08\x9CWa\x08\x9Ca\x13\xF8V[\x14[\x15a\t\x1AWa\t\x1A\x82\x82\x81Q\x81\x10a\x08\xB8Wa\x08\xB8a\x14\x0CV[` \x02` \x01\x01Q_\x01Q\x83\x83\x81Q\x81\x10a\x08\xD5Wa\x08\xD5a\x14\x0CV[` \x02` \x01\x01Q` \x01Q\x84\x84\x81Q\x81\x10a\x08\xF3Wa\x08\xF3a\x14\x0CV[` \x02` \x01\x01Q`@\x01Q_\x92\x83R`\t` \x90\x81R`@\x80\x85 \x93\x85R\x92\x90R\x91 UV[`\x01\x01a\x07\x94V[_[\x81Q\x81\x10\x15a\x06\x12W_\x82\x82\x81Q\x81\x10a\t@Wa\t@a\x14\x0CV[` \x02` \x01\x01Q`\x80\x01Q`\x02\x81\x11\x15a\t]Wa\t]a\x13\xF8V[\x03a\t\x9FW`\x08_\x83\x83\x81Q\x81\x10a\twWa\twa\x14\x0CV[` \x90\x81\x02\x91\x90\x91\x01\x81\x01QQ\x82R\x81\x01\x91\x90\x91R`@\x01_\x90\x81 \x81\x81U`\x01\x01Ua\n\x8EV[`\x02\x82\x82\x81Q\x81\x10a\t\xB3Wa\t\xB3a\x14\x0CV[` \x02` \x01\x01Q`\x80\x01Q`\x02\x81\x11\x15a\t\xD0Wa\t\xD0a\x13\xF8V[\x14\x80a\n\nWP`\x01\x82\x82\x81Q\x81\x10a\t\xEBWa\t\xEBa\x14\x0CV[` \x02` \x01\x01Q`\x80\x01Q`\x02\x81\x11\x15a\n\x08Wa\n\x08a\x13\xF8V[\x14[\x15a\n\x8EWa\n\x8E\x82\x82\x81Q\x81\x10a\n$Wa\n$a\x14\x0CV[` \x02` \x01\x01Q_\x01Q\x83\x83\x81Q\x81\x10a\nAWa\nAa\x14\x0CV[` \x02` \x01\x01Q` \x01Q\x84\x84\x81Q\x81\x10a\n_Wa\n_a\x14\x0CV[` \x02` \x01\x01Q`@\x01Q\x85\x85\x81Q\x81\x10a\n}Wa\n}a\x14\x0CV[` \x02` \x01\x01Q``\x01Qa\n\xB5V[`\x01\x01a\t$V[`\x05\x81\x81T\x81\x10a\n\xA5W_\x80\xFD[_\x91\x82R` \x90\x91 \x01T\x90P\x81V[`@\x80Q``\x81\x01\x82R\x93\x84R`\x01`\x01`\x80\x1B\x03\x92\x83\x16` \x80\x86\x01\x91\x82R\x92\x84\x16\x85\x83\x01\x90\x81R_\x96\x87R`\x08\x90\x93R\x94 \x92Q\x83U\x92Q\x92Q\x81\x16`\x01`\x80\x1B\x02\x92\x16\x91\x90\x91\x17`\x01\x90\x91\x01UV[`\x02\x80Ta\x0B\x14\x90a\x12\xBBV[\x80`\x1F\x01` \x80\x91\x04\x02` \x01`@Q\x90\x81\x01`@R\x80\x92\x91\x90\x81\x81R` \x01\x82\x80Ta\x0B@\x90a\x12\xBBV[\x80\x15a\x0B\x8BW\x80`\x1F\x10a\x0BbWa\x01\0\x80\x83T\x04\x02\x83R\x91` \x01\x91a\x0B\x8BV[\x82\x01\x91\x90_R` _ 
\x90[\x81T\x81R\x90`\x01\x01\x90` \x01\x80\x83\x11a\x0BnW\x82\x90\x03`\x1F\x16\x82\x01\x91[PPPPP\x81V[`@\x80Q``\x81\x01\x82R\x93\x84R`\x01`\x01`\x80\x1B\x03\x92\x83\x16` \x80\x86\x01\x91\x82R\x92\x84\x16\x85\x83\x01\x90\x81R_\x97\x88R`\n\x84R\x82\x88 \x96\x88R\x95\x90\x92R\x90\x94 \x91Q\x82U\x92Q\x91Q\x83\x16`\x01`\x80\x1B\x02\x91\x90\x92\x16\x17`\x01\x90\x91\x01UV[cNH{q`\xE0\x1B_R`A`\x04R`$_\xFD[`@Q`\xC0\x81\x01g\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\x81\x11\x82\x82\x10\x17\x15a\x0C%Wa\x0C%a\x0B\xEEV[`@R\x90V[`@Q``\x81\x01g\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\x81\x11\x82\x82\x10\x17\x15a\x0C%Wa\x0C%a\x0B\xEEV[`@Q`\x80\x81\x01g\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\x81\x11\x82\x82\x10\x17\x15a\x0C%Wa\x0C%a\x0B\xEEV[`@Q`\xA0\x81\x01g\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\x81\x11\x82\x82\x10\x17\x15a\x0C%Wa\x0C%a\x0B\xEEV[`@Q`\x1F\x82\x01`\x1F\x19\x16\x81\x01g\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\x81\x11\x82\x82\x10\x17\x15a\x0C\xBDWa\x0C\xBDa\x0B\xEEV[`@R\x91\x90PV[\x805`\x01`\x01`\xA0\x1B\x03\x81\x16\x81\x14a\x0C\xDBW_\x80\xFD[\x91\x90PV[_\x80_\x80`\x80\x85\x87\x03\x12\x15a\x0C\xF3W_\x80\xFD[\x845\x80\x15\x15\x81\x14a\r\x02W_\x80\xFD[\x93P` \x85\x81\x015\x93P`@\x86\x015g\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\x80\x82\x11\x15a\r&W_\x80\xFD[\x81\x88\x01\x91P\x88`\x1F\x83\x01\x12a\r9W_\x80\xFD[\x815\x81\x81\x11\x15a\rKWa\rKa\x0B\xEEV[a\r]`\x1F\x82\x01`\x1F\x19\x16\x85\x01a\x0C\x94V[\x91P\x80\x82R\x89\x84\x82\x85\x01\x01\x11\x15a\rrW_\x80\xFD[\x80\x84\x84\x01\x85\x84\x017_\x84\x82\x84\x01\x01RP\x80\x94PPPPa\r\x94``\x86\x01a\x0C\xC5V[\x90P\x92\x95\x91\x94P\x92PV[_g\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\x82\x11\x15a\r\xB8Wa\r\xB8a\x0B\xEEV[P`\x05\x1B` \x01\x90V[`\x01`\x01`\x80\x1B\x03\x81\x16\x81\x14a\r\xD6W_\x80\xFD[PV[\x805`\x03\x81\x10a\x0C\xDBW_\x80\xFD[_` 
\x80\x83\x85\x03\x12\x15a\r\xF8W_\x80\xFD[\x825g\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\x81\x11\x15a\x0E\x0EW_\x80\xFD[\x83\x01`\x1F\x81\x01\x85\x13a\x0E\x1EW_\x80\xFD[\x805a\x0E1a\x0E,\x82a\r\x9FV[a\x0C\x94V[\x81\x81R`\xC0\x91\x82\x02\x83\x01\x84\x01\x91\x84\x82\x01\x91\x90\x88\x84\x11\x15a\x0EOW_\x80\xFD[\x93\x85\x01\x93[\x83\x85\x10\x15a\x0E\xCEW\x80\x85\x8A\x03\x12\x15a\x0EjW_\x80\xFD[a\x0Era\x0C\x02V[\x855\x81R\x86\x86\x015\x87\x82\x01R`@\x80\x87\x015\x90\x82\x01R``\x80\x87\x015a\x0E\x97\x81a\r\xC2V[\x90\x82\x01R`\x80\x86\x81\x015a\x0E\xAA\x81a\r\xC2V[\x90\x82\x01R`\xA0a\x0E\xBB\x87\x82\x01a\r\xD9V[\x90\x82\x01R\x83R\x93\x84\x01\x93\x91\x85\x01\x91a\x0ETV[P\x97\x96PPPPPPPV[_` \x80\x83\x85\x03\x12\x15a\x0E\xEBW_\x80\xFD[\x825g\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\x81\x11\x15a\x0F\x01W_\x80\xFD[\x83\x01`\x1F\x81\x01\x85\x13a\x0F\x11W_\x80\xFD[\x805a\x0F\x1Fa\x0E,\x82a\r\x9FV[\x81\x81R``\x91\x82\x02\x83\x01\x84\x01\x91\x84\x82\x01\x91\x90\x88\x84\x11\x15a\x0F=W_\x80\xFD[\x93\x85\x01\x93[\x83\x85\x10\x15a\x0E\xCEW\x80\x85\x8A\x03\x12\x15a\x0FXW_\x80\xFD[a\x0F`a\x0C+V[\x855\x81Ra\x0Fo\x87\x87\x01a\x0C\xC5V[\x87\x82\x01R`@a\x0F\x80\x81\x88\x01a\r\xD9V[\x90\x82\x01R\x83R\x93\x84\x01\x93\x91\x85\x01\x91a\x0FBV[_\x80`@\x83\x85\x03\x12\x15a\x0F\xA4W_\x80\xFD[\x825\x91Pa\x0F\xB4` \x84\x01a\x0C\xC5V[\x90P\x92P\x92\x90PV[_` \x82\x84\x03\x12\x15a\x0F\xCDW_\x80\xFD[P5\x91\x90PV[_``\x82\x84\x03\x12\x15a\x0F\xE4W_\x80\xFD[P\x91\x90PV[_` 
\x80\x83\x85\x03\x12\x15a\x0F\xFBW_\x80\xFD[\x825g\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\x81\x11\x15a\x10\x11W_\x80\xFD[\x83\x01`\x1F\x81\x01\x85\x13a\x10!W_\x80\xFD[\x805a\x10/a\x0E,\x82a\r\x9FV[\x81\x81R`\x07\x91\x90\x91\x1B\x82\x01\x83\x01\x90\x83\x81\x01\x90\x87\x83\x11\x15a\x10MW_\x80\xFD[\x92\x84\x01\x92[\x82\x84\x10\x15a\x10\xAAW`\x80\x84\x89\x03\x12\x15a\x10iW_\x80\xFD[a\x10qa\x0CNV[\x845\x81R\x85\x85\x015\x86\x82\x01R`@\x80\x86\x015\x90\x82\x01R``a\x10\x94\x81\x87\x01a\r\xD9V[\x90\x82\x01R\x82R`\x80\x93\x90\x93\x01\x92\x90\x84\x01\x90a\x10RV[\x97\x96PPPPPPPV[_\x80_``\x84\x86\x03\x12\x15a\x10\xC7W_\x80\xFD[PP\x815\x93` \x83\x015\x93P`@\x90\x92\x015\x91\x90PV[_` \x80\x83\x85\x03\x12\x15a\x10\xEFW_\x80\xFD[\x825g\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\x81\x11\x15a\x11\x05W_\x80\xFD[\x83\x01`\x1F\x81\x01\x85\x13a\x11\x15W_\x80\xFD[\x805a\x11#a\x0E,\x82a\r\x9FV[\x81\x81R`\xA0\x91\x82\x02\x83\x01\x84\x01\x91\x84\x82\x01\x91\x90\x88\x84\x11\x15a\x11AW_\x80\xFD[\x93\x85\x01\x93[\x83\x85\x10\x15a\x0E\xCEW\x80\x85\x8A\x03\x12\x15a\x11\\W_\x80\xFD[a\x11da\x0CqV[\x855\x81R\x86\x86\x015\x87\x82\x01R`@\x80\x87\x015a\x11\x7F\x81a\r\xC2V[\x90\x82\x01R``\x86\x81\x015a\x11\x92\x81a\r\xC2V[\x90\x82\x01R`\x80a\x11\xA3\x87\x82\x01a\r\xD9V[\x90\x82\x01R\x83R\x93\x84\x01\x93\x91\x85\x01\x91a\x11FV[_\x80_\x80`\x80\x85\x87\x03\x12\x15a\x11\xC9W_\x80\xFD[\x845\x93P` \x85\x015\x92P`@\x85\x015a\x11\xE2\x81a\r\xC2V[\x91P``\x85\x015a\x11\xF2\x81a\r\xC2V[\x93\x96\x92\x95P\x90\x93PPV[_\x80`@\x83\x85\x03\x12\x15a\x12\x0EW_\x80\xFD[PP\x805\x92` \x90\x91\x015\x91PV[_` \x80\x83R\x83Q\x80` \x85\x01R_[\x81\x81\x10\x15a\x12IW\x85\x81\x01\x83\x01Q\x85\x82\x01`@\x01R\x82\x01a\x12-V[P_`@\x82\x86\x01\x01R`@`\x1F\x19`\x1F\x83\x01\x16\x85\x01\x01\x92PPP\x92\x91PPV[_\x80_\x80_`\xA0\x86\x88\x03\x12\x15a\x12}W_\x80\xFD[\x855\x94P` 
\x86\x015\x93P`@\x86\x015\x92P``\x86\x015a\x12\x9D\x81a\r\xC2V[\x91P`\x80\x86\x015a\x12\xAD\x81a\r\xC2V[\x80\x91PP\x92\x95P\x92\x95\x90\x93PV[`\x01\x81\x81\x1C\x90\x82\x16\x80a\x12\xCFW`\x7F\x82\x16\x91P[` \x82\x10\x81\x03a\x0F\xE4WcNH{q`\xE0\x1B_R`\"`\x04R`$_\xFD[`\x1F\x82\x11\x15a\x07\x8DW\x80_R` _ `\x1F\x84\x01`\x05\x1C\x81\x01` \x85\x10\x15a\x13\x12WP\x80[`\x1F\x84\x01`\x05\x1C\x82\x01\x91P[\x81\x81\x10\x15a\x131W_\x81U`\x01\x01a\x13\x1EV[PPPPPV[\x81Qg\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\x81\x11\x15a\x13RWa\x13Ra\x0B\xEEV[a\x13f\x81a\x13`\x84Ta\x12\xBBV[\x84a\x12\xEDV[` \x80`\x1F\x83\x11`\x01\x81\x14a\x13\x99W_\x84\x15a\x13\x82WP\x85\x83\x01Q[_\x19`\x03\x86\x90\x1B\x1C\x19\x16`\x01\x85\x90\x1B\x17\x85Ua\x13\xF0V[_\x85\x81R` \x81 `\x1F\x19\x86\x16\x91[\x82\x81\x10\x15a\x13\xC7W\x88\x86\x01Q\x82U\x94\x84\x01\x94`\x01\x90\x91\x01\x90\x84\x01a\x13\xA8V[P\x85\x82\x10\x15a\x13\xE4W\x87\x85\x01Q_\x19`\x03\x88\x90\x1B`\xF8\x16\x1C\x19\x16\x81U[PP`\x01\x84`\x01\x1B\x01\x85U[PPPPPPV[cNH{q`\xE0\x1B_R`!`\x04R`$_\xFD[cNH{q`\xE0\x1B_R`2`\x04R`$_\xFD[\x815\x81U` \x82\x015a\x142\x81a\r\xC2V[`@\x83\x015a\x14@\x81a\r\xC2V[`\x01`\x01`\x80\x1B\x03\x19\x81`\x80\x1B\x16`\x01`\x01`\x80\x1B\x03\x83\x16\x17`\x01\x84\x01UPPPPV\xFE\xA2dipfsX\"\x12 h|o\xE6\xE2e\xAA\xBEb@N\x89\xB3\x9F\x88<\x05\xF4\x92+zJ\x13\x8FRs\xC7\x12\xC7B\xEFOdsolcC\0\x08\x18\x003", ); /// The runtime bytecode of the contract, as deployed on the network. 
/// /// ```text - ///0x608060405234801561000f575f80fd5b506004361061013d575f3560e01c806388dfddc6116100b4578063c7bf4db511610079578063c7bf4db514610379578063c8af3aa61461038c578063d15ec8511461039f578063ead18400146103e1578063f25d54f514610403578063fb586c7d14610416575f80fd5b806388dfddc6146102e457806396dc9a411461031e578063a314150f14610348578063a5d666a914610351578063c6a7f0fe14610366575f80fd5b80632eb5cfd8116101055780632eb5cfd8146101ee5780634cf5a94a146102015780636987b1fb1461022a5780636cc014de1461024b5780638026de311461026757806385b6489f1461027a575f80fd5b80630200225c146101415780630c1616c9146101565780631417a4f0146101695780631c134315146101965780632ae42686146101a9575b5f80fd5b61015461014f366004610ce8565b610429565b005b610154610164366004610dd8565b61046d565b610154610177366004610eb8565b6006929092556001600160801b03918216600160801b02911617600755565b6101546101a4366004610ef1565b6105ae565b6101d16101b7366004610f1b565b60046020525f90815260409020546001600160a01b031681565b6040516001600160a01b0390911681526020015b60405180910390f35b6101546101fc366004610f32565b6105db565b61015461020f366004610ffd565b5f928352600960209081526040808520938552929052912055565b61023d610238366004610f1b565b61076b565b6040519081526020016101e5565b5f546102579060ff1681565b60405190151581526020016101e5565b610154610275366004611026565b61078a565b6102bf61028836600461105e565b600a60209081525f9283526040808420909152908252902080546001909101546001600160801b0380821691600160801b90041683565b604080519384526001600160801b0392831660208501529116908201526060016101e5565b6102bf6102f2366004610f1b565b60086020525f9081526040902080546001909101546001600160801b0380821691600160801b90041683565b61023d61032c36600461105e565b600960209081525f928352604080842090915290825290205481565b61023d60015481565b6103596107dc565b6040516101e5919061107e565b6101546103743660046110ca565b610868565b610154610387366004611116565b6108c3565b6003546101d1906001600160a01b031681565b6101546103ad366004610f1b565b600580546001810182555f919091527f036b6384b5eca791c62761152d0c79bb0604c104a5fb6f4eb0703f3154bb3
db00155565b6006546007546102bf91906001600160801b0380821691600160801b90041683565b610154610411366004610f1b565b600155565b6101546104243660046111ea565b610a37565b5f805460ff191685151517905560018390556002610447838261134c565b50600380546001600160a01b0319166001600160a01b0392909216919091179055505050565b5f5b81518110156105aa575f82828151811061048b5761048b611420565b60200260200101516040015160028111156104a8576104a861140c565b036104ef5760045f8383815181106104c2576104c2611420565b6020908102919091018101515182528101919091526040015f2080546001600160a01b03191690556105a2565b600282828151811061050357610503611420565b60200260200101516040015160028111156105205761052061140c565b148061055a5750600182828151811061053b5761053b611420565b60200260200101516040015160028111156105585761055861140c565b145b156105a2576105a282828151811061057457610574611420565b60200260200101515f015183838151811061059157610591611420565b6020026020010151602001516105ae565b60010161046f565b5050565b5f9182526004602052604090912080546001600160a01b0319166001600160a01b03909216919091179055565b5f5b81518110156105aa575f8282815181106105f9576105f9611420565b60200260200101516060015160028111156106165761061661140c565b0361067c5760095f83838151811061063057610630611420565b60200260200101515f015181526020019081526020015f205f83838151811061065b5761065b611420565b60200260200101516020015181526020019081526020015f205f9055610763565b600282828151811061069057610690611420565b60200260200101516060015160028111156106ad576106ad61140c565b14806106e7575060018282815181106106c8576106c8611420565b60200260200101516060015160028111156106e5576106e561140c565b145b156107635761076382828151811061070157610701611420565b60200260200101515f015183838151811061071e5761071e611420565b60200260200101516020015184848151811061073c5761073c611420565b6020026020010151604001515f928352600960209081526040808520938552929052912055565b6001016105dd565b6005818154811061077a575f80fd5b5f91825260209091200154905081565b604080516060810182529384526001600160801b0392831660208086019182529284168583019081525f9687526008909352942092518355925
192518116600160801b02921691909117600190910155565b600280546107e9906112c8565b80601f0160208091040260200160405190810160405280929190818152602001828054610815906112c8565b80156108605780601f1061083757610100808354040283529160200191610860565b820191905f5260205f20905b81548152906001019060200180831161084357829003601f168201915b505050505081565b604080516060810182529384526001600160801b0392831660208086019182529284168583019081525f978852600a84528288209688529590925290942091518255925191518316600160801b029190921617600190910155565b5f5b81518110156105aa575f8282815181106108e1576108e1611420565b60200260200101516080015160028111156108fe576108fe61140c565b036109405760085f83838151811061091857610918611420565b6020908102919091018101515182528101919091526040015f90812081815560010155610a2f565b600282828151811061095457610954611420565b60200260200101516080015160028111156109715761097161140c565b14806109ab5750600182828151811061098c5761098c611420565b60200260200101516080015160028111156109a9576109a961140c565b145b15610a2f57610a2f8282815181106109c5576109c5611420565b60200260200101515f01518383815181106109e2576109e2611420565b602002602001015160200151848481518110610a0057610a00611420565b602002602001015160400151858581518110610a1e57610a1e611420565b60200260200101516060015161078a565b6001016108c5565b5f5b81518110156105aa575f828281518110610a5557610a55611420565b602002602001015160a001516002811115610a7257610a7261140c565b03610ae157600a5f838381518110610a8c57610a8c611420565b60200260200101515f015181526020019081526020015f205f838381518110610ab757610ab7611420565b60209081029190910181015181015182528101919091526040015f90812081815560010155610bee565b6002828281518110610af557610af5611420565b602002602001015160a001516002811115610b1257610b1261140c565b1480610b4c57506001828281518110610b2d57610b2d611420565b602002602001015160a001516002811115610b4a57610b4a61140c565b145b15610bee57610bee828281518110610b6657610b66611420565b60200260200101515f0151838381518110610b8357610b83611420565b602002602001015160200151848481518110610ba157610ba1611420565b602002602001015160400
151858581518110610bbf57610bbf611420565b602002602001015160600151868681518110610bdd57610bdd611420565b602002602001015160800151610868565b600101610a39565b634e487b7160e01b5f52604160045260245ffd5b6040516060810167ffffffffffffffff81118282101715610c2d57610c2d610bf6565b60405290565b6040516080810167ffffffffffffffff81118282101715610c2d57610c2d610bf6565b60405160a0810167ffffffffffffffff81118282101715610c2d57610c2d610bf6565b60405160c0810167ffffffffffffffff81118282101715610c2d57610c2d610bf6565b604051601f8201601f1916810167ffffffffffffffff81118282101715610cc557610cc5610bf6565b604052919050565b80356001600160a01b0381168114610ce3575f80fd5b919050565b5f805f8060808587031215610cfb575f80fd5b84358015158114610d0a575f80fd5b93506020858101359350604086013567ffffffffffffffff80821115610d2e575f80fd5b818801915088601f830112610d41575f80fd5b813581811115610d5357610d53610bf6565b610d65601f8201601f19168501610c9c565b91508082528984828501011115610d7a575f80fd5b80848401858401375f84828401015250809450505050610d9c60608601610ccd565b905092959194509250565b5f67ffffffffffffffff821115610dc057610dc0610bf6565b5060051b60200190565b803560038110610ce3575f80fd5b5f6020808385031215610de9575f80fd5b823567ffffffffffffffff811115610dff575f80fd5b8301601f81018513610e0f575f80fd5b8035610e22610e1d82610da7565b610c9c565b81815260609182028301840191848201919088841115610e40575f80fd5b938501935b83851015610e965780858a031215610e5b575f80fd5b610e63610c0a565b85358152610e72878701610ccd565b878201526040610e83818801610dca565b9082015283529384019391850191610e45565b50979650505050505050565b80356001600160801b0381168114610ce3575f80fd5b5f805f60608486031215610eca575f80fd5b83359250610eda60208501610ea2565b9150610ee860408501610ea2565b90509250925092565b5f8060408385031215610f02575f80fd5b82359150610f1260208401610ccd565b90509250929050565b5f60208284031215610f2b575f80fd5b5035919050565b5f6020808385031215610f43575f80fd5b823567ffffffffffffffff811115610f59575f80fd5b8301601f81018513610f69575f80fd5b8035610f77610e1d82610da7565b81815260079190911b82018301908381019087831115610f95575f80f
d5b928401925b82841015610ff25760808489031215610fb1575f80fd5b610fb9610c33565b843581528585013586820152604080860135908201526060610fdc818701610dca565b9082015282526080939093019290840190610f9a565b979650505050505050565b5f805f6060848603121561100f575f80fd5b505081359360208301359350604090920135919050565b5f805f8060808587031215611039575f80fd5b843593506020850135925061105060408601610ea2565b9150610d9c60608601610ea2565b5f806040838503121561106f575f80fd5b50508035926020909101359150565b5f602080835283518060208501525f5b818110156110aa5785810183015185820160400152820161108e565b505f604082860101526040601f19601f8301168501019250505092915050565b5f805f805f60a086880312156110de575f80fd5b8535945060208601359350604086013592506110fc60608701610ea2565b915061110a60808701610ea2565b90509295509295909350565b5f6020808385031215611127575f80fd5b823567ffffffffffffffff81111561113d575f80fd5b8301601f8101851361114d575f80fd5b803561115b610e1d82610da7565b81815260a09182028301840191848201919088841115611179575f80fd5b938501935b83851015610e965780858a031215611194575f80fd5b61119c610c56565b85358152868601358782015260406111b5818801610ea2565b9082015260606111c6878201610ea2565b9082015260806111d7878201610dca565b908201528352938401939185019161117e565b5f60208083850312156111fb575f80fd5b823567ffffffffffffffff811115611211575f80fd5b8301601f81018513611221575f80fd5b803561122f610e1d82610da7565b81815260c0918202830184019184820191908884111561124d575f80fd5b938501935b83851015610e965780858a031215611268575f80fd5b611270610c79565b853581528686013587820152604080870135908201526060611293818801610ea2565b9082015260806112a4878201610ea2565b9082015260a06112b5878201610dca565b9082015283529384019391850191611252565b600181811c908216806112dc57607f821691505b6020821081036112fa57634e487b7160e01b5f52602260045260245ffd5b50919050565b601f82111561134757805f5260205f20601f840160051c810160208510156113255750805b601f840160051c820191505b81811015611344575f8155600101611331565b50505b505050565b815167ffffffffffffffff81111561136657611366610bf6565b61137a8161137484546112c8565b84611300565b602
080601f8311600181146113ad575f84156113965750858301515b5f19600386901b1c1916600185901b178555611404565b5f85815260208120601f198616915b828110156113db578886015182559484019460019091019084016113bc565b50858210156113f857878501515f19600388901b60f8161c191681555b505060018460011b0185555b505050505050565b634e487b7160e01b5f52602160045260245ffd5b634e487b7160e01b5f52603260045260245ffdfea2646970667358221220d2b83ac3b43e98360c903f68c0dc0b247343ee73bff7368b1c2ef1412d6cb2dc64736f6c63430008180033 + ///0x608060405234801561000f575f80fd5b506004361061013d575f3560e01c80638026de31116100b4578063a5d666a911610079578063a5d666a91461035d578063c6a7f0fe14610372578063c8af3aa614610385578063d15ec85114610398578063ead18400146103da578063f25d54f5146103fc575f80fd5b80638026de311461027357806385b6489f1461028657806388dfddc6146102f057806396dc9a411461032a578063a314150f14610354575f80fd5b80633e70166e116101055780633e70166e146101d45780633e9060c7146101e75780634cf5a94a146101fa57806351976fc8146102235780636987b1fb146102365780636cc014de14610257575f80fd5b80630200225c1461014157806302e3003a146101565780630c1616c9146101695780631c1343151461017c5780632ae426861461018f575b5f80fd5b61015461014f366004610ce0565b61040f565b005b610154610164366004610de7565b610453565b610154610177366004610eda565b610616565b61015461018a366004610f93565b610753565b6101b761019d366004610fbd565b60046020525f90815260409020546001600160a01b031681565b6040516001600160a01b0390911681526020015b60405180910390f35b6101546101e2366004610fd4565b610780565b6101546101f5366004610fea565b610792565b6101546102083660046110b5565b5f928352600960209081526040808520938552929052912055565b6101546102313660046110de565b610922565b610249610244366004610fbd565b610a96565b6040519081526020016101cb565b5f546102639060ff1681565b60405190151581526020016101cb565b6101546102813660046111b6565b610ab5565b6102cb6102943660046111fd565b600a60209081525f9283526040808420909152908252902080546001909101546001600160801b0380821691600160801b90041683565b604080519384526001600160801b0392831660208501529116908201526060016101cb565b6102cb6102f
e366004610fbd565b60086020525f9081526040902080546001909101546001600160801b0380821691600160801b90041683565b6102496103383660046111fd565b600960209081525f928352604080842090915290825290205481565b61024960015481565b610365610b07565b6040516101cb919061121d565b610154610380366004611269565b610b93565b6003546101b7906001600160a01b031681565b6101546103a6366004610fbd565b600580546001810182555f919091527f036b6384b5eca791c62761152d0c79bb0604c104a5fb6f4eb0703f3154bb3db00155565b6006546007546102cb91906001600160801b0380821691600160801b90041683565b61015461040a366004610fbd565b600155565b5f805460ff19168515151790556001839055600261042d8382611338565b50600380546001600160a01b0319166001600160a01b0392909216919091179055505050565b5f5b8151811015610612575f8282815181106104715761047161140c565b602002602001015160a00151600281111561048e5761048e6113f8565b036104fd57600a5f8383815181106104a8576104a861140c565b60200260200101515f015181526020019081526020015f205f8383815181106104d3576104d361140c565b60209081029190910181015181015182528101919091526040015f9081208181556001015561060a565b60028282815181106105115761051161140c565b602002602001015160a00151600281111561052e5761052e6113f8565b1480610568575060018282815181106105495761054961140c565b602002602001015160a001516002811115610566576105666113f8565b145b1561060a5761060a8282815181106105825761058261140c565b60200260200101515f015183838151811061059f5761059f61140c565b6020026020010151602001518484815181106105bd576105bd61140c565b6020026020010151604001518585815181106105db576105db61140c565b6020026020010151606001518686815181106105f9576105f961140c565b602002602001015160800151610b93565b600101610455565b5050565b5f5b8151811015610612575f8282815181106106345761063461140c565b6020026020010151604001516002811115610651576106516113f8565b036106985760045f83838151811061066b5761066b61140c565b6020908102919091018101515182528101919091526040015f2080546001600160a01b031916905561074b565b60028282815181106106ac576106ac61140c565b60200260200101516040015160028111156106c9576106c96113f8565b1480610703575060018282815181106106e457610
6e461140c565b6020026020010151604001516002811115610701576107016113f8565b145b1561074b5761074b82828151811061071d5761071d61140c565b60200260200101515f015183838151811061073a5761073a61140c565b602002602001015160200151610753565b600101610618565b5f9182526004602052604090912080546001600160a01b0319166001600160a01b03909216919091179055565b80600661078d8282611420565b505050565b5f5b8151811015610612575f8282815181106107b0576107b061140c565b60200260200101516060015160028111156107cd576107cd6113f8565b036108335760095f8383815181106107e7576107e761140c565b60200260200101515f015181526020019081526020015f205f8383815181106108125761081261140c565b60200260200101516020015181526020019081526020015f205f905561091a565b60028282815181106108475761084761140c565b6020026020010151606001516002811115610864576108646113f8565b148061089e5750600182828151811061087f5761087f61140c565b602002602001015160600151600281111561089c5761089c6113f8565b145b1561091a5761091a8282815181106108b8576108b861140c565b60200260200101515f01518383815181106108d5576108d561140c565b6020026020010151602001518484815181106108f3576108f361140c565b6020026020010151604001515f928352600960209081526040808520938552929052912055565b600101610794565b5f5b8151811015610612575f8282815181106109405761094061140c565b602002602001015160800151600281111561095d5761095d6113f8565b0361099f5760085f8383815181106109775761097761140c565b6020908102919091018101515182528101919091526040015f90812081815560010155610a8e565b60028282815181106109b3576109b361140c565b60200260200101516080015160028111156109d0576109d06113f8565b1480610a0a575060018282815181106109eb576109eb61140c565b6020026020010151608001516002811115610a0857610a086113f8565b145b15610a8e57610a8e828281518110610a2457610a2461140c565b60200260200101515f0151838381518110610a4157610a4161140c565b602002602001015160200151848481518110610a5f57610a5f61140c565b602002602001015160400151858581518110610a7d57610a7d61140c565b602002602001015160600151610ab5565b600101610924565b60058181548110610aa5575f80fd5b5f91825260209091200154905081565b604080516060810182529384526001600
160801b0392831660208086019182529284168583019081525f9687526008909352942092518355925192518116600160801b02921691909117600190910155565b60028054610b14906112bb565b80601f0160208091040260200160405190810160405280929190818152602001828054610b40906112bb565b8015610b8b5780601f10610b6257610100808354040283529160200191610b8b565b820191905f5260205f20905b815481529060010190602001808311610b6e57829003601f168201915b505050505081565b604080516060810182529384526001600160801b0392831660208086019182529284168583019081525f978852600a84528288209688529590925290942091518255925191518316600160801b029190921617600190910155565b634e487b7160e01b5f52604160045260245ffd5b60405160c0810167ffffffffffffffff81118282101715610c2557610c25610bee565b60405290565b6040516060810167ffffffffffffffff81118282101715610c2557610c25610bee565b6040516080810167ffffffffffffffff81118282101715610c2557610c25610bee565b60405160a0810167ffffffffffffffff81118282101715610c2557610c25610bee565b604051601f8201601f1916810167ffffffffffffffff81118282101715610cbd57610cbd610bee565b604052919050565b80356001600160a01b0381168114610cdb575f80fd5b919050565b5f805f8060808587031215610cf3575f80fd5b84358015158114610d02575f80fd5b93506020858101359350604086013567ffffffffffffffff80821115610d26575f80fd5b818801915088601f830112610d39575f80fd5b813581811115610d4b57610d4b610bee565b610d5d601f8201601f19168501610c94565b91508082528984828501011115610d72575f80fd5b80848401858401375f84828401015250809450505050610d9460608601610cc5565b905092959194509250565b5f67ffffffffffffffff821115610db857610db8610bee565b5060051b60200190565b6001600160801b0381168114610dd6575f80fd5b50565b803560038110610cdb575f80fd5b5f6020808385031215610df8575f80fd5b823567ffffffffffffffff811115610e0e575f80fd5b8301601f81018513610e1e575f80fd5b8035610e31610e2c82610d9f565b610c94565b81815260c09182028301840191848201919088841115610e4f575f80fd5b938501935b83851015610ece5780858a031215610e6a575f80fd5b610e72610c02565b85358152868601358782015260408087013590820152606080870135610e9781610dc2565b90820152608086810135610eaa81610dc2565b9082015
260a0610ebb878201610dd9565b9082015283529384019391850191610e54565b50979650505050505050565b5f6020808385031215610eeb575f80fd5b823567ffffffffffffffff811115610f01575f80fd5b8301601f81018513610f11575f80fd5b8035610f1f610e2c82610d9f565b81815260609182028301840191848201919088841115610f3d575f80fd5b938501935b83851015610ece5780858a031215610f58575f80fd5b610f60610c2b565b85358152610f6f878701610cc5565b878201526040610f80818801610dd9565b9082015283529384019391850191610f42565b5f8060408385031215610fa4575f80fd5b82359150610fb460208401610cc5565b90509250929050565b5f60208284031215610fcd575f80fd5b5035919050565b5f60608284031215610fe4575f80fd5b50919050565b5f6020808385031215610ffb575f80fd5b823567ffffffffffffffff811115611011575f80fd5b8301601f81018513611021575f80fd5b803561102f610e2c82610d9f565b81815260079190911b8201830190838101908783111561104d575f80fd5b928401925b828410156110aa5760808489031215611069575f80fd5b611071610c4e565b843581528585013586820152604080860135908201526060611094818701610dd9565b9082015282526080939093019290840190611052565b979650505050505050565b5f805f606084860312156110c7575f80fd5b505081359360208301359350604090920135919050565b5f60208083850312156110ef575f80fd5b823567ffffffffffffffff811115611105575f80fd5b8301601f81018513611115575f80fd5b8035611123610e2c82610d9f565b81815260a09182028301840191848201919088841115611141575f80fd5b938501935b83851015610ece5780858a03121561115c575f80fd5b611164610c71565b85358152868601358782015260408087013561117f81610dc2565b9082015260608681013561119281610dc2565b9082015260806111a3878201610dd9565b9082015283529384019391850191611146565b5f805f80608085870312156111c9575f80fd5b843593506020850135925060408501356111e281610dc2565b915060608501356111f281610dc2565b939692955090935050565b5f806040838503121561120e575f80fd5b50508035926020909101359150565b5f602080835283518060208501525f5b818110156112495785810183015185820160400152820161122d565b505f604082860101526040601f19601f8301168501019250505092915050565b5f805f805f60a0868803121561127d575f80fd5b8535945060208601359350604086013592506060860135611
29d81610dc2565b915060808601356112ad81610dc2565b809150509295509295909350565b600181811c908216806112cf57607f821691505b602082108103610fe457634e487b7160e01b5f52602260045260245ffd5b601f82111561078d57805f5260205f20601f840160051c810160208510156113125750805b601f840160051c820191505b81811015611331575f815560010161131e565b5050505050565b815167ffffffffffffffff81111561135257611352610bee565b6113668161136084546112bb565b846112ed565b602080601f831160018114611399575f84156113825750858301515b5f19600386901b1c1916600185901b1785556113f0565b5f85815260208120601f198616915b828110156113c7578886015182559484019460019091019084016113a8565b50858210156113e457878501515f19600388901b60f8161c191681555b505060018460011b0185555b505050505050565b634e487b7160e01b5f52602160045260245ffd5b634e487b7160e01b5f52603260045260245ffd5b81358155602082013561143281610dc2565b604083013561144081610dc2565b6001600160801b03198160801b166001600160801b0383161760018401555050505056fea2646970667358221220687c6fe6e265aabe62404e89b39f883c05f4922b7a4a138f5273c712c742ef4f64736f6c63430008180033 /// ``` #[rustfmt::skip] #[allow(clippy::all)] pub static DEPLOYED_BYTECODE: alloy_sol_types::private::Bytes = alloy_sol_types::private::Bytes::from_static( - 
b"`\x80`@R4\x80\x15a\0\x0FW_\x80\xFD[P`\x046\x10a\x01=W_5`\xE0\x1C\x80c\x88\xDF\xDD\xC6\x11a\0\xB4W\x80c\xC7\xBFM\xB5\x11a\0yW\x80c\xC7\xBFM\xB5\x14a\x03yW\x80c\xC8\xAF:\xA6\x14a\x03\x8CW\x80c\xD1^\xC8Q\x14a\x03\x9FW\x80c\xEA\xD1\x84\0\x14a\x03\xE1W\x80c\xF2]T\xF5\x14a\x04\x03W\x80c\xFBXl}\x14a\x04\x16W_\x80\xFD[\x80c\x88\xDF\xDD\xC6\x14a\x02\xE4W\x80c\x96\xDC\x9AA\x14a\x03\x1EW\x80c\xA3\x14\x15\x0F\x14a\x03HW\x80c\xA5\xD6f\xA9\x14a\x03QW\x80c\xC6\xA7\xF0\xFE\x14a\x03fW_\x80\xFD[\x80c.\xB5\xCF\xD8\x11a\x01\x05W\x80c.\xB5\xCF\xD8\x14a\x01\xEEW\x80cL\xF5\xA9J\x14a\x02\x01W\x80ci\x87\xB1\xFB\x14a\x02*W\x80cl\xC0\x14\xDE\x14a\x02KW\x80c\x80&\xDE1\x14a\x02gW\x80c\x85\xB6H\x9F\x14a\x02zW_\x80\xFD[\x80c\x02\0\"\\\x14a\x01AW\x80c\x0C\x16\x16\xC9\x14a\x01VW\x80c\x14\x17\xA4\xF0\x14a\x01iW\x80c\x1C\x13C\x15\x14a\x01\x96W\x80c*\xE4&\x86\x14a\x01\xA9W[_\x80\xFD[a\x01Ta\x01O6`\x04a\x0C\xE8V[a\x04)V[\0[a\x01Ta\x01d6`\x04a\r\xD8V[a\x04mV[a\x01Ta\x01w6`\x04a\x0E\xB8V[`\x06\x92\x90\x92U`\x01`\x01`\x80\x1B\x03\x91\x82\x16`\x01`\x80\x1B\x02\x91\x16\x17`\x07UV[a\x01Ta\x01\xA46`\x04a\x0E\xF1V[a\x05\xAEV[a\x01\xD1a\x01\xB76`\x04a\x0F\x1BV[`\x04` R_\x90\x81R`@\x90 T`\x01`\x01`\xA0\x1B\x03\x16\x81V[`@Q`\x01`\x01`\xA0\x1B\x03\x90\x91\x16\x81R` \x01[`@Q\x80\x91\x03\x90\xF3[a\x01Ta\x01\xFC6`\x04a\x0F2V[a\x05\xDBV[a\x01Ta\x02\x0F6`\x04a\x0F\xFDV[_\x92\x83R`\t` \x90\x81R`@\x80\x85 \x93\x85R\x92\x90R\x91 UV[a\x02=a\x0286`\x04a\x0F\x1BV[a\x07kV[`@Q\x90\x81R` \x01a\x01\xE5V[_Ta\x02W\x90`\xFF\x16\x81V[`@Q\x90\x15\x15\x81R` \x01a\x01\xE5V[a\x01Ta\x02u6`\x04a\x10&V[a\x07\x8AV[a\x02\xBFa\x02\x886`\x04a\x10^V[`\n` \x90\x81R_\x92\x83R`@\x80\x84 \x90\x91R\x90\x82R\x90 \x80T`\x01\x90\x91\x01T`\x01`\x01`\x80\x1B\x03\x80\x82\x16\x91`\x01`\x80\x1B\x90\x04\x16\x83V[`@\x80Q\x93\x84R`\x01`\x01`\x80\x1B\x03\x92\x83\x16` \x85\x01R\x91\x16\x90\x82\x01R``\x01a\x01\xE5V[a\x02\xBFa\x02\xF26`\x04a\x0F\x1BV[`\x08` R_\x90\x81R`@\x90 
\x80T`\x01\x90\x91\x01T`\x01`\x01`\x80\x1B\x03\x80\x82\x16\x91`\x01`\x80\x1B\x90\x04\x16\x83V[a\x02=a\x03,6`\x04a\x10^V[`\t` \x90\x81R_\x92\x83R`@\x80\x84 \x90\x91R\x90\x82R\x90 T\x81V[a\x02=`\x01T\x81V[a\x03Ya\x07\xDCV[`@Qa\x01\xE5\x91\x90a\x10~V[a\x01Ta\x03t6`\x04a\x10\xCAV[a\x08hV[a\x01Ta\x03\x876`\x04a\x11\x16V[a\x08\xC3V[`\x03Ta\x01\xD1\x90`\x01`\x01`\xA0\x1B\x03\x16\x81V[a\x01Ta\x03\xAD6`\x04a\x0F\x1BV[`\x05\x80T`\x01\x81\x01\x82U_\x91\x90\x91R\x7F\x03kc\x84\xB5\xEC\xA7\x91\xC6'a\x15-\x0Cy\xBB\x06\x04\xC1\x04\xA5\xFBoN\xB0p?1T\xBB=\xB0\x01UV[`\x06T`\x07Ta\x02\xBF\x91\x90`\x01`\x01`\x80\x1B\x03\x80\x82\x16\x91`\x01`\x80\x1B\x90\x04\x16\x83V[a\x01Ta\x04\x116`\x04a\x0F\x1BV[`\x01UV[a\x01Ta\x04$6`\x04a\x11\xEAV[a\n7V[_\x80T`\xFF\x19\x16\x85\x15\x15\x17\x90U`\x01\x83\x90U`\x02a\x04G\x83\x82a\x13LV[P`\x03\x80T`\x01`\x01`\xA0\x1B\x03\x19\x16`\x01`\x01`\xA0\x1B\x03\x92\x90\x92\x16\x91\x90\x91\x17\x90UPPPV[_[\x81Q\x81\x10\x15a\x05\xAAW_\x82\x82\x81Q\x81\x10a\x04\x8BWa\x04\x8Ba\x14 V[` \x02` \x01\x01Q`@\x01Q`\x02\x81\x11\x15a\x04\xA8Wa\x04\xA8a\x14\x0CV[\x03a\x04\xEFW`\x04_\x83\x83\x81Q\x81\x10a\x04\xC2Wa\x04\xC2a\x14 V[` \x90\x81\x02\x91\x90\x91\x01\x81\x01QQ\x82R\x81\x01\x91\x90\x91R`@\x01_ \x80T`\x01`\x01`\xA0\x1B\x03\x19\x16\x90Ua\x05\xA2V[`\x02\x82\x82\x81Q\x81\x10a\x05\x03Wa\x05\x03a\x14 V[` \x02` \x01\x01Q`@\x01Q`\x02\x81\x11\x15a\x05 Wa\x05 a\x14\x0CV[\x14\x80a\x05ZWP`\x01\x82\x82\x81Q\x81\x10a\x05;Wa\x05;a\x14 V[` \x02` \x01\x01Q`@\x01Q`\x02\x81\x11\x15a\x05XWa\x05Xa\x14\x0CV[\x14[\x15a\x05\xA2Wa\x05\xA2\x82\x82\x81Q\x81\x10a\x05tWa\x05ta\x14 V[` \x02` \x01\x01Q_\x01Q\x83\x83\x81Q\x81\x10a\x05\x91Wa\x05\x91a\x14 V[` \x02` \x01\x01Q` \x01Qa\x05\xAEV[`\x01\x01a\x04oV[PPV[_\x91\x82R`\x04` R`@\x90\x91 \x80T`\x01`\x01`\xA0\x1B\x03\x19\x16`\x01`\x01`\xA0\x1B\x03\x90\x92\x16\x91\x90\x91\x17\x90UV[_[\x81Q\x81\x10\x15a\x05\xAAW_\x82\x82\x81Q\x81\x10a\x05\xF9Wa\x05\xF9a\x14 V[` \x02` 
\x01\x01Q``\x01Q`\x02\x81\x11\x15a\x06\x16Wa\x06\x16a\x14\x0CV[\x03a\x06|W`\t_\x83\x83\x81Q\x81\x10a\x060Wa\x060a\x14 V[` \x02` \x01\x01Q_\x01Q\x81R` \x01\x90\x81R` \x01_ _\x83\x83\x81Q\x81\x10a\x06[Wa\x06[a\x14 V[` \x02` \x01\x01Q` \x01Q\x81R` \x01\x90\x81R` \x01_ _\x90Ua\x07cV[`\x02\x82\x82\x81Q\x81\x10a\x06\x90Wa\x06\x90a\x14 V[` \x02` \x01\x01Q``\x01Q`\x02\x81\x11\x15a\x06\xADWa\x06\xADa\x14\x0CV[\x14\x80a\x06\xE7WP`\x01\x82\x82\x81Q\x81\x10a\x06\xC8Wa\x06\xC8a\x14 V[` \x02` \x01\x01Q``\x01Q`\x02\x81\x11\x15a\x06\xE5Wa\x06\xE5a\x14\x0CV[\x14[\x15a\x07cWa\x07c\x82\x82\x81Q\x81\x10a\x07\x01Wa\x07\x01a\x14 V[` \x02` \x01\x01Q_\x01Q\x83\x83\x81Q\x81\x10a\x07\x1EWa\x07\x1Ea\x14 V[` \x02` \x01\x01Q` \x01Q\x84\x84\x81Q\x81\x10a\x07\x986\x0C\x90?h\xC0\xDC\x0B$sC\xEEs\xBF\xF76\x8B\x1C.\xF1A-l\xB2\xDCdsolcC\0\x08\x18\x003", + b"`\x80`@R4\x80\x15a\0\x0FW_\x80\xFD[P`\x046\x10a\x01=W_5`\xE0\x1C\x80c\x80&\xDE1\x11a\0\xB4W\x80c\xA5\xD6f\xA9\x11a\0yW\x80c\xA5\xD6f\xA9\x14a\x03]W\x80c\xC6\xA7\xF0\xFE\x14a\x03rW\x80c\xC8\xAF:\xA6\x14a\x03\x85W\x80c\xD1^\xC8Q\x14a\x03\x98W\x80c\xEA\xD1\x84\0\x14a\x03\xDAW\x80c\xF2]T\xF5\x14a\x03\xFCW_\x80\xFD[\x80c\x80&\xDE1\x14a\x02sW\x80c\x85\xB6H\x9F\x14a\x02\x86W\x80c\x88\xDF\xDD\xC6\x14a\x02\xF0W\x80c\x96\xDC\x9AA\x14a\x03*W\x80c\xA3\x14\x15\x0F\x14a\x03TW_\x80\xFD[\x80c>p\x16n\x11a\x01\x05W\x80c>p\x16n\x14a\x01\xD4W\x80c>\x90`\xC7\x14a\x01\xE7W\x80cL\xF5\xA9J\x14a\x01\xFAW\x80cQ\x97o\xC8\x14a\x02#W\x80ci\x87\xB1\xFB\x14a\x026W\x80cl\xC0\x14\xDE\x14a\x02WW_\x80\xFD[\x80c\x02\0\"\\\x14a\x01AW\x80c\x02\xE3\0:\x14a\x01VW\x80c\x0C\x16\x16\xC9\x14a\x01iW\x80c\x1C\x13C\x15\x14a\x01|W\x80c*\xE4&\x86\x14a\x01\x8FW[_\x80\xFD[a\x01Ta\x01O6`\x04a\x0C\xE0V[a\x04\x0FV[\0[a\x01Ta\x01d6`\x04a\r\xE7V[a\x04SV[a\x01Ta\x01w6`\x04a\x0E\xDAV[a\x06\x16V[a\x01Ta\x01\x8A6`\x04a\x0F\x93V[a\x07SV[a\x01\xB7a\x01\x9D6`\x04a\x0F\xBDV[`\x04` R_\x90\x81R`@\x90 T`\x01`\x01`\xA0\x1B\x03\x16\x81V[`@Q`\x01`\x01`\xA0\x1B\x03\x90\x91\x16\x81R` 
\x01[`@Q\x80\x91\x03\x90\xF3[a\x01Ta\x01\xE26`\x04a\x0F\xD4V[a\x07\x80V[a\x01Ta\x01\xF56`\x04a\x0F\xEAV[a\x07\x92V[a\x01Ta\x02\x086`\x04a\x10\xB5V[_\x92\x83R`\t` \x90\x81R`@\x80\x85 \x93\x85R\x92\x90R\x91 UV[a\x01Ta\x0216`\x04a\x10\xDEV[a\t\"V[a\x02Ia\x02D6`\x04a\x0F\xBDV[a\n\x96V[`@Q\x90\x81R` \x01a\x01\xCBV[_Ta\x02c\x90`\xFF\x16\x81V[`@Q\x90\x15\x15\x81R` \x01a\x01\xCBV[a\x01Ta\x02\x816`\x04a\x11\xB6V[a\n\xB5V[a\x02\xCBa\x02\x946`\x04a\x11\xFDV[`\n` \x90\x81R_\x92\x83R`@\x80\x84 \x90\x91R\x90\x82R\x90 \x80T`\x01\x90\x91\x01T`\x01`\x01`\x80\x1B\x03\x80\x82\x16\x91`\x01`\x80\x1B\x90\x04\x16\x83V[`@\x80Q\x93\x84R`\x01`\x01`\x80\x1B\x03\x92\x83\x16` \x85\x01R\x91\x16\x90\x82\x01R``\x01a\x01\xCBV[a\x02\xCBa\x02\xFE6`\x04a\x0F\xBDV[`\x08` R_\x90\x81R`@\x90 \x80T`\x01\x90\x91\x01T`\x01`\x01`\x80\x1B\x03\x80\x82\x16\x91`\x01`\x80\x1B\x90\x04\x16\x83V[a\x02Ia\x0386`\x04a\x11\xFDV[`\t` \x90\x81R_\x92\x83R`@\x80\x84 \x90\x91R\x90\x82R\x90 T\x81V[a\x02I`\x01T\x81V[a\x03ea\x0B\x07V[`@Qa\x01\xCB\x91\x90a\x12\x1DV[a\x01Ta\x03\x806`\x04a\x12iV[a\x0B\x93V[`\x03Ta\x01\xB7\x90`\x01`\x01`\xA0\x1B\x03\x16\x81V[a\x01Ta\x03\xA66`\x04a\x0F\xBDV[`\x05\x80T`\x01\x81\x01\x82U_\x91\x90\x91R\x7F\x03kc\x84\xB5\xEC\xA7\x91\xC6'a\x15-\x0Cy\xBB\x06\x04\xC1\x04\xA5\xFBoN\xB0p?1T\xBB=\xB0\x01UV[`\x06T`\x07Ta\x02\xCB\x91\x90`\x01`\x01`\x80\x1B\x03\x80\x82\x16\x91`\x01`\x80\x1B\x90\x04\x16\x83V[a\x01Ta\x04\n6`\x04a\x0F\xBDV[`\x01UV[_\x80T`\xFF\x19\x16\x85\x15\x15\x17\x90U`\x01\x83\x90U`\x02a\x04-\x83\x82a\x138V[P`\x03\x80T`\x01`\x01`\xA0\x1B\x03\x19\x16`\x01`\x01`\xA0\x1B\x03\x92\x90\x92\x16\x91\x90\x91\x17\x90UPPPV[_[\x81Q\x81\x10\x15a\x06\x12W_\x82\x82\x81Q\x81\x10a\x04qWa\x04qa\x14\x0CV[` \x02` \x01\x01Q`\xA0\x01Q`\x02\x81\x11\x15a\x04\x8EWa\x04\x8Ea\x13\xF8V[\x03a\x04\xFDW`\n_\x83\x83\x81Q\x81\x10a\x04\xA8Wa\x04\xA8a\x14\x0CV[` \x02` \x01\x01Q_\x01Q\x81R` \x01\x90\x81R` \x01_ _\x83\x83\x81Q\x81\x10a\x04\xD3Wa\x04\xD3a\x14\x0CV[` 
\x90\x81\x02\x91\x90\x91\x01\x81\x01Q\x81\x01Q\x82R\x81\x01\x91\x90\x91R`@\x01_\x90\x81 \x81\x81U`\x01\x01Ua\x06\nV[`\x02\x82\x82\x81Q\x81\x10a\x05\x11Wa\x05\x11a\x14\x0CV[` \x02` \x01\x01Q`\xA0\x01Q`\x02\x81\x11\x15a\x05.Wa\x05.a\x13\xF8V[\x14\x80a\x05hWP`\x01\x82\x82\x81Q\x81\x10a\x05IWa\x05Ia\x14\x0CV[` \x02` \x01\x01Q`\xA0\x01Q`\x02\x81\x11\x15a\x05fWa\x05fa\x13\xF8V[\x14[\x15a\x06\nWa\x06\n\x82\x82\x81Q\x81\x10a\x05\x82Wa\x05\x82a\x14\x0CV[` \x02` \x01\x01Q_\x01Q\x83\x83\x81Q\x81\x10a\x05\x9FWa\x05\x9Fa\x14\x0CV[` \x02` \x01\x01Q` \x01Q\x84\x84\x81Q\x81\x10a\x05\xBDWa\x05\xBDa\x14\x0CV[` \x02` \x01\x01Q`@\x01Q\x85\x85\x81Q\x81\x10a\x05\xDBWa\x05\xDBa\x14\x0CV[` \x02` \x01\x01Q``\x01Q\x86\x86\x81Q\x81\x10a\x05\xF9Wa\x05\xF9a\x14\x0CV[` \x02` \x01\x01Q`\x80\x01Qa\x0B\x93V[`\x01\x01a\x04UV[PPV[_[\x81Q\x81\x10\x15a\x06\x12W_\x82\x82\x81Q\x81\x10a\x064Wa\x064a\x14\x0CV[` \x02` \x01\x01Q`@\x01Q`\x02\x81\x11\x15a\x06QWa\x06Qa\x13\xF8V[\x03a\x06\x98W`\x04_\x83\x83\x81Q\x81\x10a\x06kWa\x06ka\x14\x0CV[` \x90\x81\x02\x91\x90\x91\x01\x81\x01QQ\x82R\x81\x01\x91\x90\x91R`@\x01_ \x80T`\x01`\x01`\xA0\x1B\x03\x19\x16\x90Ua\x07KV[`\x02\x82\x82\x81Q\x81\x10a\x06\xACWa\x06\xACa\x14\x0CV[` \x02` \x01\x01Q`@\x01Q`\x02\x81\x11\x15a\x06\xC9Wa\x06\xC9a\x13\xF8V[\x14\x80a\x07\x03WP`\x01\x82\x82\x81Q\x81\x10a\x06\xE4Wa\x06\xE4a\x14\x0CV[` \x02` \x01\x01Q`@\x01Q`\x02\x81\x11\x15a\x07\x01Wa\x07\x01a\x13\xF8V[\x14[\x15a\x07KWa\x07K\x82\x82\x81Q\x81\x10a\x07\x1DWa\x07\x1Da\x14\x0CV[` \x02` \x01\x01Q_\x01Q\x83\x83\x81Q\x81\x10a\x07:Wa\x07:a\x14\x0CV[` \x02` \x01\x01Q` \x01Qa\x07SV[`\x01\x01a\x06\x18V[_\x91\x82R`\x04` R`@\x90\x91 \x80T`\x01`\x01`\xA0\x1B\x03\x19\x16`\x01`\x01`\xA0\x1B\x03\x90\x92\x16\x91\x90\x91\x17\x90UV[\x80`\x06a\x07\x8D\x82\x82a\x14 V[PPPV[_[\x81Q\x81\x10\x15a\x06\x12W_\x82\x82\x81Q\x81\x10a\x07\xB0Wa\x07\xB0a\x14\x0CV[` \x02` \x01\x01Q``\x01Q`\x02\x81\x11\x15a\x07\xCDWa\x07\xCDa\x13\xF8V[\x03a\x083W`\t_\x83\x83\x81Q\x81\x10a\x07\xE7Wa\x07\xE7a\x14\x0CV[` \x02` 
\x01\x01Q_\x01Q\x81R` \x01\x90\x81R` \x01_ _\x83\x83\x81Q\x81\x10a\x08\x12Wa\x08\x12a\x14\x0CV[` \x02` \x01\x01Q` \x01Q\x81R` \x01\x90\x81R` \x01_ _\x90Ua\t\x1AV[`\x02\x82\x82\x81Q\x81\x10a\x08GWa\x08Ga\x14\x0CV[` \x02` \x01\x01Q``\x01Q`\x02\x81\x11\x15a\x08dWa\x08da\x13\xF8V[\x14\x80a\x08\x9EWP`\x01\x82\x82\x81Q\x81\x10a\x08\x7FWa\x08\x7Fa\x14\x0CV[` \x02` \x01\x01Q``\x01Q`\x02\x81\x11\x15a\x08\x9CWa\x08\x9Ca\x13\xF8V[\x14[\x15a\t\x1AWa\t\x1A\x82\x82\x81Q\x81\x10a\x08\xB8Wa\x08\xB8a\x14\x0CV[` \x02` \x01\x01Q_\x01Q\x83\x83\x81Q\x81\x10a\x08\xD5Wa\x08\xD5a\x14\x0CV[` \x02` \x01\x01Q` \x01Q\x84\x84\x81Q\x81\x10a\x08\xF3Wa\x08\xF3a\x14\x0CV[` \x02` \x01\x01Q`@\x01Q_\x92\x83R`\t` \x90\x81R`@\x80\x85 \x93\x85R\x92\x90R\x91 UV[`\x01\x01a\x07\x94V[_[\x81Q\x81\x10\x15a\x06\x12W_\x82\x82\x81Q\x81\x10a\t@Wa\t@a\x14\x0CV[` \x02` \x01\x01Q`\x80\x01Q`\x02\x81\x11\x15a\t]Wa\t]a\x13\xF8V[\x03a\t\x9FW`\x08_\x83\x83\x81Q\x81\x10a\twWa\twa\x14\x0CV[` \x90\x81\x02\x91\x90\x91\x01\x81\x01QQ\x82R\x81\x01\x91\x90\x91R`@\x01_\x90\x81 \x81\x81U`\x01\x01Ua\n\x8EV[`\x02\x82\x82\x81Q\x81\x10a\t\xB3Wa\t\xB3a\x14\x0CV[` \x02` \x01\x01Q`\x80\x01Q`\x02\x81\x11\x15a\t\xD0Wa\t\xD0a\x13\xF8V[\x14\x80a\n\nWP`\x01\x82\x82\x81Q\x81\x10a\t\xEBWa\t\xEBa\x14\x0CV[` \x02` \x01\x01Q`\x80\x01Q`\x02\x81\x11\x15a\n\x08Wa\n\x08a\x13\xF8V[\x14[\x15a\n\x8EWa\n\x8E\x82\x82\x81Q\x81\x10a\n$Wa\n$a\x14\x0CV[` \x02` \x01\x01Q_\x01Q\x83\x83\x81Q\x81\x10a\nAWa\nAa\x14\x0CV[` \x02` \x01\x01Q` \x01Q\x84\x84\x81Q\x81\x10a\n_Wa\n_a\x14\x0CV[` \x02` \x01\x01Q`@\x01Q\x85\x85\x81Q\x81\x10a\n}Wa\n}a\x14\x0CV[` \x02` \x01\x01Q``\x01Qa\n\xB5V[`\x01\x01a\t$V[`\x05\x81\x81T\x81\x10a\n\xA5W_\x80\xFD[_\x91\x82R` \x90\x91 \x01T\x90P\x81V[`@\x80Q``\x81\x01\x82R\x93\x84R`\x01`\x01`\x80\x1B\x03\x92\x83\x16` \x80\x86\x01\x91\x82R\x92\x84\x16\x85\x83\x01\x90\x81R_\x96\x87R`\x08\x90\x93R\x94 \x92Q\x83U\x92Q\x92Q\x81\x16`\x01`\x80\x1B\x02\x92\x16\x91\x90\x91\x17`\x01\x90\x91\x01UV[`\x02\x80Ta\x0B\x14\x90a\x12\xBBV[\x80`\x1F\x01` 
\x80\x91\x04\x02` \x01`@Q\x90\x81\x01`@R\x80\x92\x91\x90\x81\x81R` \x01\x82\x80Ta\x0B@\x90a\x12\xBBV[\x80\x15a\x0B\x8BW\x80`\x1F\x10a\x0BbWa\x01\0\x80\x83T\x04\x02\x83R\x91` \x01\x91a\x0B\x8BV[\x82\x01\x91\x90_R` _ \x90[\x81T\x81R\x90`\x01\x01\x90` \x01\x80\x83\x11a\x0BnW\x82\x90\x03`\x1F\x16\x82\x01\x91[PPPPP\x81V[`@\x80Q``\x81\x01\x82R\x93\x84R`\x01`\x01`\x80\x1B\x03\x92\x83\x16` \x80\x86\x01\x91\x82R\x92\x84\x16\x85\x83\x01\x90\x81R_\x97\x88R`\n\x84R\x82\x88 \x96\x88R\x95\x90\x92R\x90\x94 \x91Q\x82U\x92Q\x91Q\x83\x16`\x01`\x80\x1B\x02\x91\x90\x92\x16\x17`\x01\x90\x91\x01UV[cNH{q`\xE0\x1B_R`A`\x04R`$_\xFD[`@Q`\xC0\x81\x01g\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\x81\x11\x82\x82\x10\x17\x15a\x0C%Wa\x0C%a\x0B\xEEV[`@R\x90V[`@Q``\x81\x01g\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\x81\x11\x82\x82\x10\x17\x15a\x0C%Wa\x0C%a\x0B\xEEV[`@Q`\x80\x81\x01g\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\x81\x11\x82\x82\x10\x17\x15a\x0C%Wa\x0C%a\x0B\xEEV[`@Q`\xA0\x81\x01g\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\x81\x11\x82\x82\x10\x17\x15a\x0C%Wa\x0C%a\x0B\xEEV[`@Q`\x1F\x82\x01`\x1F\x19\x16\x81\x01g\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\x81\x11\x82\x82\x10\x17\x15a\x0C\xBDWa\x0C\xBDa\x0B\xEEV[`@R\x91\x90PV[\x805`\x01`\x01`\xA0\x1B\x03\x81\x16\x81\x14a\x0C\xDBW_\x80\xFD[\x91\x90PV[_\x80_\x80`\x80\x85\x87\x03\x12\x15a\x0C\xF3W_\x80\xFD[\x845\x80\x15\x15\x81\x14a\r\x02W_\x80\xFD[\x93P` \x85\x81\x015\x93P`@\x86\x015g\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\x80\x82\x11\x15a\r&W_\x80\xFD[\x81\x88\x01\x91P\x88`\x1F\x83\x01\x12a\r9W_\x80\xFD[\x815\x81\x81\x11\x15a\rKWa\rKa\x0B\xEEV[a\r]`\x1F\x82\x01`\x1F\x19\x16\x85\x01a\x0C\x94V[\x91P\x80\x82R\x89\x84\x82\x85\x01\x01\x11\x15a\rrW_\x80\xFD[\x80\x84\x84\x01\x85\x84\x017_\x84\x82\x84\x01\x01RP\x80\x94PPPPa\r\x94``\x86\x01a\x0C\xC5V[\x90P\x92\x95\x91\x94P\x92PV[_g\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\x82\x11\x15a\r\xB8Wa\r\xB8a\x0B\xEEV[P`\x05\x1B` \x01\x90V[`\x01`\x01`\x80\x1B\x03\x81\x16\x81\x14a\r\xD6W_\x80\xFD[PV[\x805`\x03\x81\x10a\x0C\xDBW_\x80\xFD[_` 
\x80\x83\x85\x03\x12\x15a\r\xF8W_\x80\xFD[\x825g\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\x81\x11\x15a\x0E\x0EW_\x80\xFD[\x83\x01`\x1F\x81\x01\x85\x13a\x0E\x1EW_\x80\xFD[\x805a\x0E1a\x0E,\x82a\r\x9FV[a\x0C\x94V[\x81\x81R`\xC0\x91\x82\x02\x83\x01\x84\x01\x91\x84\x82\x01\x91\x90\x88\x84\x11\x15a\x0EOW_\x80\xFD[\x93\x85\x01\x93[\x83\x85\x10\x15a\x0E\xCEW\x80\x85\x8A\x03\x12\x15a\x0EjW_\x80\xFD[a\x0Era\x0C\x02V[\x855\x81R\x86\x86\x015\x87\x82\x01R`@\x80\x87\x015\x90\x82\x01R``\x80\x87\x015a\x0E\x97\x81a\r\xC2V[\x90\x82\x01R`\x80\x86\x81\x015a\x0E\xAA\x81a\r\xC2V[\x90\x82\x01R`\xA0a\x0E\xBB\x87\x82\x01a\r\xD9V[\x90\x82\x01R\x83R\x93\x84\x01\x93\x91\x85\x01\x91a\x0ETV[P\x97\x96PPPPPPPV[_` \x80\x83\x85\x03\x12\x15a\x0E\xEBW_\x80\xFD[\x825g\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\x81\x11\x15a\x0F\x01W_\x80\xFD[\x83\x01`\x1F\x81\x01\x85\x13a\x0F\x11W_\x80\xFD[\x805a\x0F\x1Fa\x0E,\x82a\r\x9FV[\x81\x81R``\x91\x82\x02\x83\x01\x84\x01\x91\x84\x82\x01\x91\x90\x88\x84\x11\x15a\x0F=W_\x80\xFD[\x93\x85\x01\x93[\x83\x85\x10\x15a\x0E\xCEW\x80\x85\x8A\x03\x12\x15a\x0FXW_\x80\xFD[a\x0F`a\x0C+V[\x855\x81Ra\x0Fo\x87\x87\x01a\x0C\xC5V[\x87\x82\x01R`@a\x0F\x80\x81\x88\x01a\r\xD9V[\x90\x82\x01R\x83R\x93\x84\x01\x93\x91\x85\x01\x91a\x0FBV[_\x80`@\x83\x85\x03\x12\x15a\x0F\xA4W_\x80\xFD[\x825\x91Pa\x0F\xB4` \x84\x01a\x0C\xC5V[\x90P\x92P\x92\x90PV[_` \x82\x84\x03\x12\x15a\x0F\xCDW_\x80\xFD[P5\x91\x90PV[_``\x82\x84\x03\x12\x15a\x0F\xE4W_\x80\xFD[P\x91\x90PV[_` 
\x80\x83\x85\x03\x12\x15a\x0F\xFBW_\x80\xFD[\x825g\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\x81\x11\x15a\x10\x11W_\x80\xFD[\x83\x01`\x1F\x81\x01\x85\x13a\x10!W_\x80\xFD[\x805a\x10/a\x0E,\x82a\r\x9FV[\x81\x81R`\x07\x91\x90\x91\x1B\x82\x01\x83\x01\x90\x83\x81\x01\x90\x87\x83\x11\x15a\x10MW_\x80\xFD[\x92\x84\x01\x92[\x82\x84\x10\x15a\x10\xAAW`\x80\x84\x89\x03\x12\x15a\x10iW_\x80\xFD[a\x10qa\x0CNV[\x845\x81R\x85\x85\x015\x86\x82\x01R`@\x80\x86\x015\x90\x82\x01R``a\x10\x94\x81\x87\x01a\r\xD9V[\x90\x82\x01R\x82R`\x80\x93\x90\x93\x01\x92\x90\x84\x01\x90a\x10RV[\x97\x96PPPPPPPV[_\x80_``\x84\x86\x03\x12\x15a\x10\xC7W_\x80\xFD[PP\x815\x93` \x83\x015\x93P`@\x90\x92\x015\x91\x90PV[_` \x80\x83\x85\x03\x12\x15a\x10\xEFW_\x80\xFD[\x825g\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\x81\x11\x15a\x11\x05W_\x80\xFD[\x83\x01`\x1F\x81\x01\x85\x13a\x11\x15W_\x80\xFD[\x805a\x11#a\x0E,\x82a\r\x9FV[\x81\x81R`\xA0\x91\x82\x02\x83\x01\x84\x01\x91\x84\x82\x01\x91\x90\x88\x84\x11\x15a\x11AW_\x80\xFD[\x93\x85\x01\x93[\x83\x85\x10\x15a\x0E\xCEW\x80\x85\x8A\x03\x12\x15a\x11\\W_\x80\xFD[a\x11da\x0CqV[\x855\x81R\x86\x86\x015\x87\x82\x01R`@\x80\x87\x015a\x11\x7F\x81a\r\xC2V[\x90\x82\x01R``\x86\x81\x015a\x11\x92\x81a\r\xC2V[\x90\x82\x01R`\x80a\x11\xA3\x87\x82\x01a\r\xD9V[\x90\x82\x01R\x83R\x93\x84\x01\x93\x91\x85\x01\x91a\x11FV[_\x80_\x80`\x80\x85\x87\x03\x12\x15a\x11\xC9W_\x80\xFD[\x845\x93P` \x85\x015\x92P`@\x85\x015a\x11\xE2\x81a\r\xC2V[\x91P``\x85\x015a\x11\xF2\x81a\r\xC2V[\x93\x96\x92\x95P\x90\x93PPV[_\x80`@\x83\x85\x03\x12\x15a\x12\x0EW_\x80\xFD[PP\x805\x92` \x90\x91\x015\x91PV[_` \x80\x83R\x83Q\x80` \x85\x01R_[\x81\x81\x10\x15a\x12IW\x85\x81\x01\x83\x01Q\x85\x82\x01`@\x01R\x82\x01a\x12-V[P_`@\x82\x86\x01\x01R`@`\x1F\x19`\x1F\x83\x01\x16\x85\x01\x01\x92PPP\x92\x91PPV[_\x80_\x80_`\xA0\x86\x88\x03\x12\x15a\x12}W_\x80\xFD[\x855\x94P` 
\x86\x015\x93P`@\x86\x015\x92P``\x86\x015a\x12\x9D\x81a\r\xC2V[\x91P`\x80\x86\x015a\x12\xAD\x81a\r\xC2V[\x80\x91PP\x92\x95P\x92\x95\x90\x93PV[`\x01\x81\x81\x1C\x90\x82\x16\x80a\x12\xCFW`\x7F\x82\x16\x91P[` \x82\x10\x81\x03a\x0F\xE4WcNH{q`\xE0\x1B_R`\"`\x04R`$_\xFD[`\x1F\x82\x11\x15a\x07\x8DW\x80_R` _ `\x1F\x84\x01`\x05\x1C\x81\x01` \x85\x10\x15a\x13\x12WP\x80[`\x1F\x84\x01`\x05\x1C\x82\x01\x91P[\x81\x81\x10\x15a\x131W_\x81U`\x01\x01a\x13\x1EV[PPPPPV[\x81Qg\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\x81\x11\x15a\x13RWa\x13Ra\x0B\xEEV[a\x13f\x81a\x13`\x84Ta\x12\xBBV[\x84a\x12\xEDV[` \x80`\x1F\x83\x11`\x01\x81\x14a\x13\x99W_\x84\x15a\x13\x82WP\x85\x83\x01Q[_\x19`\x03\x86\x90\x1B\x1C\x19\x16`\x01\x85\x90\x1B\x17\x85Ua\x13\xF0V[_\x85\x81R` \x81 `\x1F\x19\x86\x16\x91[\x82\x81\x10\x15a\x13\xC7W\x88\x86\x01Q\x82U\x94\x84\x01\x94`\x01\x90\x91\x01\x90\x84\x01a\x13\xA8V[P\x85\x82\x10\x15a\x13\xE4W\x87\x85\x01Q_\x19`\x03\x88\x90\x1B`\xF8\x16\x1C\x19\x16\x81U[PP`\x01\x84`\x01\x1B\x01\x85U[PPPPPPV[cNH{q`\xE0\x1B_R`!`\x04R`$_\xFD[cNH{q`\xE0\x1B_R`2`\x04R`$_\xFD[\x815\x81U` \x82\x015a\x142\x81a\r\xC2V[`@\x83\x015a\x14@\x81a\r\xC2V[`\x01`\x01`\x80\x1B\x03\x19\x81`\x80\x1B\x16`\x01`\x01`\x80\x1B\x03\x83\x16\x17`\x01\x84\x01UPPPPV\xFE\xA2dipfsX\"\x12 h|o\xE6\xE2e\xAA\xBEb@N\x89\xB3\x9F\x88<\x05\xF4\x92+zJ\x13\x8FRs\xC7\x12\xC7B\xEFOdsolcC\0\x08\x18\x003", ); #[allow(non_camel_case_types, non_snake_case, clippy::pub_underscore_fields)] #[derive(Clone)] @@ -730,6 +742,227 @@ pub mod Simple { } }; /**```solidity + struct LargeStruct { uint256 field1; uint128 field2; uint128 field3; } + ```*/ + #[allow(non_camel_case_types, non_snake_case, clippy::pub_underscore_fields)] + #[derive(Clone)] + pub struct LargeStruct { + pub field1: alloy::sol_types::private::primitives::aliases::U256, + pub field2: u128, + pub field3: u128, + } + #[allow( + non_camel_case_types, + non_snake_case, + clippy::pub_underscore_fields, + clippy::style + )] + const _: () = { + use alloy::sol_types as alloy_sol_types; + 
#[doc(hidden)] + type UnderlyingSolTuple<'a> = ( + alloy::sol_types::sol_data::Uint<256>, + alloy::sol_types::sol_data::Uint<128>, + alloy::sol_types::sol_data::Uint<128>, + ); + #[doc(hidden)] + type UnderlyingRustTuple<'a> = ( + alloy::sol_types::private::primitives::aliases::U256, + u128, + u128, + ); + #[cfg(test)] + #[allow(dead_code, unreachable_patterns)] + fn _type_assertion(_t: alloy_sol_types::private::AssertTypeEq) { + match _t { + alloy_sol_types::private::AssertTypeEq::< + ::RustType, + >(_) => {} + } + } + #[automatically_derived] + #[doc(hidden)] + impl ::core::convert::From for UnderlyingRustTuple<'_> { + fn from(value: LargeStruct) -> Self { + (value.field1, value.field2, value.field3) + } + } + #[automatically_derived] + #[doc(hidden)] + impl ::core::convert::From> for LargeStruct { + fn from(tuple: UnderlyingRustTuple<'_>) -> Self { + Self { + field1: tuple.0, + field2: tuple.1, + field3: tuple.2, + } + } + } + #[automatically_derived] + impl alloy_sol_types::SolValue for LargeStruct { + type SolType = Self; + } + #[automatically_derived] + impl alloy_sol_types::private::SolTypeValue for LargeStruct { + #[inline] + fn stv_to_tokens(&self) -> ::Token<'_> { + ( + as alloy_sol_types::SolType>::tokenize( + &self.field1, + ), + as alloy_sol_types::SolType>::tokenize( + &self.field2, + ), + as alloy_sol_types::SolType>::tokenize( + &self.field3, + ), + ) + } + #[inline] + fn stv_abi_encoded_size(&self) -> usize { + if let Some(size) = ::ENCODED_SIZE { + return size; + } + let tuple = + as ::core::convert::From>::from(self.clone()); + as alloy_sol_types::SolType>::abi_encoded_size(&tuple) + } + #[inline] + fn stv_eip712_data_word(&self) -> alloy_sol_types::Word { + ::eip712_hash_struct(self) + } + #[inline] + fn stv_abi_encode_packed_to(&self, out: &mut alloy_sol_types::private::Vec) { + let tuple = + as ::core::convert::From>::from(self.clone()); + as alloy_sol_types::SolType>::abi_encode_packed_to( + &tuple, out, + ) + } + #[inline] + fn 
stv_abi_packed_encoded_size(&self) -> usize { + if let Some(size) = ::PACKED_ENCODED_SIZE { + return size; + } + let tuple = + as ::core::convert::From>::from(self.clone()); + as alloy_sol_types::SolType>::abi_packed_encoded_size( + &tuple, + ) + } + } + #[automatically_derived] + impl alloy_sol_types::SolType for LargeStruct { + type RustType = Self; + type Token<'a> = as alloy_sol_types::SolType>::Token<'a>; + const SOL_NAME: &'static str = ::NAME; + const ENCODED_SIZE: Option = + as alloy_sol_types::SolType>::ENCODED_SIZE; + const PACKED_ENCODED_SIZE: Option = + as alloy_sol_types::SolType>::PACKED_ENCODED_SIZE; + #[inline] + fn valid_token(token: &Self::Token<'_>) -> bool { + as alloy_sol_types::SolType>::valid_token(token) + } + #[inline] + fn detokenize(token: Self::Token<'_>) -> Self::RustType { + let tuple = as alloy_sol_types::SolType>::detokenize(token); + >>::from(tuple) + } + } + #[automatically_derived] + impl alloy_sol_types::SolStruct for LargeStruct { + const NAME: &'static str = "LargeStruct"; + #[inline] + fn eip712_root_type() -> alloy_sol_types::private::Cow<'static, str> { + alloy_sol_types::private::Cow::Borrowed( + "LargeStruct(uint256 field1,uint128 field2,uint128 field3)", + ) + } + #[inline] + fn eip712_components( + ) -> alloy_sol_types::private::Vec> + { + alloy_sol_types::private::Vec::new() + } + #[inline] + fn eip712_encode_type() -> alloy_sol_types::private::Cow<'static, str> { + ::eip712_root_type() + } + #[inline] + fn eip712_encode_data(&self) -> alloy_sol_types::private::Vec { + [ + as alloy_sol_types::SolType>::eip712_data_word(&self.field1) + .0, + as alloy_sol_types::SolType>::eip712_data_word(&self.field2) + .0, + as alloy_sol_types::SolType>::eip712_data_word(&self.field3) + .0, + ] + .concat() + } + } + #[automatically_derived] + impl alloy_sol_types::EventTopic for LargeStruct { + #[inline] + fn topic_preimage_length(rust: &Self::RustType) -> usize { + 0usize + + as alloy_sol_types::EventTopic>::topic_preimage_length( + 
&rust.field1, + ) + + as alloy_sol_types::EventTopic>::topic_preimage_length( + &rust.field2, + ) + + as alloy_sol_types::EventTopic>::topic_preimage_length( + &rust.field3, + ) + } + #[inline] + fn encode_topic_preimage( + rust: &Self::RustType, + out: &mut alloy_sol_types::private::Vec, + ) { + out.reserve(::topic_preimage_length(rust)); + as alloy_sol_types::EventTopic>::encode_topic_preimage( + &rust.field1, + out, + ); + as alloy_sol_types::EventTopic>::encode_topic_preimage( + &rust.field2, + out, + ); + as alloy_sol_types::EventTopic>::encode_topic_preimage( + &rust.field3, + out, + ); + } + #[inline] + fn encode_topic(rust: &Self::RustType) -> alloy_sol_types::abi::token::WordToken { + let mut out = alloy_sol_types::private::Vec::new(); + ::encode_topic_preimage(rust, &mut out); + alloy_sol_types::abi::token::WordToken(alloy_sol_types::private::keccak256(out)) + } + } + }; + /**```solidity struct MappingChange { uint256 key; address value; MappingOperation operation; } ```*/ #[allow(non_camel_case_types, non_snake_case, clippy::pub_underscore_fields)] @@ -938,15 +1171,20 @@ pub mod Simple { /**```solidity struct MappingOfSingleValueMappingsChange { uint256 outerKey; uint256 innerKey; uint256 value; MappingOperation operation; } ```*/ - #[allow(non_camel_case_types, non_snake_case)] + #[allow(non_camel_case_types, non_snake_case, clippy::pub_underscore_fields)] #[derive(Clone)] pub struct MappingOfSingleValueMappingsChange { - pub outerKey: alloy::sol_types::private::U256, - pub innerKey: alloy::sol_types::private::U256, - pub value: alloy::sol_types::private::U256, + pub outerKey: alloy::sol_types::private::primitives::aliases::U256, + pub innerKey: alloy::sol_types::private::primitives::aliases::U256, + pub value: alloy::sol_types::private::primitives::aliases::U256, pub operation: ::RustType, } - #[allow(non_camel_case_types, non_snake_case, clippy::style)] + #[allow( + non_camel_case_types, + non_snake_case, + clippy::pub_underscore_fields, + 
clippy::style + )] const _: () = { use alloy::sol_types as alloy_sol_types; #[doc(hidden)] @@ -958,9 +1196,9 @@ pub mod Simple { ); #[doc(hidden)] type UnderlyingRustTuple<'a> = ( - alloy::sol_types::private::U256, - alloy::sol_types::private::U256, - alloy::sol_types::private::U256, + alloy::sol_types::private::primitives::aliases::U256, + alloy::sol_types::private::primitives::aliases::U256, + alloy::sol_types::private::primitives::aliases::U256, ::RustType, ); #[cfg(test)] @@ -1168,17 +1406,22 @@ pub mod Simple { /**```solidity struct MappingOfStructMappingsChange { uint256 outerKey; uint256 innerKey; uint256 field1; uint128 field2; uint128 field3; MappingOperation operation; } ```*/ - #[allow(non_camel_case_types, non_snake_case)] + #[allow(non_camel_case_types, non_snake_case, clippy::pub_underscore_fields)] #[derive(Clone)] pub struct MappingOfStructMappingsChange { - pub outerKey: alloy::sol_types::private::U256, - pub innerKey: alloy::sol_types::private::U256, - pub field1: alloy::sol_types::private::U256, + pub outerKey: alloy::sol_types::private::primitives::aliases::U256, + pub innerKey: alloy::sol_types::private::primitives::aliases::U256, + pub field1: alloy::sol_types::private::primitives::aliases::U256, pub field2: u128, pub field3: u128, pub operation: ::RustType, } - #[allow(non_camel_case_types, non_snake_case, clippy::style)] + #[allow( + non_camel_case_types, + non_snake_case, + clippy::pub_underscore_fields, + clippy::style + )] const _: () = { use alloy::sol_types as alloy_sol_types; #[doc(hidden)] @@ -1192,9 +1435,9 @@ pub mod Simple { ); #[doc(hidden)] type UnderlyingRustTuple<'a> = ( - alloy::sol_types::private::U256, - alloy::sol_types::private::U256, - alloy::sol_types::private::U256, + alloy::sol_types::private::primitives::aliases::U256, + alloy::sol_types::private::primitives::aliases::U256, + alloy::sol_types::private::primitives::aliases::U256, u128, u128, ::RustType, @@ -1451,16 +1694,21 @@ pub mod Simple { /**```solidity struct 
MappingStructChange { uint256 key; uint256 field1; uint128 field2; uint128 field3; MappingOperation operation; } ```*/ - #[allow(non_camel_case_types, non_snake_case)] + #[allow(non_camel_case_types, non_snake_case, clippy::pub_underscore_fields)] #[derive(Clone)] pub struct MappingStructChange { - pub key: alloy::sol_types::private::U256, - pub field1: alloy::sol_types::private::U256, + pub key: alloy::sol_types::private::primitives::aliases::U256, + pub field1: alloy::sol_types::private::primitives::aliases::U256, pub field2: u128, pub field3: u128, pub operation: ::RustType, } - #[allow(non_camel_case_types, non_snake_case, clippy::style)] + #[allow( + non_camel_case_types, + non_snake_case, + clippy::pub_underscore_fields, + clippy::style + )] const _: () = { use alloy::sol_types as alloy_sol_types; #[doc(hidden)] @@ -1473,8 +1721,8 @@ pub mod Simple { ); #[doc(hidden)] type UnderlyingRustTuple<'a> = ( - alloy::sol_types::private::U256, - alloy::sol_types::private::U256, + alloy::sol_types::private::primitives::aliases::U256, + alloy::sol_types::private::primitives::aliases::U256, u128, u128, ::RustType, @@ -1933,20 +2181,21 @@ pub mod Simple { } } }; - /**Function with signature `changeMapping((uint256,address,uint8)[])` and selector `0x0c1616c9`. + /**Function with signature `changeMapping((uint256,uint256,uint256,uint128,uint128,uint8)[])` and selector `0x02e3003a`. ```solidity - function changeMapping(MappingChange[] memory changes) external; + function changeMapping(MappingOfStructMappingsChange[] memory changes) external; ```*/ #[allow(non_camel_case_types, non_snake_case, clippy::pub_underscore_fields)] #[derive(Clone)] - pub struct changeMappingCall { - pub changes: - alloy::sol_types::private::Vec<::RustType>, + pub struct changeMapping_0Call { + pub changes: alloy::sol_types::private::Vec< + ::RustType, + >, } - ///Container type for the return parameters of the [`changeMapping((uint256,address,uint8)[])`](changeMappingCall) function. 
+ ///Container type for the return parameters of the [`changeMapping((uint256,uint256,uint256,uint128,uint128,uint8)[])`](changeMapping_0Call) function. #[allow(non_camel_case_types, non_snake_case, clippy::pub_underscore_fields)] #[derive(Clone)] - pub struct changeMappingReturn {} + pub struct changeMapping_0Return {} #[allow( non_camel_case_types, non_snake_case, @@ -1957,11 +2206,12 @@ pub mod Simple { use alloy::sol_types as alloy_sol_types; { #[doc(hidden)] - type UnderlyingSolTuple<'a> = (alloy::sol_types::sol_data::Array,); + type UnderlyingSolTuple<'a> = + (alloy::sol_types::sol_data::Array,); #[doc(hidden)] type UnderlyingRustTuple<'a> = ( alloy::sol_types::private::Vec< - ::RustType, + ::RustType, >, ); #[cfg(test)] @@ -1975,14 +2225,14 @@ pub mod Simple { } #[automatically_derived] #[doc(hidden)] - impl ::core::convert::From for UnderlyingRustTuple<'_> { - fn from(value: changeMappingCall) -> Self { + impl ::core::convert::From for UnderlyingRustTuple<'_> { + fn from(value: changeMapping_0Call) -> Self { (value.changes,) } } #[automatically_derived] #[doc(hidden)] - impl ::core::convert::From> for changeMappingCall { + impl ::core::convert::From> for changeMapping_0Call { fn from(tuple: UnderlyingRustTuple<'_>) -> Self { Self { changes: tuple.0 } } @@ -2004,28 +2254,30 @@ pub mod Simple { } #[automatically_derived] #[doc(hidden)] - impl ::core::convert::From for UnderlyingRustTuple<'_> { - fn from(value: changeMappingReturn) -> Self { + impl ::core::convert::From for UnderlyingRustTuple<'_> { + fn from(value: changeMapping_0Return) -> Self { () } } #[automatically_derived] #[doc(hidden)] - impl ::core::convert::From> for changeMappingReturn { + impl ::core::convert::From> for changeMapping_0Return { fn from(tuple: UnderlyingRustTuple<'_>) -> Self { Self {} } } } #[automatically_derived] - impl alloy_sol_types::SolCall for changeMappingCall { - type Parameters<'a> = (alloy::sol_types::sol_data::Array,); + impl alloy_sol_types::SolCall for 
changeMapping_0Call { + type Parameters<'a> = + (alloy::sol_types::sol_data::Array,); type Token<'a> = as alloy_sol_types::SolType>::Token<'a>; - type Return = changeMappingReturn; + type Return = changeMapping_0Return; type ReturnTuple<'a> = (); type ReturnToken<'a> = as alloy_sol_types::SolType>::Token<'a>; - const SIGNATURE: &'static str = "changeMapping((uint256,address,uint8)[])"; - const SELECTOR: [u8; 4] = [12u8, 22u8, 22u8, 201u8]; + const SIGNATURE: &'static str = + "changeMapping((uint256,uint256,uint256,uint128,uint128,uint8)[])"; + const SELECTOR: [u8; 4] = [2u8, 227u8, 0u8, 58u8]; #[inline] fn new<'a>( tuple: as alloy_sol_types::SolType>::RustType, @@ -2034,11 +2286,11 @@ pub mod Simple { } #[inline] fn tokenize(&self) -> Self::Token<'_> { - ( - as alloy_sol_types::SolType>::tokenize(&self.changes), - ) + ( as alloy_sol_types::SolType>::tokenize( + &self.changes + ),) } #[inline] fn abi_decode_returns( @@ -2052,32 +2304,35 @@ pub mod Simple { } } }; - /**Function with signature `changeMappingOfSingleValueMappings((uint256,uint256,uint256,uint8)[])` and selector `0x2eb5cfd8`. + /**Function with signature `changeMapping((uint256,address,uint8)[])` and selector `0x0c1616c9`. ```solidity - function changeMappingOfSingleValueMappings(MappingOfSingleValueMappingsChange[] memory changes) external; + function changeMapping(MappingChange[] memory changes) external; ```*/ - #[allow(non_camel_case_types, non_snake_case)] + #[allow(non_camel_case_types, non_snake_case, clippy::pub_underscore_fields)] #[derive(Clone)] - pub struct changeMappingOfSingleValueMappingsCall { - pub changes: alloy::sol_types::private::Vec< - ::RustType, - >, + pub struct changeMapping_1Call { + pub changes: + alloy::sol_types::private::Vec<::RustType>, } - ///Container type for the return parameters of the [`changeMappingOfSingleValueMappings((uint256,uint256,uint256,uint8)[])`](changeMappingOfSingleValueMappingsCall) function. 
- #[allow(non_camel_case_types, non_snake_case)] + ///Container type for the return parameters of the [`changeMapping((uint256,address,uint8)[])`](changeMapping_1Call) function. + #[allow(non_camel_case_types, non_snake_case, clippy::pub_underscore_fields)] #[derive(Clone)] - pub struct changeMappingOfSingleValueMappingsReturn {} - #[allow(non_camel_case_types, non_snake_case, clippy::style)] + pub struct changeMapping_1Return {} + #[allow( + non_camel_case_types, + non_snake_case, + clippy::pub_underscore_fields, + clippy::style + )] const _: () = { use alloy::sol_types as alloy_sol_types; { #[doc(hidden)] - type UnderlyingSolTuple<'a> = - (alloy::sol_types::sol_data::Array,); + type UnderlyingSolTuple<'a> = (alloy::sol_types::sol_data::Array,); #[doc(hidden)] type UnderlyingRustTuple<'a> = ( alloy::sol_types::private::Vec< - ::RustType, + ::RustType, >, ); #[cfg(test)] @@ -2091,14 +2346,14 @@ pub mod Simple { } #[automatically_derived] #[doc(hidden)] - impl ::core::convert::From for UnderlyingRustTuple<'_> { - fn from(value: changeMappingOfSingleValueMappingsCall) -> Self { + impl ::core::convert::From for UnderlyingRustTuple<'_> { + fn from(value: changeMapping_1Call) -> Self { (value.changes,) } } #[automatically_derived] #[doc(hidden)] - impl ::core::convert::From> for changeMappingOfSingleValueMappingsCall { + impl ::core::convert::From> for changeMapping_1Call { fn from(tuple: UnderlyingRustTuple<'_>) -> Self { Self { changes: tuple.0 } } @@ -2120,30 +2375,28 @@ pub mod Simple { } #[automatically_derived] #[doc(hidden)] - impl ::core::convert::From for UnderlyingRustTuple<'_> { - fn from(value: changeMappingOfSingleValueMappingsReturn) -> Self { + impl ::core::convert::From for UnderlyingRustTuple<'_> { + fn from(value: changeMapping_1Return) -> Self { () } } #[automatically_derived] #[doc(hidden)] - impl ::core::convert::From> for changeMappingOfSingleValueMappingsReturn { + impl ::core::convert::From> for changeMapping_1Return { fn from(tuple: 
UnderlyingRustTuple<'_>) -> Self { Self {} } } } #[automatically_derived] - impl alloy_sol_types::SolCall for changeMappingOfSingleValueMappingsCall { - type Parameters<'a> = - (alloy::sol_types::sol_data::Array,); + impl alloy_sol_types::SolCall for changeMapping_1Call { + type Parameters<'a> = (alloy::sol_types::sol_data::Array,); type Token<'a> = as alloy_sol_types::SolType>::Token<'a>; - type Return = changeMappingOfSingleValueMappingsReturn; + type Return = changeMapping_1Return; type ReturnTuple<'a> = (); type ReturnToken<'a> = as alloy_sol_types::SolType>::Token<'a>; - const SIGNATURE: &'static str = - "changeMappingOfSingleValueMappings((uint256,uint256,uint256,uint8)[])"; - const SELECTOR: [u8; 4] = [46u8, 181u8, 207u8, 216u8]; + const SIGNATURE: &'static str = "changeMapping((uint256,address,uint8)[])"; + const SELECTOR: [u8; 4] = [12u8, 22u8, 22u8, 201u8]; #[inline] fn new<'a>( tuple: as alloy_sol_types::SolType>::RustType, @@ -2152,11 +2405,11 @@ pub mod Simple { } #[inline] fn tokenize(&self) -> Self::Token<'_> { - ( as alloy_sol_types::SolType>::tokenize( - &self.changes - ),) + ( + as alloy_sol_types::SolType>::tokenize(&self.changes), + ) } #[inline] fn abi_decode_returns( @@ -2170,32 +2423,37 @@ pub mod Simple { } } }; - /**Function with signature `changeMappingOfStructMappings((uint256,uint256,uint256,uint128,uint128,uint8)[])` and selector `0xfb586c7d`. + /**Function with signature `changeMapping((uint256,uint256,uint256,uint8)[])` and selector `0x3e9060c7`. 
```solidity - function changeMappingOfStructMappings(MappingOfStructMappingsChange[] memory changes) external; + function changeMapping(MappingOfSingleValueMappingsChange[] memory changes) external; ```*/ - #[allow(non_camel_case_types, non_snake_case)] + #[allow(non_camel_case_types, non_snake_case, clippy::pub_underscore_fields)] #[derive(Clone)] - pub struct changeMappingOfStructMappingsCall { + pub struct changeMapping_2Call { pub changes: alloy::sol_types::private::Vec< - ::RustType, + ::RustType, >, } - ///Container type for the return parameters of the [`changeMappingOfStructMappings((uint256,uint256,uint256,uint128,uint128,uint8)[])`](changeMappingOfStructMappingsCall) function. - #[allow(non_camel_case_types, non_snake_case)] + ///Container type for the return parameters of the [`changeMapping((uint256,uint256,uint256,uint8)[])`](changeMapping_2Call) function. + #[allow(non_camel_case_types, non_snake_case, clippy::pub_underscore_fields)] #[derive(Clone)] - pub struct changeMappingOfStructMappingsReturn {} - #[allow(non_camel_case_types, non_snake_case, clippy::style)] + pub struct changeMapping_2Return {} + #[allow( + non_camel_case_types, + non_snake_case, + clippy::pub_underscore_fields, + clippy::style + )] const _: () = { use alloy::sol_types as alloy_sol_types; { #[doc(hidden)] type UnderlyingSolTuple<'a> = - (alloy::sol_types::sol_data::Array,); + (alloy::sol_types::sol_data::Array,); #[doc(hidden)] type UnderlyingRustTuple<'a> = ( alloy::sol_types::private::Vec< - ::RustType, + ::RustType, >, ); #[cfg(test)] @@ -2209,14 +2467,14 @@ pub mod Simple { } #[automatically_derived] #[doc(hidden)] - impl ::core::convert::From for UnderlyingRustTuple<'_> { - fn from(value: changeMappingOfStructMappingsCall) -> Self { + impl ::core::convert::From for UnderlyingRustTuple<'_> { + fn from(value: changeMapping_2Call) -> Self { (value.changes,) } } #[automatically_derived] #[doc(hidden)] - impl ::core::convert::From> for changeMappingOfStructMappingsCall { + impl 
::core::convert::From> for changeMapping_2Call { fn from(tuple: UnderlyingRustTuple<'_>) -> Self { Self { changes: tuple.0 } } @@ -2238,30 +2496,29 @@ pub mod Simple { } #[automatically_derived] #[doc(hidden)] - impl ::core::convert::From for UnderlyingRustTuple<'_> { - fn from(value: changeMappingOfStructMappingsReturn) -> Self { + impl ::core::convert::From for UnderlyingRustTuple<'_> { + fn from(value: changeMapping_2Return) -> Self { () } } #[automatically_derived] #[doc(hidden)] - impl ::core::convert::From> for changeMappingOfStructMappingsReturn { + impl ::core::convert::From> for changeMapping_2Return { fn from(tuple: UnderlyingRustTuple<'_>) -> Self { Self {} } } } #[automatically_derived] - impl alloy_sol_types::SolCall for changeMappingOfStructMappingsCall { + impl alloy_sol_types::SolCall for changeMapping_2Call { type Parameters<'a> = - (alloy::sol_types::sol_data::Array,); + (alloy::sol_types::sol_data::Array,); type Token<'a> = as alloy_sol_types::SolType>::Token<'a>; - type Return = changeMappingOfStructMappingsReturn; + type Return = changeMapping_2Return; type ReturnTuple<'a> = (); type ReturnToken<'a> = as alloy_sol_types::SolType>::Token<'a>; - const SIGNATURE: &'static str = - "changeMappingOfStructMappings((uint256,uint256,uint256,uint128,uint128,uint8)[])"; - const SELECTOR: [u8; 4] = [251u8, 88u8, 108u8, 125u8]; + const SIGNATURE: &'static str = "changeMapping((uint256,uint256,uint256,uint8)[])"; + const SELECTOR: [u8; 4] = [62u8, 144u8, 96u8, 199u8]; #[inline] fn new<'a>( tuple: as alloy_sol_types::SolType>::RustType, @@ -2271,7 +2528,7 @@ pub mod Simple { #[inline] fn tokenize(&self) -> Self::Token<'_> { ( as alloy_sol_types::SolType>::tokenize( &self.changes ),) @@ -2288,22 +2545,27 @@ pub mod Simple { } } }; - /**Function with signature `changeMappingStruct((uint256,uint256,uint128,uint128,uint8)[])` and selector `0xc7bf4db5`. 
+ /**Function with signature `changeMapping((uint256,uint256,uint128,uint128,uint8)[])` and selector `0x51976fc8`. ```solidity - function changeMappingStruct(MappingStructChange[] memory changes) external; + function changeMapping(MappingStructChange[] memory changes) external; ```*/ - #[allow(non_camel_case_types, non_snake_case)] + #[allow(non_camel_case_types, non_snake_case, clippy::pub_underscore_fields)] #[derive(Clone)] - pub struct changeMappingStructCall { + pub struct changeMapping_3Call { pub changes: alloy::sol_types::private::Vec< ::RustType, >, } - ///Container type for the return parameters of the [`changeMappingStruct((uint256,uint256,uint128,uint128,uint8)[])`](changeMappingStructCall) function. - #[allow(non_camel_case_types, non_snake_case)] + ///Container type for the return parameters of the [`changeMapping((uint256,uint256,uint128,uint128,uint8)[])`](changeMapping_3Call) function. + #[allow(non_camel_case_types, non_snake_case, clippy::pub_underscore_fields)] #[derive(Clone)] - pub struct changeMappingStructReturn {} - #[allow(non_camel_case_types, non_snake_case, clippy::style)] + pub struct changeMapping_3Return {} + #[allow( + non_camel_case_types, + non_snake_case, + clippy::pub_underscore_fields, + clippy::style + )] const _: () = { use alloy::sol_types as alloy_sol_types; { @@ -2326,14 +2588,14 @@ pub mod Simple { } #[automatically_derived] #[doc(hidden)] - impl ::core::convert::From for UnderlyingRustTuple<'_> { - fn from(value: changeMappingStructCall) -> Self { + impl ::core::convert::From for UnderlyingRustTuple<'_> { + fn from(value: changeMapping_3Call) -> Self { (value.changes,) } } #[automatically_derived] #[doc(hidden)] - impl ::core::convert::From> for changeMappingStructCall { + impl ::core::convert::From> for changeMapping_3Call { fn from(tuple: UnderlyingRustTuple<'_>) -> Self { Self { changes: tuple.0 } } @@ -2355,29 +2617,29 @@ pub mod Simple { } #[automatically_derived] #[doc(hidden)] - impl ::core::convert::From for 
UnderlyingRustTuple<'_> { - fn from(value: changeMappingStructReturn) -> Self { + impl ::core::convert::From for UnderlyingRustTuple<'_> { + fn from(value: changeMapping_3Return) -> Self { () } } #[automatically_derived] #[doc(hidden)] - impl ::core::convert::From> for changeMappingStructReturn { + impl ::core::convert::From> for changeMapping_3Return { fn from(tuple: UnderlyingRustTuple<'_>) -> Self { Self {} } } } #[automatically_derived] - impl alloy_sol_types::SolCall for changeMappingStructCall { + impl alloy_sol_types::SolCall for changeMapping_3Call { type Parameters<'a> = (alloy::sol_types::sol_data::Array,); type Token<'a> = as alloy_sol_types::SolType>::Token<'a>; - type Return = changeMappingStructReturn; + type Return = changeMapping_3Return; type ReturnTuple<'a> = (); type ReturnToken<'a> = as alloy_sol_types::SolType>::Token<'a>; const SIGNATURE: &'static str = - "changeMappingStruct((uint256,uint256,uint128,uint128,uint8)[])"; - const SELECTOR: [u8; 4] = [199u8, 191u8, 77u8, 181u8]; + "changeMapping((uint256,uint256,uint128,uint128,uint8)[])"; + const SELECTOR: [u8; 4] = [81u8, 151u8, 111u8, 200u8]; #[inline] fn new<'a>( tuple: as alloy_sol_types::SolType>::RustType, @@ -2524,19 +2786,24 @@ pub mod Simple { ```solidity function mappingOfSingleValueMappings(uint256, uint256) external view returns (uint256); ```*/ - #[allow(non_camel_case_types, non_snake_case)] + #[allow(non_camel_case_types, non_snake_case, clippy::pub_underscore_fields)] #[derive(Clone)] pub struct mappingOfSingleValueMappingsCall { - pub _0: alloy::sol_types::private::U256, - pub _1: alloy::sol_types::private::U256, + pub _0: alloy::sol_types::private::primitives::aliases::U256, + pub _1: alloy::sol_types::private::primitives::aliases::U256, } ///Container type for the return parameters of the [`mappingOfSingleValueMappings(uint256,uint256)`](mappingOfSingleValueMappingsCall) function. 
- #[allow(non_camel_case_types, non_snake_case)] + #[allow(non_camel_case_types, non_snake_case, clippy::pub_underscore_fields)] #[derive(Clone)] pub struct mappingOfSingleValueMappingsReturn { - pub _0: alloy::sol_types::private::U256, + pub _0: alloy::sol_types::private::primitives::aliases::U256, } - #[allow(non_camel_case_types, non_snake_case, clippy::style)] + #[allow( + non_camel_case_types, + non_snake_case, + clippy::pub_underscore_fields, + clippy::style + )] const _: () = { use alloy::sol_types as alloy_sol_types; { @@ -2547,8 +2814,8 @@ pub mod Simple { ); #[doc(hidden)] type UnderlyingRustTuple<'a> = ( - alloy::sol_types::private::U256, - alloy::sol_types::private::U256, + alloy::sol_types::private::primitives::aliases::U256, + alloy::sol_types::private::primitives::aliases::U256, ); #[cfg(test)] #[allow(dead_code, unreachable_patterns)] @@ -2581,7 +2848,7 @@ pub mod Simple { #[doc(hidden)] type UnderlyingSolTuple<'a> = (alloy::sol_types::sol_data::Uint<256>,); #[doc(hidden)] - type UnderlyingRustTuple<'a> = (alloy::sol_types::private::U256,); + type UnderlyingRustTuple<'a> = (alloy::sol_types::private::primitives::aliases::U256,); #[cfg(test)] #[allow(dead_code, unreachable_patterns)] fn _type_assertion(_t: alloy_sol_types::private::AssertTypeEq) { @@ -2651,21 +2918,26 @@ pub mod Simple { ```solidity function mappingOfStructMappings(uint256, uint256) external view returns (uint256 field1, uint128 field2, uint128 field3); ```*/ - #[allow(non_camel_case_types, non_snake_case)] + #[allow(non_camel_case_types, non_snake_case, clippy::pub_underscore_fields)] #[derive(Clone)] pub struct mappingOfStructMappingsCall { - pub _0: alloy::sol_types::private::U256, - pub _1: alloy::sol_types::private::U256, + pub _0: alloy::sol_types::private::primitives::aliases::U256, + pub _1: alloy::sol_types::private::primitives::aliases::U256, } ///Container type for the return parameters of the [`mappingOfStructMappings(uint256,uint256)`](mappingOfStructMappingsCall) 
function. - #[allow(non_camel_case_types, non_snake_case)] + #[allow(non_camel_case_types, non_snake_case, clippy::pub_underscore_fields)] #[derive(Clone)] pub struct mappingOfStructMappingsReturn { - pub field1: alloy::sol_types::private::U256, + pub field1: alloy::sol_types::private::primitives::aliases::U256, pub field2: u128, pub field3: u128, } - #[allow(non_camel_case_types, non_snake_case, clippy::style)] + #[allow( + non_camel_case_types, + non_snake_case, + clippy::pub_underscore_fields, + clippy::style + )] const _: () = { use alloy::sol_types as alloy_sol_types; { @@ -2676,8 +2948,8 @@ pub mod Simple { ); #[doc(hidden)] type UnderlyingRustTuple<'a> = ( - alloy::sol_types::private::U256, - alloy::sol_types::private::U256, + alloy::sol_types::private::primitives::aliases::U256, + alloy::sol_types::private::primitives::aliases::U256, ); #[cfg(test)] #[allow(dead_code, unreachable_patterns)] @@ -2714,7 +2986,11 @@ pub mod Simple { alloy::sol_types::sol_data::Uint<128>, ); #[doc(hidden)] - type UnderlyingRustTuple<'a> = (alloy::sol_types::private::U256, u128, u128); + type UnderlyingRustTuple<'a> = ( + alloy::sol_types::private::primitives::aliases::U256, + u128, + u128, + ); #[cfg(test)] #[allow(dead_code, unreachable_patterns)] fn _type_assertion(_t: alloy_sol_types::private::AssertTypeEq) { @@ -3362,18 +3638,23 @@ pub mod Simple { ```solidity function setMappingOfSingleValueMappings(uint256 outerKey, uint256 innerKey, uint256 value) external; ```*/ - #[allow(non_camel_case_types, non_snake_case)] + #[allow(non_camel_case_types, non_snake_case, clippy::pub_underscore_fields)] #[derive(Clone)] pub struct setMappingOfSingleValueMappingsCall { - pub outerKey: alloy::sol_types::private::U256, - pub innerKey: alloy::sol_types::private::U256, - pub value: alloy::sol_types::private::U256, + pub outerKey: alloy::sol_types::private::primitives::aliases::U256, + pub innerKey: alloy::sol_types::private::primitives::aliases::U256, + pub value: 
alloy::sol_types::private::primitives::aliases::U256, } ///Container type for the return parameters of the [`setMappingOfSingleValueMappings(uint256,uint256,uint256)`](setMappingOfSingleValueMappingsCall) function. - #[allow(non_camel_case_types, non_snake_case)] + #[allow(non_camel_case_types, non_snake_case, clippy::pub_underscore_fields)] #[derive(Clone)] pub struct setMappingOfSingleValueMappingsReturn {} - #[allow(non_camel_case_types, non_snake_case, clippy::style)] + #[allow( + non_camel_case_types, + non_snake_case, + clippy::pub_underscore_fields, + clippy::style + )] const _: () = { use alloy::sol_types as alloy_sol_types; { @@ -3385,9 +3666,9 @@ pub mod Simple { ); #[doc(hidden)] type UnderlyingRustTuple<'a> = ( - alloy::sol_types::private::U256, - alloy::sol_types::private::U256, - alloy::sol_types::private::U256, + alloy::sol_types::private::primitives::aliases::U256, + alloy::sol_types::private::primitives::aliases::U256, + alloy::sol_types::private::primitives::aliases::U256, ); #[cfg(test)] #[allow(dead_code, unreachable_patterns)] @@ -3496,20 +3777,25 @@ pub mod Simple { ```solidity function setMappingOfStructMappings(uint256 outerKey, uint256 innerKey, uint256 field1, uint128 field2, uint128 field3) external; ```*/ - #[allow(non_camel_case_types, non_snake_case)] + #[allow(non_camel_case_types, non_snake_case, clippy::pub_underscore_fields)] #[derive(Clone)] pub struct setMappingOfStructMappingsCall { - pub outerKey: alloy::sol_types::private::U256, - pub innerKey: alloy::sol_types::private::U256, - pub field1: alloy::sol_types::private::U256, + pub outerKey: alloy::sol_types::private::primitives::aliases::U256, + pub innerKey: alloy::sol_types::private::primitives::aliases::U256, + pub field1: alloy::sol_types::private::primitives::aliases::U256, pub field2: u128, pub field3: u128, } ///Container type for the return parameters of the [`setMappingOfStructMappings(uint256,uint256,uint256,uint128,uint128)`](setMappingOfStructMappingsCall) function. 
- #[allow(non_camel_case_types, non_snake_case)] + #[allow(non_camel_case_types, non_snake_case, clippy::pub_underscore_fields)] #[derive(Clone)] pub struct setMappingOfStructMappingsReturn {} - #[allow(non_camel_case_types, non_snake_case, clippy::style)] + #[allow( + non_camel_case_types, + non_snake_case, + clippy::pub_underscore_fields, + clippy::style + )] const _: () = { use alloy::sol_types as alloy_sol_types; { @@ -3523,9 +3809,9 @@ pub mod Simple { ); #[doc(hidden)] type UnderlyingRustTuple<'a> = ( - alloy::sol_types::private::U256, - alloy::sol_types::private::U256, - alloy::sol_types::private::U256, + alloy::sol_types::private::primitives::aliases::U256, + alloy::sol_types::private::primitives::aliases::U256, + alloy::sol_types::private::primitives::aliases::U256, u128, u128, ); @@ -3652,19 +3938,24 @@ pub mod Simple { ```solidity function setMappingStruct(uint256 _key, uint256 _field1, uint128 _field2, uint128 _field3) external; ```*/ - #[allow(non_camel_case_types, non_snake_case)] + #[allow(non_camel_case_types, non_snake_case, clippy::pub_underscore_fields)] #[derive(Clone)] pub struct setMappingStructCall { - pub _key: alloy::sol_types::private::U256, - pub _field1: alloy::sol_types::private::U256, + pub _key: alloy::sol_types::private::primitives::aliases::U256, + pub _field1: alloy::sol_types::private::primitives::aliases::U256, pub _field2: u128, pub _field3: u128, } ///Container type for the return parameters of the [`setMappingStruct(uint256,uint256,uint128,uint128)`](setMappingStructCall) function. 
- #[allow(non_camel_case_types, non_snake_case)] + #[allow(non_camel_case_types, non_snake_case, clippy::pub_underscore_fields)] #[derive(Clone)] pub struct setMappingStructReturn {} - #[allow(non_camel_case_types, non_snake_case, clippy::style)] + #[allow( + non_camel_case_types, + non_snake_case, + clippy::pub_underscore_fields, + clippy::style + )] const _: () = { use alloy::sol_types as alloy_sol_types; { @@ -3677,8 +3968,8 @@ pub mod Simple { ); #[doc(hidden)] type UnderlyingRustTuple<'a> = ( - alloy::sol_types::private::U256, - alloy::sol_types::private::U256, + alloy::sol_types::private::primitives::aliases::U256, + alloy::sol_types::private::primitives::aliases::U256, u128, u128, ); @@ -3903,33 +4194,32 @@ pub mod Simple { } } }; - /**Function with signature `setSimpleStruct(uint256,uint128,uint128)` and selector `0x1417a4f0`. + /**Function with signature `setSimpleStruct((uint256,uint128,uint128))` and selector `0x3e70166e`. ```solidity - function setSimpleStruct(uint256 _field1, uint128 _field2, uint128 _field3) external; + function setSimpleStruct(LargeStruct memory input) external; ```*/ - #[allow(non_camel_case_types, non_snake_case)] + #[allow(non_camel_case_types, non_snake_case, clippy::pub_underscore_fields)] #[derive(Clone)] pub struct setSimpleStructCall { - pub _field1: alloy::sol_types::private::U256, - pub _field2: u128, - pub _field3: u128, + pub input: ::RustType, } - ///Container type for the return parameters of the [`setSimpleStruct(uint256,uint128,uint128)`](setSimpleStructCall) function. - #[allow(non_camel_case_types, non_snake_case)] + ///Container type for the return parameters of the [`setSimpleStruct((uint256,uint128,uint128))`](setSimpleStructCall) function. 
+ #[allow(non_camel_case_types, non_snake_case, clippy::pub_underscore_fields)] #[derive(Clone)] pub struct setSimpleStructReturn {} - #[allow(non_camel_case_types, non_snake_case, clippy::style)] + #[allow( + non_camel_case_types, + non_snake_case, + clippy::pub_underscore_fields, + clippy::style + )] const _: () = { use alloy::sol_types as alloy_sol_types; { #[doc(hidden)] - type UnderlyingSolTuple<'a> = ( - alloy::sol_types::sol_data::Uint<256>, - alloy::sol_types::sol_data::Uint<128>, - alloy::sol_types::sol_data::Uint<128>, - ); + type UnderlyingSolTuple<'a> = (LargeStruct,); #[doc(hidden)] - type UnderlyingRustTuple<'a> = (alloy::sol_types::private::U256, u128, u128); + type UnderlyingRustTuple<'a> = (::RustType,); #[cfg(test)] #[allow(dead_code, unreachable_patterns)] fn _type_assertion(_t: alloy_sol_types::private::AssertTypeEq) { @@ -3943,18 +4233,14 @@ pub mod Simple { #[doc(hidden)] impl ::core::convert::From for UnderlyingRustTuple<'_> { fn from(value: setSimpleStructCall) -> Self { - (value._field1, value._field2, value._field3) + (value.input,) } } #[automatically_derived] #[doc(hidden)] impl ::core::convert::From> for setSimpleStructCall { fn from(tuple: UnderlyingRustTuple<'_>) -> Self { - Self { - _field1: tuple.0, - _field2: tuple.1, - _field3: tuple.2, - } + Self { input: tuple.0 } } } } @@ -3989,17 +4275,13 @@ pub mod Simple { } #[automatically_derived] impl alloy_sol_types::SolCall for setSimpleStructCall { - type Parameters<'a> = ( - alloy::sol_types::sol_data::Uint<256>, - alloy::sol_types::sol_data::Uint<128>, - alloy::sol_types::sol_data::Uint<128>, - ); + type Parameters<'a> = (LargeStruct,); type Token<'a> = as alloy_sol_types::SolType>::Token<'a>; type Return = setSimpleStructReturn; type ReturnTuple<'a> = (); type ReturnToken<'a> = as alloy_sol_types::SolType>::Token<'a>; - const SIGNATURE: &'static str = "setSimpleStruct(uint256,uint128,uint128)"; - const SELECTOR: [u8; 4] = [20u8, 23u8, 164u8, 240u8]; + const SIGNATURE: &'static str = 
"setSimpleStruct((uint256,uint128,uint128))"; + const SELECTOR: [u8; 4] = [62u8, 112u8, 22u8, 110u8]; #[inline] fn new<'a>( tuple: as alloy_sol_types::SolType>::RustType, @@ -4008,17 +4290,9 @@ pub mod Simple { } #[inline] fn tokenize(&self) -> Self::Token<'_> { - ( - as alloy_sol_types::SolType>::tokenize( - &self._field1, - ), - as alloy_sol_types::SolType>::tokenize( - &self._field2, - ), - as alloy_sol_types::SolType>::tokenize( - &self._field3, - ), - ) + (::tokenize( + &self.input, + ),) } #[inline] fn abi_decode_returns( @@ -4182,18 +4456,23 @@ pub mod Simple { ```solidity function simpleStruct() external view returns (uint256 field1, uint128 field2, uint128 field3); ```*/ - #[allow(non_camel_case_types, non_snake_case)] + #[allow(non_camel_case_types, non_snake_case, clippy::pub_underscore_fields)] #[derive(Clone)] pub struct simpleStructCall {} ///Container type for the return parameters of the [`simpleStruct()`](simpleStructCall) function. - #[allow(non_camel_case_types, non_snake_case)] + #[allow(non_camel_case_types, non_snake_case, clippy::pub_underscore_fields)] #[derive(Clone)] pub struct simpleStructReturn { - pub field1: alloy::sol_types::private::U256, + pub field1: alloy::sol_types::private::primitives::aliases::U256, pub field2: u128, pub field3: u128, } - #[allow(non_camel_case_types, non_snake_case, clippy::style)] + #[allow( + non_camel_case_types, + non_snake_case, + clippy::pub_underscore_fields, + clippy::style + )] const _: () = { use alloy::sol_types as alloy_sol_types; { @@ -4233,7 +4512,11 @@ pub mod Simple { alloy::sol_types::sol_data::Uint<128>, ); #[doc(hidden)] - type UnderlyingRustTuple<'a> = (alloy::sol_types::private::U256, u128, u128); + type UnderlyingRustTuple<'a> = ( + alloy::sol_types::private::primitives::aliases::U256, + u128, + u128, + ); #[cfg(test)] #[allow(dead_code, unreachable_patterns)] fn _type_assertion(_t: alloy_sol_types::private::AssertTypeEq) { @@ -4301,27 +4584,32 @@ pub mod Simple { ```solidity function 
structMapping(uint256) external view returns (uint256 field1, uint128 field2, uint128 field3); ```*/ - #[allow(non_camel_case_types, non_snake_case)] + #[allow(non_camel_case_types, non_snake_case, clippy::pub_underscore_fields)] #[derive(Clone)] pub struct structMappingCall { - pub _0: alloy::sol_types::private::U256, + pub _0: alloy::sol_types::private::primitives::aliases::U256, } ///Container type for the return parameters of the [`structMapping(uint256)`](structMappingCall) function. - #[allow(non_camel_case_types, non_snake_case)] + #[allow(non_camel_case_types, non_snake_case, clippy::pub_underscore_fields)] #[derive(Clone)] pub struct structMappingReturn { - pub field1: alloy::sol_types::private::U256, + pub field1: alloy::sol_types::private::primitives::aliases::U256, pub field2: u128, pub field3: u128, } - #[allow(non_camel_case_types, non_snake_case, clippy::style)] + #[allow( + non_camel_case_types, + non_snake_case, + clippy::pub_underscore_fields, + clippy::style + )] const _: () = { use alloy::sol_types as alloy_sol_types; { #[doc(hidden)] type UnderlyingSolTuple<'a> = (alloy::sol_types::sol_data::Uint<256>,); #[doc(hidden)] - type UnderlyingRustTuple<'a> = (alloy::sol_types::private::U256,); + type UnderlyingRustTuple<'a> = (alloy::sol_types::private::primitives::aliases::U256,); #[cfg(test)] #[allow(dead_code, unreachable_patterns)] fn _type_assertion(_t: alloy_sol_types::private::AssertTypeEq) { @@ -4354,7 +4642,11 @@ pub mod Simple { alloy::sol_types::sol_data::Uint<128>, ); #[doc(hidden)] - type UnderlyingRustTuple<'a> = (alloy::sol_types::private::U256, u128, u128); + type UnderlyingRustTuple<'a> = ( + alloy::sol_types::private::primitives::aliases::U256, + u128, + u128, + ); #[cfg(test)] #[allow(dead_code, unreachable_patterns)] fn _type_assertion(_t: alloy_sol_types::private::AssertTypeEq) { @@ -4426,10 +4718,10 @@ pub mod Simple { pub enum SimpleCalls { addToArray(addToArrayCall), arr1(arr1Call), - changeMapping(changeMappingCall), - 
changeMappingOfSingleValueMappings(changeMappingOfSingleValueMappingsCall), - changeMappingOfStructMappings(changeMappingOfStructMappingsCall), - changeMappingStruct(changeMappingStructCall), + changeMapping_0(changeMapping_0Call), + changeMapping_1(changeMapping_1Call), + changeMapping_2(changeMapping_2Call), + changeMapping_3(changeMapping_3Call), m1(m1Call), mappingOfSingleValueMappings(mappingOfSingleValueMappingsCall), mappingOfStructMappings(mappingOfStructMappingsCall), @@ -4457,12 +4749,14 @@ pub mod Simple { /// Prefer using `SolInterface` methods instead. pub const SELECTORS: &'static [[u8; 4usize]] = &[ [2u8, 0u8, 34u8, 92u8], + [2u8, 227u8, 0u8, 58u8], [12u8, 22u8, 22u8, 201u8], - [20u8, 23u8, 164u8, 240u8], [28u8, 19u8, 67u8, 21u8], [42u8, 228u8, 38u8, 134u8], - [46u8, 181u8, 207u8, 216u8], + [62u8, 112u8, 22u8, 110u8], + [62u8, 144u8, 96u8, 199u8], [76u8, 245u8, 169u8, 74u8], + [81u8, 151u8, 111u8, 200u8], [105u8, 135u8, 177u8, 251u8], [108u8, 192u8, 20u8, 222u8], [128u8, 38u8, 222u8, 49u8], @@ -4472,12 +4766,10 @@ pub mod Simple { [163u8, 20u8, 21u8, 15u8], [165u8, 214u8, 102u8, 169u8], [198u8, 167u8, 240u8, 254u8], - [199u8, 191u8, 77u8, 181u8], [200u8, 175u8, 58u8, 166u8], [209u8, 94u8, 200u8, 81u8], [234u8, 209u8, 132u8, 0u8], [242u8, 93u8, 84u8, 245u8], - [251u8, 88u8, 108u8, 125u8], ]; } #[automatically_derived] @@ -4490,15 +4782,17 @@ pub mod Simple { match self { Self::addToArray(_) => ::SELECTOR, Self::arr1(_) => ::SELECTOR, - Self::changeMapping(_) => ::SELECTOR, - Self::changeMappingOfSingleValueMappings(_) => { - ::SELECTOR + Self::changeMapping_0(_) => { + ::SELECTOR } - Self::changeMappingOfStructMappings(_) => { - ::SELECTOR + Self::changeMapping_1(_) => { + ::SELECTOR } - Self::changeMappingStruct(_) => { - ::SELECTOR + Self::changeMapping_2(_) => { + ::SELECTOR + } + Self::changeMapping_3(_) => { + ::SELECTOR } Self::m1(_) => ::SELECTOR, Self::mappingOfSingleValueMappings(_) => { @@ -4557,28 +4851,28 @@ pub mod Simple { setSimples }, 
{ - fn changeMapping( + fn changeMapping_0( data: &[u8], validate: bool, ) -> alloy_sol_types::Result { - ::abi_decode_raw( + ::abi_decode_raw( data, validate, ) - .map(SimpleCalls::changeMapping) + .map(SimpleCalls::changeMapping_0) } - changeMapping + changeMapping_0 }, { - fn setSimpleStruct( + fn changeMapping_1( data: &[u8], validate: bool, ) -> alloy_sol_types::Result { - ::abi_decode_raw( + ::abi_decode_raw( data, validate, ) - .map(SimpleCalls::setSimpleStruct) + .map(SimpleCalls::changeMapping_1) } - setSimpleStruct + changeMapping_1 }, { fn setMapping( @@ -4598,17 +4892,28 @@ pub mod Simple { m1 }, { - fn changeMappingOfSingleValueMappings( + fn setSimpleStruct( data: &[u8], validate: bool, ) -> alloy_sol_types::Result { - ::abi_decode_raw( - data, - validate, - ) - .map(SimpleCalls::changeMappingOfSingleValueMappings) + ::abi_decode_raw( + data, validate, + ) + .map(SimpleCalls::setSimpleStruct) + } + setSimpleStruct + }, + { + fn changeMapping_2( + data: &[u8], + validate: bool, + ) -> alloy_sol_types::Result { + ::abi_decode_raw( + data, validate, + ) + .map(SimpleCalls::changeMapping_2) } - changeMappingOfSingleValueMappings + changeMapping_2 }, { fn setMappingOfSingleValueMappings( @@ -4623,6 +4928,18 @@ pub mod Simple { } setMappingOfSingleValueMappings }, + { + fn changeMapping_3( + data: &[u8], + validate: bool, + ) -> alloy_sol_types::Result { + ::abi_decode_raw( + data, validate, + ) + .map(SimpleCalls::changeMapping_3) + } + changeMapping_3 + }, { fn arr1(data: &[u8], validate: bool) -> alloy_sol_types::Result { ::abi_decode_raw(data, validate) @@ -4713,18 +5030,6 @@ pub mod Simple { } setMappingOfStructMappings }, - { - fn changeMappingStruct( - data: &[u8], - validate: bool, - ) -> alloy_sol_types::Result { - ::abi_decode_raw( - data, validate, - ) - .map(SimpleCalls::changeMappingStruct) - } - changeMappingStruct - }, { fn s4(data: &[u8], validate: bool) -> alloy_sol_types::Result { ::abi_decode_raw(data, validate) @@ -4761,19 +5066,6 @@ pub 
mod Simple { } setS2 }, - { - fn changeMappingOfStructMappings( - data: &[u8], - validate: bool, - ) -> alloy_sol_types::Result { - ::abi_decode_raw( - data, - validate, - ) - .map(SimpleCalls::changeMappingOfStructMappings) - } - changeMappingOfStructMappings - }, ]; let Ok(idx) = Self::SELECTORS.binary_search(&selector) else { return Err(alloy_sol_types::Error::unknown_selector( @@ -4792,23 +5084,23 @@ pub mod Simple { Self::arr1(inner) => { ::abi_encoded_size(inner) } - Self::changeMapping(inner) => { - ::abi_encoded_size( + Self::changeMapping_0(inner) => { + ::abi_encoded_size( inner, ) } - Self::changeMappingOfSingleValueMappings(inner) => { - ::abi_encoded_size( + Self::changeMapping_1(inner) => { + ::abi_encoded_size( inner, ) } - Self::changeMappingOfStructMappings(inner) => { - ::abi_encoded_size( + Self::changeMapping_2(inner) => { + ::abi_encoded_size( inner, ) } - Self::changeMappingStruct(inner) => { - ::abi_encoded_size( + Self::changeMapping_3(inner) => { + ::abi_encoded_size( inner, ) } @@ -4890,26 +5182,26 @@ pub mod Simple { Self::arr1(inner) => { ::abi_encode_raw(inner, out) } - Self::changeMapping(inner) => { - ::abi_encode_raw( + Self::changeMapping_0(inner) => { + ::abi_encode_raw( inner, out, ) } - Self::changeMappingOfSingleValueMappings(inner) => { - ::abi_encode_raw( + Self::changeMapping_1(inner) => { + ::abi_encode_raw( inner, out, ) } - Self::changeMappingOfStructMappings(inner) => { - ::abi_encode_raw( + Self::changeMapping_2(inner) => { + ::abi_encode_raw( inner, out, ) } - Self::changeMappingStruct(inner) => { - ::abi_encode_raw( + Self::changeMapping_3(inner) => { + ::abi_encode_raw( inner, out, ) @@ -5172,42 +5464,41 @@ pub mod Simple { ) -> alloy_contract::SolCallBuilder { self.call_builder(&arr1Call { _0 }) } - ///Creates a new call builder for the [`changeMapping`] function. - pub fn changeMapping( + ///Creates a new call builder for the [`changeMapping_0`] function. 
+ pub fn changeMapping_0( &self, changes: alloy::sol_types::private::Vec< - ::RustType, + ::RustType, >, - ) -> alloy_contract::SolCallBuilder { - self.call_builder(&changeMappingCall { changes }) + ) -> alloy_contract::SolCallBuilder { + self.call_builder(&changeMapping_0Call { changes }) } - ///Creates a new call builder for the [`changeMappingOfSingleValueMappings`] function. - pub fn changeMappingOfSingleValueMappings( + ///Creates a new call builder for the [`changeMapping_1`] function. + pub fn changeMapping_1( &self, changes: alloy::sol_types::private::Vec< - ::RustType, + ::RustType, >, - ) -> alloy_contract::SolCallBuilder - { - self.call_builder(&changeMappingOfSingleValueMappingsCall { changes }) + ) -> alloy_contract::SolCallBuilder { + self.call_builder(&changeMapping_1Call { changes }) } - ///Creates a new call builder for the [`changeMappingOfStructMappings`] function. - pub fn changeMappingOfStructMappings( + ///Creates a new call builder for the [`changeMapping_2`] function. + pub fn changeMapping_2( &self, changes: alloy::sol_types::private::Vec< - ::RustType, + ::RustType, >, - ) -> alloy_contract::SolCallBuilder { - self.call_builder(&changeMappingOfStructMappingsCall { changes }) + ) -> alloy_contract::SolCallBuilder { + self.call_builder(&changeMapping_2Call { changes }) } - ///Creates a new call builder for the [`changeMappingStruct`] function. - pub fn changeMappingStruct( + ///Creates a new call builder for the [`changeMapping_3`] function. + pub fn changeMapping_3( &self, changes: alloy::sol_types::private::Vec< ::RustType, >, - ) -> alloy_contract::SolCallBuilder { - self.call_builder(&changeMappingStructCall { changes }) + ) -> alloy_contract::SolCallBuilder { + self.call_builder(&changeMapping_3Call { changes }) } ///Creates a new call builder for the [`m1`] function. pub fn m1( @@ -5219,16 +5510,16 @@ pub mod Simple { ///Creates a new call builder for the [`mappingOfSingleValueMappings`] function. 
pub fn mappingOfSingleValueMappings( &self, - _0: alloy::sol_types::private::U256, - _1: alloy::sol_types::private::U256, + _0: alloy::sol_types::private::primitives::aliases::U256, + _1: alloy::sol_types::private::primitives::aliases::U256, ) -> alloy_contract::SolCallBuilder { self.call_builder(&mappingOfSingleValueMappingsCall { _0, _1 }) } ///Creates a new call builder for the [`mappingOfStructMappings`] function. pub fn mappingOfStructMappings( &self, - _0: alloy::sol_types::private::U256, - _1: alloy::sol_types::private::U256, + _0: alloy::sol_types::private::primitives::aliases::U256, + _1: alloy::sol_types::private::primitives::aliases::U256, ) -> alloy_contract::SolCallBuilder { self.call_builder(&mappingOfStructMappingsCall { _0, _1 }) } @@ -5259,9 +5550,9 @@ pub mod Simple { ///Creates a new call builder for the [`setMappingOfSingleValueMappings`] function. pub fn setMappingOfSingleValueMappings( &self, - outerKey: alloy::sol_types::private::U256, - innerKey: alloy::sol_types::private::U256, - value: alloy::sol_types::private::U256, + outerKey: alloy::sol_types::private::primitives::aliases::U256, + innerKey: alloy::sol_types::private::primitives::aliases::U256, + value: alloy::sol_types::private::primitives::aliases::U256, ) -> alloy_contract::SolCallBuilder { self.call_builder(&setMappingOfSingleValueMappingsCall { outerKey, @@ -5272,9 +5563,9 @@ pub mod Simple { ///Creates a new call builder for the [`setMappingOfStructMappings`] function. 
pub fn setMappingOfStructMappings( &self, - outerKey: alloy::sol_types::private::U256, - innerKey: alloy::sol_types::private::U256, - field1: alloy::sol_types::private::U256, + outerKey: alloy::sol_types::private::primitives::aliases::U256, + innerKey: alloy::sol_types::private::primitives::aliases::U256, + field1: alloy::sol_types::private::primitives::aliases::U256, field2: u128, field3: u128, ) -> alloy_contract::SolCallBuilder { @@ -5289,8 +5580,8 @@ pub mod Simple { ///Creates a new call builder for the [`setMappingStruct`] function. pub fn setMappingStruct( &self, - _key: alloy::sol_types::private::U256, - _field1: alloy::sol_types::private::U256, + _key: alloy::sol_types::private::primitives::aliases::U256, + _field1: alloy::sol_types::private::primitives::aliases::U256, _field2: u128, _field3: u128, ) -> alloy_contract::SolCallBuilder { @@ -5311,15 +5602,9 @@ pub mod Simple { ///Creates a new call builder for the [`setSimpleStruct`] function. pub fn setSimpleStruct( &self, - _field1: alloy::sol_types::private::U256, - _field2: u128, - _field3: u128, + input: ::RustType, ) -> alloy_contract::SolCallBuilder { - self.call_builder(&setSimpleStructCall { - _field1, - _field2, - _field3, - }) + self.call_builder(&setSimpleStructCall { input }) } ///Creates a new call builder for the [`setSimples`] function. pub fn setSimples( @@ -5343,7 +5628,7 @@ pub mod Simple { ///Creates a new call builder for the [`structMapping`] function. 
pub fn structMapping( &self, - _0: alloy::sol_types::private::U256, + _0: alloy::sol_types::private::primitives::aliases::U256, ) -> alloy_contract::SolCallBuilder { self.call_builder(&structMappingCall { _0 }) } diff --git a/mp2-v1/tests/common/cases/contract.rs b/mp2-v1/tests/common/cases/contract.rs index 8ad16edbe..3103e05f4 100644 --- a/mp2-v1/tests/common/cases/contract.rs +++ b/mp2-v1/tests/common/cases/contract.rs @@ -1,7 +1,6 @@ -use super::{ - slot_info::{LargeStruct, MappingKey, MappingOfMappingsKey, StorageSlotValue}, - table_source::DEFAULT_ADDRESS, -}; +use std::future::Future; + +use super::slot_info::{LargeStruct, MappingInfo, StorageSlotMappingKey, StorageSlotValue}; use crate::common::{ bindings::simple::{ Simple, @@ -14,11 +13,18 @@ use crate::common::{ }; use alloy::{ contract::private::Provider, + network::Ethereum, primitives::{Address, U256}, - providers::ProviderBuilder, + providers::{ProviderBuilder, RootProvider}, + transports::Transport, }; +use anyhow::Result; use itertools::Itertools; -use log::{debug, info}; +use log::info; + +use crate::common::bindings::simple::Simple::SimpleInstance; + +use super::indexing::ContractUpdate; pub struct Contract { pub address: Address, @@ -40,6 +46,41 @@ impl Contract { let chain_id = ctx.rpc.get_chain_id().await.unwrap(); Self { address, chain_id } } + + /// Creates a new [`Contract`] from an [`Address`] and `chain_id` + pub fn new(address: Address, chain_id: u64) -> Contract { + Contract { address, chain_id } + } + /// Getter for `chain_id` + pub fn chain_id(&self) -> u64 { + self.chain_id + } + /// Getter for [`Address`] + pub fn address(&self) -> Address { + self.address + } +} + +/// Trait implemented by any test contract. +pub trait TestContract +where + T: Transport + Clone, +{ + /// How this implementor ingests updates. + type Update: ContractUpdate; + /// The actual contract instance. 
+ type Contract; + /// Function that generates a new instance of self given a [`Provider`] and a `chain_id` + fn new(address: Address, provider: &RootProvider) -> Self; + /// Get an instance of the contract. + fn get_instance(&self) -> &Self::Contract; + /// Apply an update to the contract. + async fn apply_update(&self, ctx: &TestContext, update: &Self::Update) -> Result<()> { + let contract = self.get_instance(); + update.apply_to(ctx, contract).await; + info!("Updated contract with new values {:?}", update); + Ok(()) + } } /// Common functions for a specific type to interact with the test contract @@ -48,7 +89,11 @@ pub trait ContractController { async fn current_values(ctx: &TestContext, contract: &Contract) -> Self; /// Update the values to the contract. - async fn update_contract(&self, ctx: &TestContext, contract: &Contract); + fn update_contract( + &self, + ctx: &TestContext, + contract: &Contract, + ) -> impl Future + Send; } /// Single values collection @@ -112,7 +157,7 @@ impl ContractController for LargeStruct { .on_http(ctx.rpc_url.parse().unwrap()); let simple_contract = Simple::new(contract.address, &provider); - let call = simple_contract.setSimpleStruct(self.field1, self.field2, self.field3); + let call = simple_contract.setSimpleStruct((*self).into()); call.send().await.unwrap().watch().await.unwrap(); // Sanity check { @@ -133,6 +178,20 @@ pub enum MappingUpdate { Update(K, V, V), } +impl MappingUpdate +where + K: StorageSlotMappingKey, + V: StorageSlotValue, +{ + pub fn to_tuple(&self) -> (K, V) { + match self { + MappingUpdate::Insertion(key, value) + | MappingUpdate::Deletion(key, value) + | MappingUpdate::Update(key, _, value) => (key.clone(), value.clone()), + } + } +} + impl From<&MappingUpdate> for MappingOperation { fn from(update: &MappingUpdate) -> Self { Self::from(match update { @@ -143,123 +202,10 @@ impl From<&MappingUpdate> for MappingOperation { } } -impl ContractController for Vec> { +impl ContractController for Vec> { async fn 
current_values(_ctx: &TestContext, _contract: &Contract) -> Self { unimplemented!("Unimplemented for fetching the all mapping values") } - - async fn update_contract(&self, ctx: &TestContext, contract: &Contract) { - let provider = ProviderBuilder::new() - .with_recommended_fillers() - .wallet(ctx.wallet()) - .on_http(ctx.rpc_url.parse().unwrap()); - let contract = Simple::new(contract.address, &provider); - - let changes = self - .iter() - .map(|tuple| { - let operation: MappingOperation = tuple.into(); - let operation = operation.into(); - let (key, value) = match tuple { - MappingUpdate::Deletion(k, _) => (*k, *DEFAULT_ADDRESS), - MappingUpdate::Update(k, _, v) | MappingUpdate::Insertion(k, v) => (*k, *v), - }; - MappingChange { - operation, - key, - value, - } - }) - .collect_vec(); - - let call = contract.changeMapping(changes); - call.send().await.unwrap().watch().await.unwrap(); - // Sanity check - for update in self.iter() { - match update { - MappingUpdate::Deletion(k, _) => { - let res = contract.m1(*k).call().await.unwrap(); - let v: U256 = res._0.into_word().into(); - assert_eq!(v, U256::ZERO, "Key deletion is wrong on contract"); - } - MappingUpdate::Insertion(k, v) => { - let res = contract.m1(*k).call().await.unwrap(); - let new_value: U256 = res._0.into_word().into(); - let new_value = Address::from_u256_slice(&[new_value]); - assert_eq!(&new_value, v, "Key insertion is wrong on contract"); - } - MappingUpdate::Update(k, _, v) => { - let res = contract.m1(*k).call().await.unwrap(); - let new_value: U256 = res._0.into_word().into(); - let new_value = Address::from_u256_slice(&[new_value]); - assert_eq!(&new_value, v, "Key update is wrong on contract"); - } - } - } - log::info!("Updated simple contract single values"); - } -} - -impl ContractController for Vec> { - async fn current_values(_ctx: &TestContext, _contract: &Contract) -> Self { - unimplemented!("Unimplemented for fetching the all mapping values") - } - - async fn update_contract(&self, 
ctx: &TestContext, contract: &Contract) { - let provider = ProviderBuilder::new() - .with_recommended_fillers() - .wallet(ctx.wallet()) - .on_http(ctx.rpc_url.parse().unwrap()); - let contract = Simple::new(contract.address, &provider); - - let changes = self - .iter() - .map(|tuple| { - let operation: MappingOperation = tuple.into(); - let operation = operation.into(); - let (key, field1, field2, field3) = match tuple { - MappingUpdate::Insertion(k, v) - | MappingUpdate::Deletion(k, v) - | MappingUpdate::Update(k, _, v) => (*k, v.field1, v.field2, v.field3), - }; - MappingStructChange { - operation, - key, - field1, - field2, - field3, - } - }) - .collect_vec(); - - let call = contract.changeMappingStruct(changes); - call.send().await.unwrap().watch().await.unwrap(); - // Sanity check - for update in self.iter() { - match update { - MappingUpdate::Deletion(k, _) => { - let res = contract.structMapping(*k).call().await.unwrap(); - assert_eq!( - LargeStruct::from(res), - LargeStruct::new(U256::from(0), 0, 0) - ); - } - MappingUpdate::Insertion(k, v) | MappingUpdate::Update(k, _, v) => { - let res = contract.structMapping(*k).call().await.unwrap(); - debug!("Set mapping struct: key = {k}, value = {v:?}"); - assert_eq!(&LargeStruct::from(res), v); - } - } - } - log::info!("Updated simple contract for mapping values of LargeStruct"); - } -} - -impl ContractController for Vec> { - async fn current_values(_ctx: &TestContext, _contract: &Contract) -> Self { - unimplemented!("Unimplemented for fetching the all mapping of mappings") - } - async fn update_contract(&self, ctx: &TestContext, contract: &Contract) { let provider = ProviderBuilder::new() .with_recommended_fillers() @@ -267,129 +213,8 @@ impl ContractController for Vec> { .on_http(ctx.rpc_url.parse().unwrap()); let contract = Simple::new(contract.address, &provider); - let changes = self - .iter() - .map(|tuple| { - let operation: MappingOperation = tuple.into(); - let operation = operation.into(); - let (k, v) = 
match tuple { - MappingUpdate::Insertion(k, v) - | MappingUpdate::Deletion(k, v) - | MappingUpdate::Update(k, _, v) => (k, v), - }; + let changes = self.iter().map(T::to_call).collect_vec(); - MappingOfSingleValueMappingsChange { - operation, - outerKey: k.outer_key, - innerKey: k.inner_key, - value: *v, - } - }) - .collect_vec(); - - let call = contract.changeMappingOfSingleValueMappings(changes); - call.send().await.unwrap().watch().await.unwrap(); - // Sanity check - for update in self.iter() { - match update { - MappingUpdate::Insertion(k, v) => { - let res = contract - .mappingOfSingleValueMappings(k.outer_key, k.inner_key) - .call() - .await - .unwrap(); - assert_eq!(&res._0, v, "Insertion is wrong on contract"); - } - MappingUpdate::Deletion(k, _) => { - let res = contract - .mappingOfSingleValueMappings(k.outer_key, k.inner_key) - .call() - .await - .unwrap(); - assert_eq!(res._0, U256::ZERO, "Deletion is wrong on contract"); - } - MappingUpdate::Update(k, _, v) => { - let res = contract - .mappingOfSingleValueMappings(k.outer_key, k.inner_key) - .call() - .await - .unwrap(); - assert_eq!(&res._0, v, "Update is wrong on contract"); - } - } - } - log::info!("Updated simple contract for mapping of single value mappings"); - } -} - -impl ContractController for Vec> { - async fn current_values(_ctx: &TestContext, _contract: &Contract) -> Self { - unimplemented!("Unimplemented for fetching the all mapping of mappings") - } - - async fn update_contract(&self, ctx: &TestContext, contract: &Contract) { - let provider = ProviderBuilder::new() - .with_recommended_fillers() - .wallet(ctx.wallet()) - .on_http(ctx.rpc_url.parse().unwrap()); - let contract = Simple::new(contract.address, &provider); - - let changes = self - .iter() - .map(|tuple| { - let operation: MappingOperation = tuple.into(); - let operation = operation.into(); - let (k, v) = match tuple { - MappingUpdate::Insertion(k, v) - | MappingUpdate::Deletion(k, v) - | MappingUpdate::Update(k, _, v) => (k, 
v), - }; - - MappingOfStructMappingsChange { - operation, - outerKey: k.outer_key, - innerKey: k.inner_key, - field1: v.field1, - field2: v.field2, - field3: v.field3, - } - }) - .collect_vec(); - - let call = contract.changeMappingOfStructMappings(changes); - call.send().await.unwrap().watch().await.unwrap(); - // Sanity check - for update in self.iter() { - match update { - MappingUpdate::Insertion(k, v) => { - let res = contract - .mappingOfStructMappings(k.outer_key, k.inner_key) - .call() - .await - .unwrap(); - let res = LargeStruct::from(res); - assert_eq!(&res, v, "Insertion is wrong on contract"); - } - MappingUpdate::Deletion(k, _) => { - let res = contract - .mappingOfStructMappings(k.outer_key, k.inner_key) - .call() - .await - .unwrap(); - let res = LargeStruct::from(res); - assert_eq!(res, LargeStruct::default(), "Deletion is wrong on contract"); - } - MappingUpdate::Update(k, _, v) => { - let res = contract - .mappingOfStructMappings(k.outer_key, k.inner_key) - .call() - .await - .unwrap(); - let res = LargeStruct::from(res); - assert_eq!(&res, v, "Update is wrong on contract"); - } - } - } - log::info!("Updated simple contract for mapping of LargeStruct mappings"); + T::call_contract(&contract, changes).await } } diff --git a/mp2-v1/tests/common/cases/indexing.rs b/mp2-v1/tests/common/cases/indexing.rs index 568466b68..fa613712b 100644 --- a/mp2-v1/tests/common/cases/indexing.rs +++ b/mp2-v1/tests/common/cases/indexing.rs @@ -1,6 +1,8 @@ //! Test case for local Simple contract //! Reference `test-contracts/src/Simple.sol` for the details of Simple contract. 
+use std::future::Future; + use anyhow::Result; use itertools::Itertools; use log::{debug, info}; @@ -24,6 +26,9 @@ use rand::{thread_rng, Rng}; use ryhope::storage::RoEpochKvStorage; use crate::common::{ + bindings::simple::Simple::{ + m1Call, mappingOfSingleValueMappingsCall, mappingOfStructMappingsCall, structMappingCall, + }, cases::{ contract::Contract, identifier_for_mapping_key_column, @@ -42,7 +47,16 @@ use crate::common::{ TableInfo, TestContext, }; -use super::{ContractExtractionArgs, TableIndexing, TableSource}; +use super::{ + super::bindings::simple::Simple::SimpleInstance, + slot_info::{SimpleMapping, SimpleNestedMapping, StructMapping, StructNestedMapping}, + ContractExtractionArgs, TableIndexing, TableSource, +}; +use alloy::{ + contract::private::Transport, + network::Ethereum, + providers::{ProviderBuilder, RootProvider}, +}; use mp2_common::{eth::StorageSlot, proof::ProofWithVK, types::HashOutput}; /// Test slots for single values extraction @@ -96,10 +110,13 @@ fn single_value_slot_inputs() -> Vec { slot_inputs } -impl TableIndexing { +impl TableIndexing { pub(crate) async fn merge_table_test_case( ctx: &mut TestContext, - ) -> Result<(Self, Vec>)> { + ) -> Result<( + TableIndexing, + Vec>, + )> { // Deploy the simple contract. let contract = Contract::deploy_simple_contract(ctx).await; let contract_address = contract.address; @@ -150,6 +167,7 @@ impl TableIndexing { MAPPING_STRUCT_SLOT as u8, mapping_index.clone(), slot_inputs.clone(), + None, ); // Construct the table columns. 
let (secondary_column, rest_columns) = match mapping_index { @@ -218,7 +236,7 @@ impl TableIndexing { (secondary_column, rest_columns, row_unique_id, source) }; - let mut source = TableSource::Merge(MergeSource::new(single_source, mapping_source)); + let mut source = MergeSource::new(single_source, mapping_source); let genesis_change = source.init_contract_data(ctx, &contract).await; let value_column = mapping_rest_columns[0].name.clone(); let all_columns = [single_columns.as_slice(), &mapping_rest_columns].concat(); @@ -249,14 +267,14 @@ impl TableIndexing { ) .await; Ok(( - Self { + TableIndexing:: { value_column, source: source.clone(), table, contract, - contract_extraction: ContractExtractionArgs { + contract_extraction: Some(ContractExtractionArgs { slot: StorageSlot::Simple(CONTRACT_SLOT), - }, + }), }, genesis_change, )) @@ -265,7 +283,10 @@ impl TableIndexing { /// The single value test case includes the all single value slots and one single Struct slot. pub(crate) async fn single_value_test_case( ctx: &mut TestContext, - ) -> Result<(Self, Vec>)> { + ) -> Result<( + TableIndexing, + Vec>, + )> { let rng = &mut thread_rng(); // Deploy the simple contract. 
@@ -282,7 +303,6 @@ impl TableIndexing { let indexing_genesis_block = ctx.block_number().await; let secondary_index_slot_input = source.secondary_index_slot_input().unwrap(); let rest_column_slot_inputs = source.rest_column_slot_inputs(); - let source = TableSource::Single(source); // Defining the columns structure of the table from the source slots // This is depending on what is our data source, mappings and CSV both have their @@ -339,14 +359,14 @@ impl TableIndexing { ) .await; Ok(( - Self { + TableIndexing:: { value_column: "".to_string(), source, table, contract, - contract_extraction: ContractExtractionArgs { + contract_extraction: Some(ContractExtractionArgs { slot: StorageSlot::Simple(CONTRACT_SLOT), - }, + }), }, genesis_updates, )) @@ -355,7 +375,10 @@ impl TableIndexing { /// The test case for mapping of single values pub(crate) async fn mapping_value_test_case( ctx: &mut TestContext, - ) -> Result<(Self, Vec>)> { + ) -> Result<( + TableIndexing>, + Vec>, + )> { // Deploy the simple contract. let contract = Contract::deploy_simple_contract(ctx).await; let contract_address = contract.address; @@ -369,18 +392,21 @@ impl TableIndexing { // Switch the test index. 
// let mapping_index = MappingIndex::Value(value_id); let mapping_index = MappingIndex::OuterKey(key_id); - let args = MappingExtractionArgs::new( + let mut source = MappingExtractionArgs::::new( MAPPING_SLOT, mapping_index.clone(), vec![slot_input.clone()], - ); - let mut source = TableSource::MappingValues( - args, Some(LengthExtractionArgs { slot: LENGTH_SLOT, value: LENGTH_VALUE, }), ); + + let contract = Contract { + address: contract_address, + chain_id, + }; + let table_row_updates = source.init_contract_data(ctx, &contract).await; let table = build_mapping_table( @@ -394,11 +420,11 @@ impl TableIndexing { let value_column = table.columns.rest[0].name.clone(); Ok(( - Self { + TableIndexing::> { value_column, - contract_extraction: ContractExtractionArgs { + contract_extraction: Some(ContractExtractionArgs { slot: StorageSlot::Simple(CONTRACT_SLOT), - }, + }), contract, source, table, @@ -410,7 +436,10 @@ impl TableIndexing { /// The test case for mapping of Struct values pub(crate) async fn mapping_struct_test_case( ctx: &mut TestContext, - ) -> Result<(Self, Vec>)> { + ) -> Result<( + TableIndexing>, + Vec>, + )> { // Deploy the simple contract. let contract = Contract::deploy_simple_contract(ctx).await; let contract_address = contract.address; @@ -432,23 +461,24 @@ impl TableIndexing { // Switch the test index. 
// let mapping_index = MappingIndex::OuterKey(key_id); let mapping_index = MappingIndex::Value(value_ids[1]); - let args = MappingExtractionArgs::new( + let mut source = MappingExtractionArgs::::new( MAPPING_STRUCT_SLOT as u8, mapping_index.clone(), slot_inputs.clone(), + None, ); - let mut source = TableSource::MappingStruct(args, None); + let table_row_updates = source.init_contract_data(ctx, &contract).await; let table = build_mapping_table(ctx, &mapping_index, key_id, value_ids, slot_inputs).await; let value_column = table.columns.rest[0].name.clone(); Ok(( - Self { + TableIndexing::> { value_column, - contract_extraction: ContractExtractionArgs { + contract_extraction: Some(ContractExtractionArgs { slot: StorageSlot::Simple(CONTRACT_SLOT), - }, + }), contract, source, table, @@ -459,7 +489,10 @@ impl TableIndexing { pub(crate) async fn mapping_of_single_value_mappings_test_case( ctx: &mut TestContext, - ) -> Result<(Self, Vec>)> { + ) -> Result<( + TableIndexing>, + Vec>, + )> { // Deploy the simple contract. 
let contract = Contract::deploy_simple_contract(ctx).await; let contract_address = contract.address; @@ -484,12 +517,13 @@ impl TableIndexing { // let index = MappingIndex::Value(value_id); // let index = MappingIndex::OuterKey(outer_key_id); let index = MappingIndex::InnerKey(inner_key_id); - let args = MappingExtractionArgs::new( + let mut source = MappingExtractionArgs::::new( MAPPING_OF_SINGLE_VALUE_MAPPINGS_SLOT, index.clone(), vec![slot_input.clone()], + None, ); - let mut source = TableSource::MappingOfSingleValueMappings(args); + let table_row_updates = source.init_contract_data(ctx, &contract).await; let table = build_mapping_of_mappings_table( @@ -504,11 +538,11 @@ impl TableIndexing { let value_column = table.columns.rest[0].name.clone(); Ok(( - Self { + TableIndexing::> { value_column, - contract_extraction: ContractExtractionArgs { + contract_extraction: Some(ContractExtractionArgs { slot: StorageSlot::Simple(CONTRACT_SLOT), - }, + }), contract, source, table, @@ -519,7 +553,10 @@ impl TableIndexing { pub(crate) async fn mapping_of_struct_mappings_test_case( ctx: &mut TestContext, - ) -> Result<(Self, Vec>)> { + ) -> Result<( + TableIndexing>, + Vec>, + )> { // Deploy the simple contract. 
let contract = Contract::deploy_simple_contract(ctx).await; let contract_address = contract.address; @@ -548,12 +585,13 @@ impl TableIndexing { // let index = MappingIndex::OuterKey(outer_key_id); // let index = MappingIndex::InnerKey(inner_key_id); let index = MappingIndex::Value(value_ids[1]); - let args = MappingExtractionArgs::new( + let mut source = MappingExtractionArgs::::new( MAPPING_OF_STRUCT_MAPPINGS_SLOT, index.clone(), slot_inputs.clone(), + None, ); - let mut source = TableSource::MappingOfStructMappings(args); + let table_row_updates = source.init_contract_data(ctx, &contract).await; let table = build_mapping_of_mappings_table( @@ -568,11 +606,11 @@ impl TableIndexing { let value_column = table.columns.rest[0].name.clone(); Ok(( - Self { + TableIndexing::> { value_column, - contract_extraction: ContractExtractionArgs { + contract_extraction: Some(ContractExtractionArgs { slot: StorageSlot::Simple(CONTRACT_SLOT), - }, + }), contract, source, table, @@ -781,34 +819,39 @@ impl TableIndexing { proof } Err(_) => { - let contract_proof = ctx - .prove_contract_extraction( - &self.contract.address, - self.contract_extraction.slot.clone(), - bn, - ) - .await; - ctx.storage - .store_proof(contract_proof_key, contract_proof.clone())?; - info!( - "Generated Contract Extraction (C.3) proof for block number {}", - bn - ); - { - let pvk = ProofWithVK::deserialize(&contract_proof)?; - let pis = - contract_extraction::PublicInputs::from_slice(&pvk.proof().public_inputs); - debug!( - " CONTRACT storage root pis.storage_root() {:?}", - hex::encode( - pis.root_hash_field() - .into_iter() - .flat_map(|u| u.to_be_bytes()) - .collect::>() + if let Some(contract_extraction) = &self.contract_extraction { + let contract_proof = ctx + .prove_contract_extraction( + &self.contract.address, + contract_extraction.slot.clone(), + bn, ) + .await; + ctx.storage + .store_proof(contract_proof_key, contract_proof.clone())?; + info!( + "Generated Contract Extraction (C.3) proof for block 
number {}", + bn ); + { + let pvk = ProofWithVK::deserialize(&contract_proof)?; + let pis = contract_extraction::PublicInputs::from_slice( + &pvk.proof().public_inputs, + ); + debug!( + " CONTRACT storage root pis.storage_root() {:?}", + hex::encode( + pis.root_hash_field() + .into_iter() + .flat_map(|u| u.to_be_bytes()) + .collect::>() + ) + ); + } + contract_proof + } else { + vec![] } - contract_proof } }; @@ -1073,6 +1116,15 @@ async fn build_mapping_of_mappings_table( .await } +pub trait ContractUpdate: std::fmt::Debug +where + T: Transport + Clone, +{ + type Contract; + + fn apply_to(&self, ctx: &TestContext, contract: &Self::Contract) -> impl Future; +} + #[derive(Clone, Debug)] pub enum ChangeType { Deletion, @@ -1241,12 +1293,12 @@ where } } -impl TableIndexing { - pub fn table_info(&self) -> TableInfo { +impl TableIndexing { + pub fn table_info(&self) -> TableInfo { TableInfo { public_name: self.table.public_name.clone(), value_column: self.value_column.clone(), - chain_id: self.contract.chain_id, + chain_id: self.contract.chain_id(), columns: self.table.columns.clone(), contract_address: self.contract.address, source: self.source.clone(), diff --git a/mp2-v1/tests/common/cases/mod.rs b/mp2-v1/tests/common/cases/mod.rs index eb94d6563..05dff5a8e 100644 --- a/mp2-v1/tests/common/cases/mod.rs +++ b/mp2-v1/tests/common/cases/mod.rs @@ -13,11 +13,11 @@ pub mod slot_info; pub mod table_source; /// Test case definition -pub(crate) struct TableIndexing { +pub(crate) struct TableIndexing { pub(crate) table: Table, pub(crate) contract: Contract, - pub(crate) contract_extraction: ContractExtractionArgs, - pub(crate) source: TableSource, + pub(crate) contract_extraction: Option, + pub(crate) source: T, // the column over which we can do queries like ` y > 64`. It is not the address column that we // assume it the secondary index always. 
pub(crate) value_column: String, diff --git a/mp2-v1/tests/common/cases/query/aggregated_queries.rs b/mp2-v1/tests/common/cases/query/aggregated_queries.rs index 0d4194f84..e0cf75396 100644 --- a/mp2-v1/tests/common/cases/query/aggregated_queries.rs +++ b/mp2-v1/tests/common/cases/query/aggregated_queries.rs @@ -3,16 +3,19 @@ use plonky2::{ }; use std::collections::HashMap; -use crate::common::{ - cases::{ - indexing::BLOCK_COLUMN_NAME, - query::{QueryCooking, SqlReturn, SqlType, NUM_CHUNKS, NUM_ROWS}, - table_source::BASE_VALUE, +use crate::{ + common::{ + cases::{ + indexing::BLOCK_COLUMN_NAME, + query::{QueryCooking, SqlReturn, SqlType, NUM_CHUNKS, NUM_ROWS}, + table_source::BASE_VALUE, + }, + proof_storage::{ProofKey, ProofStorage}, + rowtree::MerkleRowTree, + table::Table, + TableInfo, }, - proof_storage::{ProofKey, ProofStorage}, - rowtree::MerkleRowTree, - table::Table, - TableInfo, + TableSource, }; use crate::context::TestContext; @@ -424,9 +427,9 @@ pub(crate) fn check_final_outputs( type BlockRange = (BlockPrimaryIndex, BlockPrimaryIndex); -pub(crate) async fn cook_query_between_blocks( +pub(crate) async fn cook_query_between_blocks( table: &Table, - info: &TableInfo, + info: &TableInfo, ) -> Result { let max = table.row.current_epoch(); let min = max - 1; @@ -451,9 +454,9 @@ pub(crate) async fn cook_query_between_blocks( }) } -pub(crate) async fn cook_query_secondary_index_nonexisting_placeholder( +pub(crate) async fn cook_query_secondary_index_nonexisting_placeholder( table: &Table, - info: &TableInfo, + info: &TableInfo, ) -> Result { let (longest_key, (min_block, max_block)) = find_longest_lived_key(table, false).await?; let key_value = hex::encode(longest_key.value.to_be_bytes_trimmed_vec()); @@ -499,9 +502,9 @@ pub(crate) async fn cook_query_secondary_index_nonexisting_placeholder( // cook up a SQL query on the secondary index and with a predicate on the non-indexed column. 
// we just iterate on mapping keys and take the one that exist for most blocks. We also choose // a value to filter over the non-indexed column -pub(crate) async fn cook_query_secondary_index_placeholder( +pub(crate) async fn cook_query_secondary_index_placeholder( table: &Table, - info: &TableInfo, + info: &TableInfo, ) -> Result { let (longest_key, (min_block, max_block)) = find_longest_lived_key(table, false).await?; let key_value = hex::encode(longest_key.value.to_be_bytes_trimmed_vec()); @@ -544,9 +547,9 @@ pub(crate) async fn cook_query_secondary_index_placeholder( // cook up a SQL query on the secondary index. For that we just iterate on mapping keys and // take the one that exist for most blocks -pub(crate) async fn cook_query_unique_secondary_index( +pub(crate) async fn cook_query_unique_secondary_index( table: &Table, - info: &TableInfo, + info: &TableInfo, ) -> Result { let (longest_key, (min_block, max_block)) = find_longest_lived_key(table, false).await?; let key_value = hex::encode(longest_key.value.to_be_bytes_trimmed_vec()); @@ -620,9 +623,9 @@ pub(crate) async fn cook_query_unique_secondary_index( }) } -pub(crate) async fn cook_query_partial_block_range( +pub(crate) async fn cook_query_partial_block_range( table: &Table, - info: &TableInfo, + info: &TableInfo, ) -> Result { let (longest_key, (min_block, max_block)) = find_longest_lived_key(table, false).await?; let key_value = hex::encode(longest_key.value.to_be_bytes_trimmed_vec()); @@ -656,9 +659,9 @@ pub(crate) async fn cook_query_partial_block_range( }) } -pub(crate) async fn cook_query_no_matching_entries( +pub(crate) async fn cook_query_no_matching_entries( table: &Table, - info: &TableInfo, + info: &TableInfo, ) -> Result { let initial_epoch = table.row.initial_epoch(); // choose query bounds outside of the range [initial_epoch, last_epoch] @@ -688,9 +691,9 @@ pub(crate) async fn cook_query_no_matching_entries( /// Cook a query where there are no entries satisying the secondary query bounds 
only for some /// blocks of the primary index bounds (not for all the blocks) -pub(crate) async fn cook_query_non_matching_entries_some_blocks( +pub(crate) async fn cook_query_non_matching_entries_some_blocks( table: &Table, - info: &TableInfo, + info: &TableInfo, ) -> Result { let (longest_key, (min_block, max_block)) = find_longest_lived_key(table, true).await?; let key_value = hex::encode(longest_key.value.to_be_bytes_trimmed_vec()); diff --git a/mp2-v1/tests/common/cases/query/mod.rs b/mp2-v1/tests/common/cases/query/mod.rs index 95243baf0..03c6fddf1 100644 --- a/mp2-v1/tests/common/cases/query/mod.rs +++ b/mp2-v1/tests/common/cases/query/mod.rs @@ -5,7 +5,7 @@ use aggregated_queries::{ cook_query_unique_secondary_index, prove_query as prove_aggregation_query, }; use alloy::primitives::U256; -use anyhow::{Context, Result}; +use anyhow::{anyhow, Context, Result}; use itertools::Itertools; use log::info; use mp2_v1::{ @@ -31,7 +31,7 @@ use crate::common::{ TableInfo, TestContext, }; -use super::table_source::TableSource; +use super::TableSource; pub mod aggregated_queries; pub mod simple_select_queries; @@ -103,19 +103,25 @@ pub(crate) struct QueryPlanner<'a> { pub(crate) columns: TableColumns, } -pub async fn test_query(ctx: &mut TestContext, table: Table, t: TableInfo) -> Result<()> { - match &t.source { - TableSource::MappingValues(_, _) - | TableSource::Merge(_) - | TableSource::MappingStruct(_, _) - | TableSource::MappingOfSingleValueMappings(_) - | TableSource::MappingOfStructMappings(_) => query_mapping(ctx, &table, &t).await?, - _ => unimplemented!("yet"), +pub async fn test_query( + ctx: &mut TestContext, + table: Table, + t: TableInfo, +) -> Result<()> { + if t.source.can_query() { + query_mapping(ctx, &table, &t).await?; + } else { + return Err(anyhow!("Can't query this type of table source yet")); } + Ok(()) } -async fn query_mapping(ctx: &mut TestContext, table: &Table, info: &TableInfo) -> Result<()> { +async fn query_mapping( + ctx: &mut 
TestContext, + table: &Table, + info: &TableInfo, +) -> Result<()> { let table_hash = info.metadata_hash(); let query_info = cook_query_between_blocks(table, info).await?; test_query_mapping(ctx, table, query_info, &table_hash).await?; diff --git a/mp2-v1/tests/common/cases/query/simple_select_queries.rs b/mp2-v1/tests/common/cases/query/simple_select_queries.rs index 18a4d9804..ad8f28b51 100644 --- a/mp2-v1/tests/common/cases/query/simple_select_queries.rs +++ b/mp2-v1/tests/common/cases/query/simple_select_queries.rs @@ -34,17 +34,20 @@ use verifiable_db::{ test_utils::MAX_NUM_OUTPUTS, }; -use crate::common::{ - cases::{ - indexing::BLOCK_COLUMN_NAME, - query::{ - aggregated_queries::{check_final_outputs, find_longest_lived_key}, - GlobalCircuitInput, QueryPlanner, RevelationCircuitInput, SqlReturn, SqlType, +use crate::{ + common::{ + cases::{ + indexing::BLOCK_COLUMN_NAME, + query::{ + aggregated_queries::{check_final_outputs, find_longest_lived_key}, + GlobalCircuitInput, QueryPlanner, RevelationCircuitInput, SqlReturn, SqlType, + }, }, + proof_storage::{ProofKey, ProofStorage}, + table::{Table, TableColumns}, + TableInfo, }, - proof_storage::{ProofKey, ProofStorage}, - table::{Table, TableColumns}, - TableInfo, + TableSource, }; use super::{QueryCircuitInput, QueryCooking, TestContext}; @@ -308,9 +311,9 @@ pub(crate) async fn prove_single_row( table: &Table, - info: &TableInfo, + info: &TableInfo, ) -> Result { let (longest_key, (min_block, max_block)) = find_longest_lived_key(table, false).await?; let key_value = hex::encode(longest_key.value.to_be_bytes_trimmed_vec()); @@ -351,9 +354,9 @@ pub(crate) async fn cook_query_with_max_num_matching_rows( }) } -pub(crate) async fn cook_query_with_matching_rows( +pub(crate) async fn cook_query_with_matching_rows( table: &Table, - info: &TableInfo, + info: &TableInfo, ) -> Result { let (longest_key, (min_block, max_block)) = find_longest_lived_key(table, false).await?; let key_value = 
hex::encode(longest_key.value.to_be_bytes_trimmed_vec()); @@ -397,9 +400,9 @@ pub(crate) async fn cook_query_with_matching_rows( } /// Cook a query where the offset is big enough to have no matching rows -pub(crate) async fn cook_query_too_big_offset( +pub(crate) async fn cook_query_too_big_offset( table: &Table, - info: &TableInfo, + info: &TableInfo, ) -> Result { let (longest_key, (min_block, max_block)) = find_longest_lived_key(table, false).await?; let key_value = hex::encode(longest_key.value.to_be_bytes_trimmed_vec()); @@ -440,9 +443,9 @@ pub(crate) async fn cook_query_too_big_offset( }) } -pub(crate) async fn cook_query_no_matching_rows( +pub(crate) async fn cook_query_no_matching_rows( table: &Table, - info: &TableInfo, + info: &TableInfo, ) -> Result { let initial_epoch = table.index.initial_epoch(); let current_epoch = table.index.current_epoch(); @@ -486,9 +489,9 @@ pub(crate) async fn cook_query_no_matching_rows( }) } -pub(crate) async fn cook_query_with_distinct( +pub(crate) async fn cook_query_with_distinct( table: &Table, - info: &TableInfo, + info: &TableInfo, ) -> Result { let (longest_key, (min_block, max_block)) = find_longest_lived_key(table, false).await?; let key_value = hex::encode(longest_key.value.to_be_bytes_trimmed_vec()); @@ -529,10 +532,10 @@ pub(crate) async fn cook_query_with_distinct( }) } -pub(crate) async fn cook_query_with_wildcard( +pub(crate) async fn cook_query_with_wildcard( table: &Table, distinct: bool, - info: &TableInfo, + info: &TableInfo, ) -> Result { let (longest_key, (min_block, max_block)) = find_longest_lived_key(table, false).await?; let key_value = hex::encode(longest_key.value.to_be_bytes_trimmed_vec()); @@ -583,16 +586,16 @@ pub(crate) async fn cook_query_with_wildcard( }) } -pub(crate) async fn cook_query_with_wildcard_no_distinct( +pub(crate) async fn cook_query_with_wildcard_no_distinct( table: &Table, - info: &TableInfo, + info: &TableInfo, ) -> Result { cook_query_with_wildcard(table, false, info).await } 
-pub(crate) async fn cook_query_with_wildcard_and_distinct( +pub(crate) async fn cook_query_with_wildcard_and_distinct( table: &Table, - info: &TableInfo, + info: &TableInfo, ) -> Result { cook_query_with_wildcard(table, true, info).await } diff --git a/mp2-v1/tests/common/cases/slot_info.rs b/mp2-v1/tests/common/cases/slot_info.rs index 3928373d9..0e1465999 100644 --- a/mp2-v1/tests/common/cases/slot_info.rs +++ b/mp2-v1/tests/common/cases/slot_info.rs @@ -1,9 +1,22 @@ //! Mapping key, storage value types and related functions for the storage slot -use crate::common::bindings::simple::Simple::{ - mappingOfStructMappingsReturn, simpleStructReturn, structMappingReturn, +use crate::common::{ + bindings::simple::{ + self, + Simple::{ + mappingOfStructMappingsReturn, simpleStructReturn, structMappingReturn, MappingChange, + MappingOfSingleValueMappingsChange, MappingOfStructMappingsChange, MappingOperation, + MappingStructChange, SimpleInstance, + }, + }, + Deserialize, Serialize, +}; +use alloy::{ + network::Network, + primitives::{Address, U256}, + providers::Provider, + transports::Transport, }; -use alloy::primitives::{Address, U256}; use derive_more::Constructor; use itertools::Itertools; use log::warn; @@ -13,18 +26,317 @@ use mp2_common::{ }; use mp2_v1::api::{SlotInput, SlotInputs}; use rand::{thread_rng, Rng}; -use serde::{Deserialize, Serialize}; -use std::{array, fmt::Debug}; + +use std::{array, fmt::Debug, future::Future, hash::Hash}; + +use super::contract::MappingUpdate; + +pub(crate) trait MappingInfo: StorageSlotMappingKey { + type Value: StorageSlotValue; + type Call; + fn to_call(update: &MappingUpdate) -> Self::Call; + + fn call_contract, N: Network>( + contract: &SimpleInstance, + changes: Vec, + ) -> impl Future + Send; +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize, Hash)] +pub struct SimpleMapping { + inner: U256, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize, 
Hash)] +pub struct StructMapping { + inner: U256, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize, Hash)] +pub struct SimpleNestedMapping { + outer: U256, + inner: U256, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize, Hash)] +pub struct StructNestedMapping { + outer: U256, + inner: U256, +} + +impl StorageSlotMappingKey for StructNestedMapping { + type Key = U256; + + const NO_KEYS: usize = 2; + + fn sample_key() -> Self { + let rng = &mut thread_rng(); + StructNestedMapping { + outer: U256::from_limbs(rng.gen()), + inner: U256::from_limbs(rng.gen()), + } + } + + fn slot_inputs(slot_inputs: Vec, length: Option) -> SlotInputs { + if let Some(length_slot) = length { + SlotInputs::MappingWithLength(slot_inputs, length_slot) + } else { + SlotInputs::MappingOfMappings(slot_inputs) + } + } + fn to_u256_vec(&self) -> Vec { + vec![self.outer, self.inner] + } + fn storage_slot(&self, slot: u8, evm_word: u32) -> StorageSlot { + let storage_slot = { + let parent_slot = StorageSlot::Mapping(self.outer.to_be_bytes_vec(), slot as usize); + StorageSlot::Node( + StorageSlotNode::new_mapping(parent_slot, self.inner.to_be_bytes_vec()).unwrap(), + ) + }; + if evm_word == 0 { + // We could construct the mapping slot for the EVM word of 0 directly even if the + // mapping value is a Struct, since the returned storage slot is only used to compute + // the slot location, and it's same with the Struct mapping and the EVM word of 0. + return storage_slot; + } + + // It's definitely a Struct if the EVM word is non zero. 
+ StorageSlot::Node(StorageSlotNode::new_struct(storage_slot, evm_word)) + } +} + +impl MappingInfo for StructNestedMapping { + type Value = LargeStruct; + type Call = MappingOfStructMappingsChange; + fn to_call(update: &MappingUpdate) -> MappingOfStructMappingsChange { + let op: MappingOperation = update.into(); + + let (key, value) = update.to_tuple(); + + MappingOfStructMappingsChange { + outerKey: key.outer, + innerKey: key.inner, + field1: value.field1, + field2: value.field2, + field3: value.field3, + operation: op.into(), + } + } + + async fn call_contract, N: Network>( + contract: &SimpleInstance, + changes: Vec, + ) { + let call = contract.changeMapping_0(changes); + call.send().await.unwrap().watch().await.unwrap(); + } +} + +impl StorageSlotMappingKey for SimpleNestedMapping { + type Key = U256; + + const NO_KEYS: usize = 2; + + fn sample_key() -> Self { + let rng = &mut thread_rng(); + SimpleNestedMapping { + outer: U256::from_limbs(rng.gen()), + inner: U256::from_limbs(rng.gen()), + } + } + + fn slot_inputs(slot_inputs: Vec, length: Option) -> SlotInputs { + if let Some(length_slot) = length { + SlotInputs::MappingWithLength(slot_inputs, length_slot) + } else { + SlotInputs::MappingOfMappings(slot_inputs) + } + } + fn to_u256_vec(&self) -> Vec { + vec![self.outer, self.inner] + } + fn storage_slot(&self, slot: u8, evm_word: u32) -> StorageSlot { + let storage_slot = { + let parent_slot = StorageSlot::Mapping(self.outer.to_be_bytes_vec(), slot as usize); + StorageSlot::Node( + StorageSlotNode::new_mapping(parent_slot, self.inner.to_be_bytes_vec()).unwrap(), + ) + }; + if evm_word == 0 { + // We could construct the mapping slot for the EVM word of 0 directly even if the + // mapping value is a Struct, since the returned storage slot is only used to compute + // the slot location, and it's same with the Struct mapping and the EVM word of 0. + return storage_slot; + } + + // It's definitely a Struct if the EVM word is non zero. 
+ StorageSlot::Node(StorageSlotNode::new_struct(storage_slot, evm_word)) + } +} + +impl MappingInfo for SimpleNestedMapping { + type Value = U256; + type Call = MappingOfSingleValueMappingsChange; + fn to_call(update: &MappingUpdate) -> MappingOfSingleValueMappingsChange { + let op: MappingOperation = update.into(); + + let (key, value) = update.to_tuple(); + + MappingOfSingleValueMappingsChange { + outerKey: key.outer, + innerKey: key.inner, + value, + operation: op.into(), + } + } + + async fn call_contract, N: Network>( + contract: &SimpleInstance, + changes: Vec, + ) { + let call = contract.changeMapping_2(changes); + call.send().await.unwrap().watch().await.unwrap(); + } +} + +impl StorageSlotMappingKey for SimpleMapping { + type Key = U256; + + const NO_KEYS: usize = 1; + + fn sample_key() -> Self { + SimpleMapping { + inner: sample_u256(), + } + } + fn slot_inputs(slot_inputs: Vec, length: Option) -> SlotInputs { + if let Some(length_slot) = length { + SlotInputs::MappingWithLength(slot_inputs, length_slot) + } else { + SlotInputs::Mapping(slot_inputs) + } + } + fn to_u256_vec(&self) -> Vec { + vec![self.inner] + } + fn storage_slot(&self, slot: u8, evm_word: u32) -> StorageSlot { + let storage_slot = StorageSlot::Mapping(self.inner.to_be_bytes_vec(), slot as usize); + if evm_word == 0 { + // We could construct the mapping slot for the EVM word of 0 directly even if the + // mapping value is a Struct, since the returned storage slot is only used to compute + // the slot location, and it's same with the Struct mapping and the EVM word of 0. + return storage_slot; + } + + // It's definitely a Struct if the EVM word is non zero. 
+ StorageSlot::Node(StorageSlotNode::new_struct(storage_slot, evm_word)) + } +} + +impl MappingInfo for SimpleMapping { + type Value = Address; + type Call = MappingChange; + + fn to_call(update: &MappingUpdate) -> Self::Call { + let op: MappingOperation = update.into(); + + let (key, value) = update.to_tuple(); + + MappingChange { + key: key.inner, + value, + operation: op.into(), + } + } + + async fn call_contract, N: Network>( + contract: &SimpleInstance, + changes: Vec, + ) { + let call = contract.changeMapping_1(changes); + call.send().await.unwrap().watch().await.unwrap(); + } +} + +impl StorageSlotMappingKey for StructMapping { + type Key = U256; + + const NO_KEYS: usize = 1; + + fn sample_key() -> Self { + StructMapping { + inner: sample_u256(), + } + } + fn slot_inputs(slot_inputs: Vec, length: Option) -> SlotInputs { + if let Some(length_slot) = length { + SlotInputs::MappingWithLength(slot_inputs, length_slot) + } else { + SlotInputs::Mapping(slot_inputs) + } + } + fn to_u256_vec(&self) -> Vec { + vec![self.inner] + } + fn storage_slot(&self, slot: u8, evm_word: u32) -> StorageSlot { + let storage_slot = StorageSlot::Mapping(self.inner.to_be_bytes_vec(), slot as usize); + if evm_word == 0 { + // We could construct the mapping slot for the EVM word of 0 directly even if the + // mapping value is a Struct, since the returned storage slot is only used to compute + // the slot location, and it's same with the Struct mapping and the EVM word of 0. + return storage_slot; + } + + // It's definitely a Struct if the EVM word is non zero. 
+ StorageSlot::Node(StorageSlotNode::new_struct(storage_slot, evm_word)) + } +} + +impl MappingInfo for StructMapping { + type Value = LargeStruct; + type Call = MappingStructChange; + + fn to_call(update: &MappingUpdate) -> MappingStructChange { + let op: MappingOperation = update.into(); + + let (key, value) = update.to_tuple(); + + MappingStructChange { + key: key.inner, + field1: value.field1, + field2: value.field2, + field3: value.field3, + operation: op.into(), + } + } + + async fn call_contract, N: Network>( + contract: &SimpleInstance, + changes: Vec, + ) { + let call = contract.changeMapping_3(changes); + call.send().await.unwrap().watch().await.unwrap(); + } +} /// Abstract for the mapping key of the storage slot. /// It could be a normal mapping key, or a pair of keys which identifies the /// mapping of mapppings key. -pub(crate) trait StorageSlotMappingKey: Clone + Debug + PartialOrd + Ord { +pub(crate) trait StorageSlotMappingKey: + Clone + Debug + PartialOrd + Ord + Send + Sync +{ + /// This is what the keys actually look like. + type Key; + + /// How many keys there are + const NO_KEYS: usize; + /// Generate a random key for testing. fn sample_key() -> Self; /// Construct an SlotInputs enum. - fn slot_inputs(slot_inputs: Vec) -> SlotInputs; + fn slot_inputs(slot_inputs: Vec, length: Option) -> SlotInputs; /// Convert into an Uint256 vector. 
fn to_u256_vec(&self) -> Vec; @@ -36,11 +348,19 @@ pub(crate) trait StorageSlotMappingKey: Clone + Debug + PartialOrd + Ord { pub(crate) type MappingKey = U256; impl StorageSlotMappingKey for MappingKey { + type Key = U256; + + const NO_KEYS: usize = 1; + fn sample_key() -> Self { sample_u256() } - fn slot_inputs(slot_inputs: Vec) -> SlotInputs { - SlotInputs::Mapping(slot_inputs) + fn slot_inputs(slot_inputs: Vec, length: Option) -> SlotInputs { + if let Some(length_slot) = length { + SlotInputs::MappingWithLength(slot_inputs, length_slot) + } else { + SlotInputs::Mapping(slot_inputs) + } } fn to_u256_vec(&self) -> Vec { vec![*self] @@ -68,12 +388,20 @@ pub(crate) struct MappingOfMappingsKey { } impl StorageSlotMappingKey for MappingOfMappingsKey { + type Key = U256; + + const NO_KEYS: usize = 2; + fn sample_key() -> Self { let [outer_key, inner_key] = array::from_fn(|_| MappingKey::sample_key()); Self::new(outer_key, inner_key) } - fn slot_inputs(slot_inputs: Vec) -> SlotInputs { - SlotInputs::MappingOfMappings(slot_inputs) + fn slot_inputs(slot_inputs: Vec, length: Option) -> SlotInputs { + if let Some(length_slot) = length { + SlotInputs::MappingWithLength(slot_inputs, length_slot) + } else { + SlotInputs::MappingOfMappings(slot_inputs) + } } fn to_u256_vec(&self) -> Vec { vec![self.outer_key, self.inner_key] @@ -100,7 +428,10 @@ impl StorageSlotMappingKey for MappingOfMappingsKey { /// Abstract for the value saved in the storage slot. /// It could be a single value as Uint256 or a Struct. -pub trait StorageSlotValue: Clone { +pub trait StorageSlotValue: Clone + Send + Sync { + /// The number of fields this value has. + const NUM_FIELDS: usize; + /// Generate a random value for testing. 
fn sample_value() -> Self; @@ -115,6 +446,8 @@ pub trait StorageSlotValue: Clone { } impl StorageSlotValue for Address { + const NUM_FIELDS: usize = 1; + fn sample_value() -> Self { Address::random() } @@ -139,6 +472,8 @@ impl StorageSlotValue for Address { } impl StorageSlotValue for U256 { + const NUM_FIELDS: usize = 1; + fn sample_value() -> Self { U256::from(sample_u128()) // sample as u128 to be safe for overflow in queries } @@ -172,7 +507,7 @@ fn sample_u128() -> u128 { rng.gen() } -#[derive(Clone, Debug, Default, Eq, PartialEq, Hash)] +#[derive(Clone, Debug, Default, Eq, PartialEq, Hash, Serialize, Deserialize, Copy)] pub struct LargeStruct { pub(crate) field1: U256, pub(crate) field2: u128, @@ -180,6 +515,8 @@ pub struct LargeStruct { } impl StorageSlotValue for LargeStruct { + const NUM_FIELDS: usize = 3; + fn sample_value() -> Self { let field1 = U256::from(sample_u128()); // sample as u128 to be safe for overflow in queries let [field2, field3] = array::from_fn(|_| sample_u128()); @@ -227,8 +564,6 @@ impl StorageSlotValue for LargeStruct { } impl LargeStruct { - pub const FIELD_NUM: usize = 3; - pub fn new(field1: U256, field2: u128, field3: u128) -> Self { Self { field1, @@ -279,7 +614,7 @@ impl From for LargeStruct { impl From<&[[u8; MAPPING_LEAF_VALUE_LEN]]> for LargeStruct { fn from(fields: &[[u8; MAPPING_LEAF_VALUE_LEN]]) -> Self { - assert_eq!(fields.len(), Self::FIELD_NUM); + assert_eq!(fields.len(), Self::NUM_FIELDS); let fields = fields .iter() @@ -297,3 +632,13 @@ impl From<&[[u8; MAPPING_LEAF_VALUE_LEN]]> for LargeStruct { } } } + +impl From for simple::Simple::LargeStruct { + fn from(value: LargeStruct) -> Self { + Self { + field1: value.field1, + field2: value.field2, + field3: value.field3, + } + } +} diff --git a/mp2-v1/tests/common/cases/table_source.rs b/mp2-v1/tests/common/cases/table_source.rs index 25e88464d..3e1767bce 100644 --- a/mp2-v1/tests/common/cases/table_source.rs +++ b/mp2-v1/tests/common/cases/table_source.rs @@ -2,7 +2,7 
@@ use std::{ array, assert_matches::assert_matches, collections::{BTreeSet, HashMap}, - marker::PhantomData, + future::Future, str::FromStr, sync::atomic::{AtomicU64, AtomicUsize}, }; @@ -16,12 +16,15 @@ use futures::{future::BoxFuture, FutureExt}; use itertools::Itertools; use log::{debug, info}; use mp2_common::{ - eth::{ProofQuery, StorageSlot, StorageSlotNode}, + eth::{EventLogInfo, ProofQuery, StorageSlot, StorageSlotNode}, proof::ProofWithVK, types::HashOutput, }; use mp2_v1::{ - api::{compute_table_info, merge_metadata_hash, metadata_hash, SlotInput, SlotInputs}, + api::{ + compute_table_info, merge_metadata_hash, metadata_hash as metadata_hash_function, + SlotInput, SlotInputs, + }, indexing::{ block::BlockPrimaryIndex, cell::Cell, @@ -39,26 +42,37 @@ use rand::{ rngs::StdRng, Rng, SeedableRng, }; -use serde::{Deserialize, Serialize}; use crate::common::{ final_extraction::{ExtractionProofInput, ExtractionTableProof, MergeExtractionProof}, proof_storage::{ProofKey, ProofStorage}, rowtree::SecondaryIndexCell, table::CellsUpdate, - TestContext, TEST_MAX_COLUMNS, TEST_MAX_FIELD_PER_EVM, + Deserialize, MetadataHash, Serialize, TestContext, TEST_MAX_COLUMNS, TEST_MAX_FIELD_PER_EVM, }; use super::{ - contract::{Contract, ContractController, MappingUpdate, SimpleSingleValues}, + contract::{Contract, ContractController, MappingUpdate, SimpleSingleValues, TestContract}, indexing::{ ChangeType, TableRowUpdate, TableRowValues, UpdateType, SINGLE_SLOTS, SINGLE_STRUCT_SLOT, }, - slot_info::{ - LargeStruct, MappingKey, MappingOfMappingsKey, StorageSlotMappingKey, StorageSlotValue, - }, + slot_info::{LargeStruct, MappingInfo, StorageSlotMappingKey, StorageSlotValue, StructMapping}, }; +fn metadata_hash( + slot_input: SlotInputs, + contract_address: &Address, + chain_id: u64, + extra: Vec, +) -> MetadataHash { + metadata_hash_function::( + slot_input, + contract_address, + chain_id, + extra, + ) +} + /// Save the columns information of same slot and EVM word. 
#[derive(Debug)] struct SlotEvmWordColumns(Vec); @@ -308,126 +322,126 @@ impl UniqueMappingEntry { } } -#[derive(Serialize, Deserialize, Debug, Hash, Clone, PartialEq, Eq)] -pub(crate) enum TableSource { - /// Test arguments for simple slots which stores both single values and Struct values - Single(SingleExtractionArgs), - /// Test arguments for mapping slots which stores single values - MappingValues( - MappingExtractionArgs, - Option, - ), - /// Test arguments for mapping slots which stores the Struct values - MappingStruct( - MappingExtractionArgs, - Option, - ), - /// Test arguments for mapping of mappings slot which stores single values - MappingOfSingleValueMappings(MappingExtractionArgs), - /// Test arguments for mapping of mappings slot which stores the Struct values - MappingOfStructMappings(MappingExtractionArgs), - /// Test arguments for the merge source of both simple and mapping values - Merge(MergeSource), +pub(crate) trait TableSource { + type Metadata; + + fn get_data(&self) -> Self::Metadata; + + fn init_contract_data<'a>( + &'a mut self, + ctx: &'a mut TestContext, + contract: &'a Contract, + ) -> BoxFuture<'a, Vec>>; + + fn generate_extraction_proof_inputs( + &self, + ctx: &mut TestContext, + contract: &Contract, + value_key: ProofKey, + ) -> impl Future>; + + fn random_contract_update<'a>( + &'a mut self, + ctx: &'a mut TestContext, + contract: &'a Contract, + c: ChangeType, + ) -> BoxFuture<'a, Vec>>; + + fn metadata_hash(&self, contract_address: Address, chain_id: u64) -> MetadataHash; + + fn can_query(&self) -> bool; } -impl TableSource { - pub async fn generate_extraction_proof_inputs( +impl TableSource for SingleExtractionArgs { + type Metadata = SlotInputs; + + fn get_data(&self) -> SlotInputs { + SlotInputs::Simple(self.slot_inputs.clone()) + } + + fn init_contract_data<'a>( + &'a mut self, + ctx: &'a mut TestContext, + contract: &'a Contract, + ) -> BoxFuture<'a, Vec>> { + async move { SingleExtractionArgs::init_contract_data(self, ctx, 
contract).await }.boxed() + } + + async fn generate_extraction_proof_inputs( &self, ctx: &mut TestContext, contract: &Contract, value_key: ProofKey, ) -> Result<(ExtractionProofInput, HashOutput)> { - match self { - TableSource::Single(ref args) => { - args.generate_extraction_proof_inputs(ctx, contract, value_key) - .await - } - TableSource::MappingValues(ref args, _) => { - args.generate_extraction_proof_inputs(ctx, contract, value_key) - .await - } - TableSource::MappingStruct(ref args, _) => { - args.generate_extraction_proof_inputs(ctx, contract, value_key) - .await - } - TableSource::MappingOfSingleValueMappings(ref args) => { - args.generate_extraction_proof_inputs(ctx, contract, value_key) - .await - } - TableSource::MappingOfStructMappings(ref args) => { - args.generate_extraction_proof_inputs(ctx, contract, value_key) - .await - } - TableSource::Merge(ref args) => { - args.generate_extraction_proof_inputs(ctx, contract, value_key) - .await - } - } + SingleExtractionArgs::generate_extraction_proof_inputs(self, ctx, contract, value_key).await } - #[allow(elided_named_lifetimes)] - pub fn init_contract_data<'a>( + fn random_contract_update<'a>( &'a mut self, ctx: &'a mut TestContext, contract: &'a Contract, - ) -> BoxFuture>> { - async move { - match self { - TableSource::Single(ref mut args) => args.init_contract_data(ctx, contract).await, - TableSource::MappingValues(ref mut args, _) => { - args.init_contract_data(ctx, contract).await - } - TableSource::MappingStruct(ref mut args, _) => { - args.init_contract_data(ctx, contract).await - } - TableSource::MappingOfSingleValueMappings(ref mut args) => { - args.init_contract_data(ctx, contract).await - } - TableSource::MappingOfStructMappings(ref mut args) => { - args.init_contract_data(ctx, contract).await - } - TableSource::Merge(ref mut args) => args.init_contract_data(ctx, contract).await, - } - } - .boxed() + c: ChangeType, + ) -> BoxFuture<'a, Vec>> { + async move { 
SingleExtractionArgs::random_contract_update(self, ctx, contract, c).await } + .boxed() } - #[allow(elided_named_lifetimes)] - pub fn random_contract_update<'a>( + fn metadata_hash(&self, contract_address: Address, chain_id: u64) -> MetadataHash { + let slot = self.get_data(); + metadata_hash(slot, &contract_address, chain_id, vec![]) + } + + fn can_query(&self) -> bool { + false + } +} + +impl TableSource for MergeSource { + type Metadata = (SlotInputs, SlotInputs); + fn get_data(&self) -> Self::Metadata { + (self.single.get_data(), self.mapping.get_data()) + } + + fn init_contract_data<'a>( &'a mut self, ctx: &'a mut TestContext, contract: &'a Contract, - change_type: ChangeType, - ) -> BoxFuture>> { - async move { - match self { - TableSource::Single(ref args) => { - args.random_contract_update(ctx, contract, change_type) - .await - } - TableSource::MappingValues(ref mut args, _) => { - args.random_contract_update(ctx, contract, change_type) - .await - } - TableSource::MappingStruct(ref mut args, _) => { - args.random_contract_update(ctx, contract, change_type) - .await - } - TableSource::MappingOfSingleValueMappings(ref mut args) => { - args.random_contract_update(ctx, contract, change_type) - .await - } - TableSource::MappingOfStructMappings(ref mut args) => { - args.random_contract_update(ctx, contract, change_type) - .await - } - TableSource::Merge(ref mut args) => { - args.random_contract_update(ctx, contract, change_type) - .await - } - } - } - .boxed() + ) -> BoxFuture<'a, Vec>> { + async move { self.init_contract_data(ctx, contract).await }.boxed() + } + + async fn generate_extraction_proof_inputs( + &self, + ctx: &mut TestContext, + contract: &Contract, + value_key: ProofKey, + ) -> Result<(ExtractionProofInput, HashOutput)> { + self.generate_extraction_proof_inputs(ctx, contract, value_key) + .await + } + + fn random_contract_update<'a>( + &'a mut self, + ctx: &'a mut TestContext, + contract: &'a Contract, + c: ChangeType, + ) -> BoxFuture<'a, Vec>> { 
+ async move { self.random_contract_update(ctx, contract, c).await }.boxed() + } + + fn metadata_hash(&self, contract_address: Address, chain_id: u64) -> MetadataHash { + let (single, mapping) = self.get_data(); + merge_metadata_hash::( + contract_address, + chain_id, + vec![], + single, + mapping, + ) + } + + fn can_query(&self) -> bool { + true } } @@ -437,74 +451,17 @@ pub struct MergeSource { // Extending to full merge between any table is not far - it requires some quick changes in // circuit but quite a lot of changes in integrated test. pub(crate) single: SingleExtractionArgs, - pub(crate) mapping: MappingExtractionArgs, + pub(crate) mapping: MappingExtractionArgs, } impl MergeSource { pub fn new( single: SingleExtractionArgs, - mapping: MappingExtractionArgs, + mapping: MappingExtractionArgs, ) -> Self { Self { single, mapping } } - #[allow(elided_named_lifetimes)] - pub fn generate_extraction_proof_inputs<'a>( - &'a self, - ctx: &'a mut TestContext, - contract: &'a Contract, - proof_key: ProofKey, - ) -> BoxFuture> { - async move { - let ProofKey::ValueExtraction((id, bn)) = proof_key else { - bail!("key wrong"); - }; - let id_a = id.clone() + "_a"; - let id_b = id + "_b"; - // generate the value extraction proof for the both table individually - let (extract_single, _) = self - .single - .generate_extraction_proof_inputs( - ctx, - contract, - ProofKey::ValueExtraction((id_a, bn)), - ) - .await?; - let ExtractionProofInput::Single(extract_a) = extract_single else { - bail!("can't merge non single tables") - }; - let (extract_mappping, _) = self - .mapping - .generate_extraction_proof_inputs( - ctx, - contract, - ProofKey::ValueExtraction((id_b, bn)), - ) - .await?; - let ExtractionProofInput::Single(extract_b) = extract_mappping else { - bail!("can't merge non single tables") - }; - - // add the metadata hashes together - this is mostly for debugging - let md = merge_metadata_hash::( - contract.address, - contract.chain_id, - vec![], - 
SlotInputs::Simple(self.single.slot_inputs.clone()), - SlotInputs::Mapping(self.mapping.slot_inputs.clone()), - ); - assert!(extract_a != extract_b); - Ok(( - ExtractionProofInput::Merge(MergeExtractionProof { - single: extract_a, - mapping: extract_b, - }), - md, - )) - } - .boxed() - } - pub async fn init_contract_data( &mut self, ctx: &mut TestContext, @@ -645,10 +602,68 @@ impl MergeSource { } } } + + #[allow(elided_named_lifetimes)] + pub fn generate_extraction_proof_inputs<'a>( + &'a self, + ctx: &'a mut TestContext, + contract: &'a Contract, + proof_key: ProofKey, + ) -> BoxFuture> { + async move { + let ProofKey::ValueExtraction((id, bn)) = proof_key else { + bail!("key wrong"); + }; + let id_a = id.clone() + "_a"; + let id_b = id + "_b"; + // generate the value extraction proof for the both table individually + let (extract_single, _) = self + .single + .generate_extraction_proof_inputs( + ctx, + contract, + ProofKey::ValueExtraction((id_a, bn)), + ) + .await?; + let ExtractionProofInput::Single(extract_a) = extract_single else { + bail!("can't merge non single tables") + }; + let (extract_mappping, _) = self + .mapping + .generate_extraction_proof_inputs( + ctx, + contract, + ProofKey::ValueExtraction((id_b, bn)), + ) + .await?; + let ExtractionProofInput::Single(extract_b) = extract_mappping else { + bail!("can't merge non single tables") + }; + + // add the metadata hashes together - this is mostly for debugging + let (simple, mapping) = self.get_data(); + let md = merge_metadata_hash::( + contract.address, + contract.chain_id, + vec![], + simple, + mapping, + ); + assert!(extract_a != extract_b); + Ok(( + ExtractionProofInput::Merge(MergeExtractionProof { + single: extract_a, + mapping: extract_b, + }), + md, + )) + } + .boxed() + } } /// Length extraction arguments (C.2) -#[derive(Serialize, Deserialize, Debug, Hash, Eq, PartialEq, Clone)] +#[derive(Serialize, Deserialize, Debug, Hash, Eq, PartialEq, Clone, Copy)] pub(crate) struct 
LengthExtractionArgs { /// Length slot pub(crate) slot: u8, @@ -656,6 +671,15 @@ pub(crate) struct LengthExtractionArgs { pub(crate) value: u8, } +/// Receipt extraction arguments +#[derive(Serialize, Deserialize, Debug, Hash, Eq, PartialEq, Clone, Copy)] +pub(crate) struct ReceiptExtractionArgs { + /// The event data + pub(crate) event: EventLogInfo, + /// column that will be the secondary index + pub(crate) index: u64, +} + /// Contract extraction arguments (C.3) #[derive(Debug)] pub(crate) struct ContractExtractionArgs { @@ -668,7 +692,7 @@ static ROTATOR: AtomicUsize = AtomicUsize::new(0); use lazy_static::lazy_static; lazy_static! { - pub(crate) static ref BASE_VALUE: U256 = U256::from(10); + pub(crate) static ref BASE_VALUE: U256 = U256::from(10u8); pub static ref DEFAULT_ADDRESS: Address = Address::from_str("0xBA401cdAc1A3B6AEede21c9C4A483bE6c29F88C4").unwrap(); } @@ -762,12 +786,8 @@ impl SingleExtractionArgs { } }; let slot_inputs = SlotInputs::Simple(self.slot_inputs.clone()); - let metadata_hash = metadata_hash::( - slot_inputs, - &contract.address, - contract.chain_id, - vec![], - ); + let metadata_hash = + metadata_hash(slot_inputs, &contract.address, contract.chain_id, vec![]); let input = ExtractionProofInput::Single(ExtractionTableProof { value_proof, length_proof: None, @@ -980,9 +1000,33 @@ impl SingleExtractionArgs { } } -/// Mapping extraction arguments +// /// Mapping extraction arguments +// #[derive(Serialize, Deserialize, Debug, Hash, Eq, PartialEq, Clone)] +// pub(crate) struct MappingExtractionArgs +// where +// K: StorageSlotMappingKey, +// V: StorageSlotValue, +// { +// /// Mapping slot number +// slot: u8, +// /// Mapping index type +// index: MappingIndex, +// /// Slot input information +// slot_inputs: Vec, +// /// Mapping keys: they are useful for two things: +// /// * doing some controlled changes on the smart contract, since if we want to do an update we +// /// need to know an existing key +// /// * doing the MPT proofs over, 
since this test doesn't implement the copy on write for MPT +// /// (yet), we're just recomputing all the proofs at every block and we need the keys for that. +// mapping_keys: BTreeSet, +// /// The optional length extraction parameters +// length_args: Option, +// /// Phantom +// _phantom: PhantomData<(K, V)>, +// } + #[derive(Serialize, Deserialize, Debug, Hash, Eq, PartialEq, Clone)] -pub(crate) struct MappingExtractionArgs { +pub(crate) struct MappingExtractionArgs { /// Mapping slot number slot: u8, /// Mapping index type @@ -994,208 +1038,60 @@ pub(crate) struct MappingExtractionArgs, - /// Phantom - _phantom: PhantomData<(K, V)>, + mapping_keys: BTreeSet, + /// The optional length extraction parameters + length_args: Option, } -impl MappingExtractionArgs +impl TableSource for MappingExtractionArgs where - K: StorageSlotMappingKey, - V: StorageSlotValue, - Vec>: ContractController, + T: MappingInfo, + Vec>: ContractController, { - pub fn new(slot: u8, index: MappingIndex, slot_inputs: Vec) -> Self { - Self { - slot, - index, - slot_inputs, - mapping_keys: BTreeSet::new(), - _phantom: Default::default(), - } - } - - pub fn slot_inputs(&self) -> &[SlotInput] { - &self.slot_inputs - } - - pub async fn init_contract_data( - &mut self, - ctx: &mut TestContext, - contract: &Contract, - ) -> Vec> { - let init_key_and_value: [_; 3] = array::from_fn(|_| (K::sample_key(), V::sample_value())); - // Save the mapping keys. 
- self.mapping_keys - .extend(init_key_and_value.iter().map(|u| u.0.clone()).collect_vec()); - let updates = init_key_and_value - .into_iter() - .map(|(key, value)| MappingUpdate::Insertion(key, value)) - .collect_vec(); - - updates.update_contract(ctx, contract).await; + type Metadata = SlotInputs; - let new_block_number = ctx.block_number().await as BlockPrimaryIndex; - self.mapping_to_table_update(new_block_number, contract, &updates) + fn get_data(&self) -> Self::Metadata { + if let Some(l_args) = self.length_args.as_ref() { + T::slot_inputs(self.slot_inputs.clone(), Some(l_args.slot)) + } else { + T::slot_inputs(self.slot_inputs.clone(), None) + } } - async fn random_contract_update( - &mut self, - ctx: &mut TestContext, - contract: &Contract, - c: ChangeType, - ) -> Vec> { - // NOTE 1: The first part is just trying to construct the right input to simulate any - // changes on a mapping. This is mostly irrelevant for dist system but needs to manually - // construct our test cases here. The second part is more interesting as it looks at - // "what to do when receiving an update from scrapper". The core of the function is in - // `mapping_to_table_update` - // - // NOTE 2: This implementation tries to emulate as much as possible what happens in dist - // system. To compute the set of updates, it first simulate an update on the contract - // and creates the signal "MappingUpdate" corresponding to the update. From that point - // onwards, the table row updates are manually created. - // Note this can actually lead to more work than necessary in some cases. - // Take an example where the mapping is storing (10->A), (11->A), and where the - // secondary index value is the value, i.e. A. - // Our table initially looks like `A | 10`, `A | 11`. - // Imagine an update where we want to change the first row to `A | 12`. In the "table" - // world, this is only a simple update of a simple cell, no index even involved. 
But - // from the perspective of mapping, the "scrapper" can only tells us : - // * Key 10 has been deleted - // * Key 12 has been added with value A - // In the backend, we translate that in the "table world" to a deletion and an insertion. - // Having such optimization could be done later on, need to properly evaluate the cost - // of it. - let current_key = self.mapping_keys.first().unwrap(); - let current_value = self.query_value(ctx, contract, current_key).await; - let new_key = K::sample_key(); - let updates = match c { - ChangeType::Silent => vec![], - ChangeType::Insertion => { - vec![MappingUpdate::Insertion(new_key, V::sample_value())] - } - ChangeType::Deletion => { - vec![MappingUpdate::Deletion(current_key.clone(), current_value)] - } - ChangeType::Update(u) => { - match u { - UpdateType::Rest => { - let new_value = V::sample_value(); - match self.index { - MappingIndex::OuterKey(_) | MappingIndex::InnerKey(_) => { - // we simply change the mapping value since the key is the secondary index - vec![MappingUpdate::Update( - current_key.clone(), - current_value, - new_value, - )] - } - MappingIndex::Value(_) => { - // TRICKY: in this case, the mapping key must change. But from the - // onchain perspective, it means a transfer mapping(old_key -> new_key,value) - vec![ - MappingUpdate::Deletion( - current_key.clone(), - current_value.clone(), - ), - MappingUpdate::Insertion(new_key, current_value), - ] - } - MappingIndex::None => { - // a random update of the mapping, we don't care which since it is - // not impacting the secondary index of the table since the mapping - // doesn't contain the column which is the secondary index, in case - // of the merge table case. 
- vec![MappingUpdate::Update( - current_key.clone(), - current_value, - new_value, - )] - } - } - } - UpdateType::SecondaryIndex => { - match self.index { - MappingIndex::OuterKey(_) | MappingIndex::InnerKey(_) => { - // TRICKY: if the mapping key changes, it's a deletion then - // insertion from onchain perspective - vec![ - MappingUpdate::Deletion( - current_key.clone(), - current_value.clone(), - ), - // we insert the same value but with a new mapping key - MappingUpdate::Insertion(new_key, current_value), - ] - } - MappingIndex::Value(secondary_value_id) => { - // We only update the second index value here. - let slot_input_to_update = self - .slot_inputs - .iter() - .find(|slot_input| { - identifier_for_value_column( - slot_input, - &contract.address, - contract.chain_id, - vec![], - ) == secondary_value_id - }) - .unwrap(); - let mut new_value = current_value.clone(); - new_value.random_update(slot_input_to_update); - // if the value changes, it's a simple update in mapping - vec![MappingUpdate::Update( - current_key.clone(), - current_value, - new_value, - )] - } - MappingIndex::None => { - // empty vec since this table has no secondary index so it should - // give no updates - vec![] - } - } - } - } - } - }; - // small iteration to always have a good updated list of mapping keys - for update in &updates { - match update { - MappingUpdate::Deletion(key_to_delete, _) => { - info!("Removing key {key_to_delete:?} from tracking mapping keys"); - self.mapping_keys.retain(|u| u != key_to_delete); - } - MappingUpdate::Insertion(key_to_insert, _) => { - info!("Inserting key {key_to_insert:?} to tracking mapping keys"); - self.mapping_keys.insert(key_to_insert.clone()); - } - // the mapping key doesn't change here so no need to update the list - MappingUpdate::Update(_, _, _) => {} - } + fn init_contract_data<'a>( + &'a mut self, + ctx: &'a mut TestContext, + contract: &'a Contract, + ) -> BoxFuture<'a, Vec>> { + async { + let init_key_and_value: [_; 3] = + 
array::from_fn(|_| (T::sample_key(), ::Value::sample_value())); + // Save the mapping keys. + self.mapping_keys + .extend(init_key_and_value.iter().map(|u| u.0.clone()).collect_vec()); + let updates = init_key_and_value + .into_iter() + .map(|(key, value)| MappingUpdate::Insertion(key, value)) + .collect_vec(); + + updates.update_contract(ctx, contract).await; + + let new_block_number = ctx.block_number().await as BlockPrimaryIndex; + self.mapping_to_table_update(new_block_number, contract, &updates) } - updates.update_contract(ctx, contract).await; - - let new_block_number = ctx.block_number().await as BlockPrimaryIndex; - // NOTE HERE is the interesting bit for dist system as this is the logic to execute - // on receiving updates from scapper. This only needs to have the relevant - // information from update and it will translate that to changes in the tree. - self.mapping_to_table_update(new_block_number, contract, &updates) + .boxed() } - pub async fn generate_extraction_proof_inputs( + async fn generate_extraction_proof_inputs( &self, ctx: &mut TestContext, contract: &Contract, - proof_key: ProofKey, + value_key: ProofKey, ) -> Result<(ExtractionProofInput, HashOutput)> { - let ProofKey::ValueExtraction((_, bn)) = proof_key.clone() else { + let ProofKey::ValueExtraction((_, bn)) = value_key.clone() else { bail!("invalid proof key"); }; - let mapping_root_proof = match ctx.storage.get_proof_exact(&proof_key) { + let mapping_root_proof = match ctx.storage.get_proof_exact(&value_key) { Ok(p) => p, Err(_) => { let storage_slot_info = self.all_storage_slot_info(contract); @@ -1207,7 +1103,7 @@ where ) .await; ctx.storage - .store_proof(proof_key, mapping_values_proof.clone())?; + .store_proof(value_key, mapping_values_proof.clone())?; info!("Generated Values Extraction proof for mapping slot"); { let pproof = ProofWithVK::deserialize(&mapping_values_proof).unwrap(); @@ -1230,12 +1126,7 @@ where mapping_values_proof } }; - let metadata_hash = metadata_hash::( - 
K::slot_inputs(self.slot_inputs.clone()), - &contract.address, - contract.chain_id, - vec![], - ); + let metadata_hash = self.metadata_hash(contract.address(), contract.chain_id()); // it's a compoound value type of proof since we're not using the length let input = ExtractionProofInput::Single(ExtractionTableProof { value_proof: mapping_root_proof, @@ -1244,12 +1135,189 @@ where Ok((input, metadata_hash)) } + fn random_contract_update<'a>( + &'a mut self, + ctx: &'a mut TestContext, + contract: &'a Contract, + c: ChangeType, + ) -> BoxFuture<'a, Vec>> { + async { + // NOTE 1: The first part is just trying to construct the right input to simulate any + // changes on a mapping. This is mostly irrelevant for dist system but needs to manually + // construct our test cases here. The second part is more interesting as it looks at + // "what to do when receiving an update from scrapper". The core of the function is in + // `mapping_to_table_update` + // + // NOTE 2: This implementation tries to emulate as much as possible what happens in dist + // system. To compute the set of updates, it first simulate an update on the contract + // and creates the signal "MappingUpdate" corresponding to the update. From that point + // onwards, the table row updates are manually created. + // Note this can actually lead to more work than necessary in some cases. + // Take an example where the mapping is storing (10->A), (11->A), and where the + // secondary index value is the value, i.e. A. + // Our table initially looks like `A | 10`, `A | 11`. + // Imagine an update where we want to change the first row to `A | 12`. In the "table" + // world, this is only a simple update of a simple cell, no index even involved. But + // from the perspective of mapping, the "scrapper" can only tells us : + // * Key 10 has been deleted + // * Key 12 has been added with value A + // In the backend, we translate that in the "table world" to a deletion and an insertion. 
+ // Having such optimization could be done later on, need to properly evaluate the cost + // of it. + let current_key = self.mapping_keys.first().unwrap(); + let current_value = self.query_value(ctx, contract, current_key).await; + let new_key = T::sample_key(); + let updates = match c { + ChangeType::Silent => vec![], + ChangeType::Insertion => { + vec![MappingUpdate::Insertion( + new_key, + ::Value::sample_value(), + )] + } + ChangeType::Deletion => { + vec![MappingUpdate::Deletion(current_key.clone(), current_value)] + } + ChangeType::Update(u) => { + match u { + UpdateType::Rest => { + let new_value = ::Value::sample_value(); + match self.index { + MappingIndex::OuterKey(_) | MappingIndex::InnerKey(_) => { + // we simply change the mapping value since the key is the secondary index + vec![MappingUpdate::Update( + current_key.clone(), + current_value, + new_value, + )] + } + MappingIndex::Value(_) => { + // TRICKY: in this case, the mapping key must change. But from the + // onchain perspective, it means a transfer mapping(old_key -> new_key,value) + vec![ + MappingUpdate::Deletion( + current_key.clone(), + current_value.clone(), + ), + MappingUpdate::Insertion(new_key, current_value), + ] + } + MappingIndex::None => { + // a random update of the mapping, we don't care which since it is + // not impacting the secondary index of the table since the mapping + // doesn't contain the column which is the secondary index, in case + // of the merge table case. 
+ vec![MappingUpdate::Update( + current_key.clone(), + current_value, + new_value, + )] + } + } + } + UpdateType::SecondaryIndex => { + match self.index { + MappingIndex::OuterKey(_) | MappingIndex::InnerKey(_) => { + // TRICKY: if the mapping key changes, it's a deletion then + // insertion from onchain perspective + vec![ + MappingUpdate::Deletion( + current_key.clone(), + current_value.clone(), + ), + // we insert the same value but with a new mapping key + MappingUpdate::Insertion(new_key, current_value), + ] + } + MappingIndex::Value(secondary_value_id) => { + // We only update the second index value here. + let slot_input_to_update = self + .slot_inputs + .iter() + .find(|slot_input| { + identifier_for_value_column( + slot_input, + &contract.address, + contract.chain_id, + vec![], + ) == secondary_value_id + }) + .unwrap(); + let mut new_value = current_value.clone(); + new_value.random_update(slot_input_to_update); + // if the value changes, it's a simple update in mapping + vec![MappingUpdate::Update( + current_key.clone(), + current_value, + new_value, + )] + } + MappingIndex::None => { + // empty vec since this table has no secondary index so it should + // give no updates + vec![] + } + } + } + } + } + }; + // small iteration to always have a good updated list of mapping keys + for update in &updates { + match update { + MappingUpdate::Deletion(key_to_delete, _) => { + info!("Removing key {key_to_delete:?} from tracking mapping keys"); + self.mapping_keys.retain(|u| u != key_to_delete); + } + MappingUpdate::Insertion(key_to_insert, _) => { + info!("Inserting key {key_to_insert:?} to tracking mapping keys"); + self.mapping_keys.insert(key_to_insert.clone()); + } + // the mapping key doesn't change here so no need to update the list + MappingUpdate::Update(_, _, _) => {} + } + } + updates.update_contract(ctx, contract).await; + + let new_block_number = ctx.block_number().await as BlockPrimaryIndex; + // NOTE HERE is the interesting bit for dist system as 
this is the logic to execute + // on receiving updates from scapper. This only needs to have the relevant + // information from update and it will translate that to changes in the tree. + self.mapping_to_table_update(new_block_number, contract, &updates) + } + .boxed() + } + + fn metadata_hash(&self, contract_address: Address, chain_id: u64) -> MetadataHash { + metadata_hash(self.get_data(), &contract_address, chain_id, vec![]) + } + + fn can_query(&self) -> bool { + true + } +} + +impl MappingExtractionArgs { + pub fn new( + slot: u8, + index: MappingIndex, + slot_inputs: Vec, + length_args: Option, + ) -> Self { + Self { + slot, + index, + slot_inputs, + mapping_keys: BTreeSet::new(), + length_args, + } + } /// The generic parameter `V` could be set to an Uint256 as single value or a Struct. pub fn mapping_to_table_update( &self, block_number: BlockPrimaryIndex, contract: &Contract, - updates: &[MappingUpdate], + updates: &[MappingUpdate], ) -> Vec> { updates .iter() @@ -1352,7 +1420,7 @@ where &self, evm_word: u32, table_info: Vec, - mapping_key: &K, + mapping_key: &T, ) -> StorageSlotInfo { let storage_slot = mapping_key.storage_slot(self.slot, evm_word); @@ -1373,7 +1441,12 @@ where } /// Query a storage slot value by a mapping key. 
- async fn query_value(&self, ctx: &mut TestContext, contract: &Contract, mapping_key: &K) -> V { + async fn query_value( + &self, + ctx: &mut TestContext, + contract: &Contract, + mapping_key: &T, + ) -> T::Value { let mut extracted_values = vec![]; let evm_word_cols = self.evm_word_column_info(contract); for evm_word_col in evm_word_cols { @@ -1398,7 +1471,7 @@ where }); } - V::from_u256_slice(&extracted_values) + ::Value::from_u256_slice(&extracted_values) } fn table_info(&self, contract: &Contract) -> Vec { diff --git a/mp2-v1/tests/common/context.rs b/mp2-v1/tests/common/context.rs index 16b501a5b..149ba80bb 100644 --- a/mp2-v1/tests/common/context.rs +++ b/mp2-v1/tests/common/context.rs @@ -12,7 +12,7 @@ use anyhow::{Context, Result}; use envconfig::Envconfig; use log::info; use mp2_common::eth::ProofQuery; -use mp2_v1::api::{build_circuits_params, PublicParameters}; +use mp2_v1::api::build_circuits_params; use std::{ fs::File, io::{BufReader, BufWriter}, diff --git a/mp2-v1/tests/common/mod.rs b/mp2-v1/tests/common/mod.rs index dc2c05a6e..16c16d533 100644 --- a/mp2-v1/tests/common/mod.rs +++ b/mp2-v1/tests/common/mod.rs @@ -2,8 +2,8 @@ use alloy::primitives::Address; use anyhow::Result; use cases::table_source::TableSource; -use mp2_v1::api::{merge_metadata_hash, metadata_hash, MetadataHash, SlotInputs}; -use serde::{Deserialize, Serialize}; +use mp2_v1::api::MetadataHash; +use serde::{de::DeserializeOwned, Deserialize, Serialize}; use table::{TableColumns, TableRowUniqueID}; pub mod benchmarker; pub mod bindings; @@ -71,7 +71,8 @@ pub fn mkdir_all(params_path_str: &str) -> Result<()> { } #[derive(Debug, Clone, Serialize, Deserialize)] -pub struct TableInfo { +#[serde(bound = "T: Serialize + DeserializeOwned")] +pub struct TableInfo { pub columns: TableColumns, pub row_unique_id: TableRowUniqueID, // column to do queries over for numerical values, NOT secondary index @@ -79,68 +80,70 @@ pub struct TableInfo { pub public_name: String, pub contract_address: 
Address, pub chain_id: u64, - pub source: TableSource, + pub source: T, } -impl TableInfo { +impl TableInfo { pub fn metadata_hash(&self) -> MetadataHash { - match &self.source { - TableSource::Single(args) => { - let slot = SlotInputs::Simple(args.slot_inputs.clone()); - metadata_hash::( - slot, - &self.contract_address, - self.chain_id, - vec![], - ) - } - TableSource::MappingValues(args, _) => { - let slot_inputs = SlotInputs::Mapping(args.slot_inputs().to_vec()); - metadata_hash::( - slot_inputs, - &self.contract_address, - self.chain_id, - vec![], - ) - } - TableSource::MappingStruct(args, _) => { - let slot_inputs = SlotInputs::Mapping(args.slot_inputs().to_vec()); - metadata_hash::( - slot_inputs, - &self.contract_address, - self.chain_id, - vec![], - ) - } - TableSource::MappingOfSingleValueMappings(args) => { - let slot_inputs = SlotInputs::MappingOfMappings(args.slot_inputs().to_vec()); - metadata_hash::( - slot_inputs, - &self.contract_address, - self.chain_id, - vec![], - ) - } - TableSource::MappingOfStructMappings(args) => { - let slot_inputs = SlotInputs::MappingOfMappings(args.slot_inputs().to_vec()); - metadata_hash::( - slot_inputs, - &self.contract_address, - self.chain_id, - vec![], - ) - } - TableSource::Merge(source) => { - let single = SlotInputs::Simple(source.single.slot_inputs.clone()); - let mapping = SlotInputs::Mapping(source.mapping.slot_inputs().to_vec()); - merge_metadata_hash::( - self.contract_address, - self.chain_id, - vec![], - single, - mapping, - ) - } - } + // match &self.source { + // TableSource::Single(args) => { + // let slot = SlotInputs::Simple(args.slot_inputs.clone()); + // metadata_hash::( + // slot, + // &self.contract_address, + // self.chain_id, + // vec![], + // ) + // } + // TableSource::MappingValues(args, _) => { + // let slot_inputs = SlotInputs::Mapping(args.slot_inputs().to_vec()); + // metadata_hash::( + // slot_inputs, + // &self.contract_address, + // self.chain_id, + // vec![], + // ) + // } + // 
TableSource::MappingStruct(args, _) => { + // let slot_inputs = SlotInputs::Mapping(args.slot_inputs().to_vec()); + // metadata_hash::( + // slot_inputs, + // &self.contract_address, + // self.chain_id, + // vec![], + // ) + // } + // TableSource::MappingOfSingleValueMappings(args) => { + // let slot_inputs = SlotInputs::MappingOfMappings(args.slot_inputs().to_vec()); + // metadata_hash::( + // slot_inputs, + // &self.contract_address, + // self.chain_id, + // vec![], + // ) + // } + // TableSource::MappingOfStructMappings(args) => { + // let slot_inputs = SlotInputs::MappingOfMappings(args.slot_inputs().to_vec()); + // metadata_hash::( + // slot_inputs, + // &self.contract_address, + // self.chain_id, + // vec![], + // ) + // } + // TableSource::Merge(source) => { + // let single = SlotInputs::Simple(source.single.slot_inputs.clone()); + // let mapping = SlotInputs::Mapping(source.mapping.slot_inputs().to_vec()); + // merge_metadata_hash::( + // self.contract_address, + // self.chain_id, + // vec![], + // single, + // mapping, + // ) + // } + // } + self.source + .metadata_hash(self.contract_address, self.chain_id) } } diff --git a/mp2-v1/tests/integrated_tests.rs b/mp2-v1/tests/integrated_tests.rs index 673b60a91..8cb2641f4 100644 --- a/mp2-v1/tests/integrated_tests.rs +++ b/mp2-v1/tests/integrated_tests.rs @@ -24,6 +24,8 @@ use common::{ MAX_NUM_ITEMS_PER_OUTPUT, MAX_NUM_OUTPUTS, MAX_NUM_PLACEHOLDERS, MAX_NUM_PREDICATE_OPS, MAX_NUM_RESULT_OPS, }, + slot_info::{SimpleMapping, SimpleNestedMapping, StructMapping, StructNestedMapping}, + table_source::{MappingExtractionArgs, MergeSource, SingleExtractionArgs, TableSource}, TableIndexing, }, context::{self, ParamsType, TestContextConfig}, @@ -41,6 +43,7 @@ use parsil::{ utils::ParsilSettingsBuilder, PlaceholderSettings, }; +use serde::{de::DeserializeOwned, Serialize}; use test_log::test; use verifiable_db::query::universal_circuit::universal_circuit_inputs::Placeholders; @@ -88,16 +91,17 @@ async fn 
integrated_indexing() -> Result<()> { info!("Params built"); // NOTE: to comment to avoid very long tests... - - let (mut single, genesis) = TableIndexing::single_value_test_case(&mut ctx).await?; + let (mut single, genesis) = + TableIndexing::::single_value_test_case(&mut ctx).await?; let changes = vec![ ChangeType::Update(UpdateType::Rest), ChangeType::Silent, ChangeType::Update(UpdateType::SecondaryIndex), ]; single.run(&mut ctx, genesis, changes.clone()).await?; - - let (mut mapping, genesis) = TableIndexing::mapping_value_test_case(&mut ctx).await?; + let (mut mapping, genesis) = + TableIndexing::>::mapping_value_test_case(&mut ctx) + .await?; let changes = vec![ ChangeType::Insertion, ChangeType::Update(UpdateType::Rest), @@ -107,18 +111,20 @@ async fn integrated_indexing() -> Result<()> { ]; mapping.run(&mut ctx, genesis, changes).await?; - let (mut mapping, genesis) = TableIndexing::mapping_struct_test_case(&mut ctx).await?; + let (mut mapping, genesis) = + TableIndexing::>::mapping_struct_test_case(&mut ctx) + .await?; let changes = vec![ ChangeType::Insertion, ChangeType::Update(UpdateType::Rest), - ChangeType::Update(UpdateType::SecondaryIndex), ChangeType::Deletion, ChangeType::Silent, ]; mapping.run(&mut ctx, genesis, changes).await?; let (mut mapping_of_single_value_mappings, genesis) = - TableIndexing::mapping_of_single_value_mappings_test_case(&mut ctx).await?; + TableIndexing::>::mapping_of_single_value_mappings_test_case(&mut ctx) + .await?; let changes = vec![ ChangeType::Insertion, ChangeType::Update(UpdateType::Rest), @@ -130,8 +136,12 @@ async fn integrated_indexing() -> Result<()> { .run(&mut ctx, genesis, changes) .await?; - let (mut mapping_of_struct_mappings, genesis) = - TableIndexing::mapping_of_struct_mappings_test_case(&mut ctx).await?; + let (mut mapping_of_struct_mappings, genesis) = TableIndexing::< + MappingExtractionArgs, + >::mapping_of_struct_mappings_test_case( + &mut ctx + ) + .await?; let changes = vec![ 
ChangeType::Insertion, ChangeType::Update(UpdateType::Rest), @@ -143,13 +153,9 @@ async fn integrated_indexing() -> Result<()> { .run(&mut ctx, genesis, changes) .await?; - let (mut merged, genesis) = TableIndexing::merge_table_test_case(&mut ctx).await?; - let changes = vec![ - ChangeType::Insertion, - ChangeType::Update(UpdateType::Rest), - ChangeType::Silent, - ChangeType::Deletion, - ]; + let (mut merged, genesis) = + TableIndexing::::merge_table_test_case(&mut ctx).await?; + let changes = vec![ChangeType::Update(UpdateType::Rest), ChangeType::Silent]; merged.run(&mut ctx, genesis, changes).await?; // save columns information and table information in JSON so querying test can pick up @@ -163,7 +169,7 @@ async fn integrated_indexing() -> Result<()> { Ok(()) } -async fn integrated_querying(table_info: TableInfo) -> Result<()> { +async fn integrated_querying(table_info: TableInfo) -> Result<()> { let storage = ProofKV::new_from_env(PROOF_STORE_FILE)?; info!("Loading Anvil and contract"); let mut ctx = context::new_local_chain(storage).await; @@ -186,7 +192,8 @@ async fn integrated_querying(table_info: TableInfo) -> Result<()> { async fn integrated_querying_mapping_table() -> Result<()> { let _ = env_logger::try_init(); info!("Running QUERY test for mapping table"); - let table_info = read_table_info(MAPPING_TABLE_INFO_FILE)?; + let table_info: TableInfo> = + read_table_info(MAPPING_TABLE_INFO_FILE)?; integrated_querying(table_info).await } @@ -195,7 +202,7 @@ async fn integrated_querying_mapping_table() -> Result<()> { async fn integrated_querying_merged_table() -> Result<()> { let _ = env_logger::try_init(); info!("Running QUERY test for merged table"); - let table_info = read_table_info(MERGE_TABLE_INFO_FILE)?; + let table_info: TableInfo = read_table_info(MERGE_TABLE_INFO_FILE)?; integrated_querying(table_info).await } @@ -220,7 +227,10 @@ fn table_info_path(f: &str) -> PathBuf { path } -fn write_table_info(f: &str, info: TableInfo) -> Result<()> { +fn 
write_table_info( + f: &str, + info: TableInfo, +) -> Result<()> { let full_path = table_info_path(f); let file = File::create(full_path)?; let writer = BufWriter::new(file); @@ -228,11 +238,11 @@ fn write_table_info(f: &str, info: TableInfo) -> Result<()> { Ok(()) } -fn read_table_info(f: &str) -> Result { +fn read_table_info(f: &str) -> Result> { let full_path = table_info_path(f); let file = File::open(full_path)?; let reader = BufReader::new(file); - let info = serde_json::from_reader(reader)?; + let info: TableInfo = serde_json::from_reader(reader)?; Ok(info) } From 39810b72ce5e270f0e601965939782d46dcf1be0 Mon Sep 17 00:00:00 2001 From: Zack Youell Date: Thu, 19 Dec 2024 13:02:38 +0000 Subject: [PATCH 19/47] Fixed stack too deep error --- mp2-v1/tests/common/cases/table_source.rs | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/mp2-v1/tests/common/cases/table_source.rs b/mp2-v1/tests/common/cases/table_source.rs index 3e1767bce..81edab14f 100644 --- a/mp2-v1/tests/common/cases/table_source.rs +++ b/mp2-v1/tests/common/cases/table_source.rs @@ -407,7 +407,7 @@ impl TableSource for MergeSource { ctx: &'a mut TestContext, contract: &'a Contract, ) -> BoxFuture<'a, Vec>> { - async move { self.init_contract_data(ctx, contract).await }.boxed() + async move { MergeSource::init_contract_data(self, ctx, contract).await }.boxed() } async fn generate_extraction_proof_inputs( @@ -416,8 +416,7 @@ impl TableSource for MergeSource { contract: &Contract, value_key: ProofKey, ) -> Result<(ExtractionProofInput, HashOutput)> { - self.generate_extraction_proof_inputs(ctx, contract, value_key) - .await + MergeSource::generate_extraction_proof_inputs(self, ctx, contract, value_key).await } fn random_contract_update<'a>( @@ -426,7 +425,7 @@ impl TableSource for MergeSource { contract: &'a Contract, c: ChangeType, ) -> BoxFuture<'a, Vec>> { - async move { self.random_contract_update(ctx, contract, c).await }.boxed() + async move { 
MergeSource::random_contract_update(self, ctx, contract, c).await }.boxed() } fn metadata_hash(&self, contract_address: Address, chain_id: u64) -> MetadataHash { From 2e981837d35df86e1d3f22d5443ee0e9d1532eb9 Mon Sep 17 00:00:00 2001 From: Zack Youell Date: Thu, 19 Dec 2024 15:32:03 +0000 Subject: [PATCH 20/47] Changed final extraction circuit set size constant --- mp2-v1/src/final_extraction/api.rs | 2 +- mp2-v1/src/final_extraction/receipt_circuit.rs | 1 - mp2-v1/store/test_proofs.store | Bin 0 -> 524288 bytes 3 files changed, 1 insertion(+), 2 deletions(-) create mode 100644 mp2-v1/store/test_proofs.store diff --git a/mp2-v1/src/final_extraction/api.rs b/mp2-v1/src/final_extraction/api.rs index 51ea65f95..600fb24c7 100644 --- a/mp2-v1/src/final_extraction/api.rs +++ b/mp2-v1/src/final_extraction/api.rs @@ -57,7 +57,7 @@ pub struct PublicParameters { circuit_set: RecursiveCircuits, } -const FINAL_EXTRACTION_CIRCUIT_SET_SIZE: usize = 2; +const FINAL_EXTRACTION_CIRCUIT_SET_SIZE: usize = 4; pub(super) const NUM_IO: usize = PublicInputs::::TOTAL_LEN; impl PublicParameters { diff --git a/mp2-v1/src/final_extraction/receipt_circuit.rs b/mp2-v1/src/final_extraction/receipt_circuit.rs index 56a540370..a1366a2af 100644 --- a/mp2-v1/src/final_extraction/receipt_circuit.rs +++ b/mp2-v1/src/final_extraction/receipt_circuit.rs @@ -108,7 +108,6 @@ impl CircuitLogicWires for ReceiptRecursiveWires { _verified_proofs: [&plonky2::plonk::proof::ProofWithPublicInputsTarget; 0], builder_parameters: Self::CircuitBuilderParams, ) -> Self { - // value proof for table a and value proof for table b = 2 let verification = ReceiptCircuitProofInputs::build(builder, &builder_parameters); ReceiptExtractionCircuit::build( builder, diff --git a/mp2-v1/store/test_proofs.store b/mp2-v1/store/test_proofs.store new file mode 100644 index 0000000000000000000000000000000000000000..0324f3b4c1c0280b993b31ff7a7b5741d87516db GIT binary patch literal 524288 
zcmeI)KS~2Z6bInV9}^1;As|W$&mdS@q!YZvHb%5s*(`b}PhjT-L=bP_0lb1p-ixqg z3n2j^-!{LQeZvg%^>2z|`3)l1#n2oNAZfB*pk z1PBlyK!5-N0t5&UAV7cs0RjXF5FkK+009C72oNAZfB*pk1PBlyK!5-N0t5&UAV7cs z0RjXF5FkK+z^_2|=?~*q&T~b+3oyly}#|sAW8yMQPDg^}KB6izc5V$R&~h0RjXF5FkK+009C72oNAZfB*pk1PBly zK!5-N0t5&UAV7cs0RjXF5FkK+009C72oNAZfB*pk1PBlyK!5-N0t5)`gupm%nZ5b# z{{Ng0c0$7?2@oJafB*pk1PBlyK!5-N0t5&UAV7cs0RjXF5FkK+009C72oNAZfB*pk z1PBlyK!5-N0t5&UAV7cs0RjXF5FkK+009C72oNAZfB*pk1PBlyK!5-N0t5&UAV7cs z0RjXF5FkK+009C72oNAZfB*pk1PBlyK!5-N0t5&UAV7cs0RjXF5FkK+009C72oNAZ zfB*pk1PBlyK!5-N0t5&UAV7cs0RjXF5FkK+009C72oNAZfB*pk1PBlyK!5-N0t5&U zAV7cs0RjXF5FkK+009C72oNAZfB*pk1PBlyK!5-N0t5&UAV7cs0RjXF5FkK+009C7 z2oNAZfB*pk1PBlyK!5-N0t5&UAV7cs0RjXF5FkK+009C72oNAZfB*pk1PBlyK!5-N z0t5&UAV7cs0RjXF5FkK+009C72oNAZfB*pk1PBlyK!5-N0t5&UAV7cs0RjXF5FkK+ z009C72oNAZfB*pk1PBlyK!5-N0t5&UAV7cs0RjXF5FkK+009C72oNAZfB*pk1PBly zK!5-N0t5&UAV7cs0RjXF5FkK+009C72oNAZfB*pk1PBlyK!5-N0t5&UAV7cs0RjXF z5FkK+009C72oNAZfB*pk1PBlyK!5-N0t5&UAV7cs0RjXF5FkK+009C72oNAZfB*pk z1PBlyK!5-N0t5&UAV7cs0RjXF5FkK+009C72oNAZfB*pk1PBlyK!5-N0t5&UAV7cs z0RjXF5FkK+009C72oNAZfB*pk1PBlyK!5-N0t5&UAV7cs0RjXF5FkK+009C72oNAZ zfB*pk1PBlyK!5-N0t5&UAV7cs0RjXF5FkK+009C72oNAZfB*pk1PBlyK!5-N0t5&U zAV7cs0RjXF5FkK+009C72oNAZfB*pk1PBlyK!5-N0t5&UAV7cs0RjXF5FkK+009C7 z2oNAZfB*pk1PBlyK!5-N0t5&UAV7cs0RjXF5FkK+009C72oNAZfB*pk1PBlyK!5-N z0t5&UAV7cs0RjXF5FkK+009C72oNAZfB*pk1PBlyK!5-N0t5&UAV7cs0RjXF5FkK+ z009C72oNAZfB*pk1PBlyK!5-N0t5&UAV7cs0RjXF5FkK+009C72oNAZfB*pk1PBly zK!5-N0t5&UAV7cs0RjXF5FkK+009C72oNAZfB*pk1PBlyK!5-N0t5&UAV7cs0RjXF z5FkK+009C72oNAZfB*pk1PBlyK!5-N0t5&UAV7cs0RjXF5FkK+009C72oNAZfB*pk z1PBlyK!5-N0t5&UAV7cs0RjXF5FkK+009C72oNAZfB*pk1PBlyK!5-N0t5&UAV7cs z0RjXF5FkK+009C72oNAZfB*pk1PBlyK!5-N0t5&UAV7cs0RjXF5FkK+009C72oNAZ zfB*pk1PBlyK!5-N0t5&UAV7cs0RjXF5FkK+009C72oNAZfB*pk1PBlyK!5-N0t5&U zAV7cs0RjXF5FkK+009C72oNAZfB*pk1PBlyK!5-N0t5&UAV7cs0RjXF5FkK+009C7 z2oNAZfB*pk1PBlyK!5-N0t5&UAV7cs0RjXF5FkK+009C72oNAZfB*pk1PBlyK!5-N 
z0t5&UAV7cs0RjXF5FkK+009C72oNAZfB*pk1PBlyK!5-N0t5&UAV7cs0RjXF5FkK+ z009C72oNAZfB*pk1PBlyK!5-N0t5&UAV7cs0RjXF5FkK+009C72oNAZfB*pk1PBly zK!5-N0t5&UAV7cs0RjXF5FkK+009C72oNAZfB*pk1PBlyK!5-N0t5&UAV7cs0RjXF z5FkK+009C72oNAZfB*pk1PBlyK!5-N0t5&UAV7cs0RjXF5FkK+009C72oNAZfB*pk z1PBlyK!5-N0t5&UAV7cs0RjXF5FkK+009C72oNAZfB*pk1PBlyK!5-N0t5&UAV7cs z0RjXF5FkK+009C72oNAZfB*pk1PBlyK!5-N0t5&UAV7cs0RjXF5FkK+009C72oNAZ zfB*pk1PBlyK!5-N0t5&UAV7cs0RjXF5FkK+009C72oNAZfB*pk1PBlyK!5-N0t5&U zAV7cs0RjXF5FkK+009C72oNAZfB*pk1PBlyK!5-N0t5&UAV7cs0RjXF5FkK+009C7 z2oNAZfB*pk1PBlyK!5-N0t5&UAV7cs0RjXF5FkK+009C72oNAZfB*pk1PBlyK!5-N z0t5&UAV7cs0RjXF5FkK+009C72oNAZfB*pk1PBlyK!5-N0t5&UAV7cs0RjXF5FkK+ e009C72oNAZfB*pk1PBlyK!5-N0t5*BFM)4A2pR|g literal 0 HcmV?d00001 From b0b4c13ee7c7bb72f97375c1b594ef7774871927 Mon Sep 17 00:00:00 2001 From: Zack Youell Date: Mon, 23 Dec 2024 12:51:23 +0000 Subject: [PATCH 21/47] resolves CRY-23 --- mp2-common/src/keccak.rs | 2 +- mp2-common/src/types.rs | 7 +- mp2-v1/Makefile | 7 +- mp2-v1/src/api.rs | 290 +-- mp2-v1/src/lib.rs | 5 +- mp2-v1/src/values_extraction/api.rs | 1749 +++++++++-------- .../values_extraction/gadgets/column_info.rs | 597 ++++-- .../gadgets/metadata_gadget.rs | 855 +++++--- mp2-v1/src/values_extraction/gadgets/mod.rs | 1 - mp2-v1/src/values_extraction/leaf_mapping.rs | 281 ++- .../leaf_mapping_of_mappings.rs | 343 ++-- mp2-v1/src/values_extraction/leaf_receipt.rs | 632 +++--- mp2-v1/src/values_extraction/leaf_single.rs | 178 +- mp2-v1/src/values_extraction/mod.rs | 646 +++--- mp2-v1/store/test_proofs.store | Bin 524288 -> 0 bytes mp2-v1/tests/common/cases/contract.rs | 34 +- mp2-v1/tests/common/cases/indexing.rs | 360 +++- mp2-v1/tests/common/cases/table_source.rs | 236 ++- mp2-v1/tests/common/context.rs | 9 +- mp2-v1/tests/common/mod.rs | 2 +- mp2-v1/tests/common/rowtree.rs | 17 + mp2-v1/tests/common/storage_trie.rs | 16 +- mp2-v1/tests/common/table.rs | 20 +- mp2-v1/tests/integrated_tests.rs | 5 + 24 files changed, 3501 insertions(+), 2791 
deletions(-) delete mode 100644 mp2-v1/store/test_proofs.store diff --git a/mp2-common/src/keccak.rs b/mp2-common/src/keccak.rs index e29ba48a9..18ad2f01c 100644 --- a/mp2-common/src/keccak.rs +++ b/mp2-common/src/keccak.rs @@ -87,7 +87,7 @@ pub struct KeccakCircuit { /// outside the circuit that requires the original input data. #[derive(Clone, Debug, Serialize, Deserialize, Eq, PartialEq)] pub struct KeccakWires { - input_array: VectorWire, + pub input_array: VectorWire, diff: Target, // 256/u32 = 8 pub output_array: OutputHash, diff --git a/mp2-common/src/types.rs b/mp2-common/src/types.rs index 933116eb0..516e27ba8 100644 --- a/mp2-common/src/types.rs +++ b/mp2-common/src/types.rs @@ -1,6 +1,9 @@ //! Custom types -use crate::{array::Array, D, F}; +use crate::{ + array::{Array, L32}, + D, F, +}; use anyhow::ensure; use derive_more::Deref; use plonky2::{ @@ -76,6 +79,8 @@ pub const MAX_BLOCK_LEN: usize = 650; /// value **not** RLP encoded,i.e. without the 1-byte RLP header. pub const MAPPING_LEAF_VALUE_LEN: usize = 32; +pub const MAPPING_LEAF_VALUE_LEN_PACKED: usize = L32(MAPPING_LEAF_VALUE_LEN); + /// The length of an EVM word pub const EVM_WORD_LEN: usize = 32; diff --git a/mp2-v1/Makefile b/mp2-v1/Makefile index f82a9538b..1c73912da 100644 --- a/mp2-v1/Makefile +++ b/mp2-v1/Makefile @@ -12,15 +12,12 @@ TEST_BINDINGS_OUT_PATH=$(TEST_CONTRACT_PATH)/out/$(TEST_BINDINGS_FOLDER) # Generate the integration test contract bindings. bindings: - rm -rf $(TEST_BINDINGS_MOD_PATH) $(TEST_BINDINGS_OUT_PATH) # Generate new bindings. forge install --root $(TEST_CONTRACT_PATH) - forge bind --alloy --module --root $(TEST_CONTRACT_PATH) - -# Move the bindings module to the integration test location. 
- mv -f $(TEST_BINDINGS_OUT_PATH) $(TEST_BINDINGS_MOD_PATH) + forge bind --bindings-path $(TEST_BINDINGS_MOD_PATH) --alloy --module --root $(TEST_CONTRACT_PATH) --extra-output abi --overwrite cargo fmt + # Declare phony targets .PHONY: bindings diff --git a/mp2-v1/src/api.rs b/mp2-v1/src/api.rs index fc95241a7..d10af5705 100644 --- a/mp2-v1/src/api.rs +++ b/mp2-v1/src/api.rs @@ -10,13 +10,14 @@ use crate::{ self, compute_metadata_digest as length_metadata_digest, LengthCircuitInput, }, values_extraction::{ - self, compute_leaf_mapping_metadata_digest, - compute_leaf_mapping_of_mappings_metadata_digest, compute_leaf_single_metadata_digest, - gadgets::column_info::ColumnInfo, identifier_block_column, - identifier_for_inner_mapping_key_column, identifier_for_mapping_key_column, - identifier_for_outer_mapping_key_column, identifier_for_value_column, + self, compute_id_with_prefix, + gadgets::column_info::{ExtractedColumnInfo, InputColumnInfo}, + identifier_block_column, identifier_for_inner_mapping_key_column, + identifier_for_mapping_key_column, identifier_for_outer_mapping_key_column, + identifier_for_value_column, ColumnMetadata, INNER_KEY_ID_PREFIX, KEY_ID_PREFIX, + OUTER_KEY_ID_PREFIX, }, - MAX_LEAF_NODE_LEN, + MAX_LEAF_VALUE_LEN, MAX_RECEIPT_LEAF_NODE_LEN, }; use alloy::primitives::Address; use anyhow::Result; @@ -24,13 +25,11 @@ use itertools::Itertools; use log::debug; use mp2_common::{ digest::Digest, - group_hashing::map_to_curve_point, poseidon::H, types::HashOutput, utils::{Fieldable, ToFields}, }; use plonky2::{ - field::types::PrimeField64, iop::target::Target, plonk::config::{GenericHashOut, Hasher}, }; @@ -44,23 +43,28 @@ pub struct InputNode { // TODO: Specify `NODE_LEN = MAX_LEAF_NODE_LEN` in the generic parameter, // but it could not work for using `MAPPING_LEAF_NODE_LEN` constant directly. 
-type ValuesExtractionInput = - values_extraction::CircuitInput<69, MAX_COLUMNS, MAX_FIELD_PER_EVM>; -type ValuesExtractionParameters = - values_extraction::PublicParameters<69, MAX_COLUMNS, MAX_FIELD_PER_EVM>; +type ValuesExtractionInput = + values_extraction::CircuitInput<512, MAX_COLUMNS>; +type ValuesExtractionParameters = + values_extraction::PublicParameters<512, MAX_COLUMNS>; fn sanity_check() { - assert_eq!(MAX_LEAF_NODE_LEN, 69); + assert_eq!(MAX_RECEIPT_LEAF_NODE_LEN, 512); } /// Set of inputs necessary to generate proofs for each circuit employed in the /// pre-processing stage of LPN -pub enum CircuitInput { +pub enum CircuitInput +where + [(); MAX_COLUMNS - 2]:, + [(); MAX_COLUMNS - 1]:, + [(); MAX_COLUMNS - 0]:, +{ /// Contract extraction input ContractExtraction(contract_extraction::CircuitInput), /// Length extraction input LengthExtraction(LengthCircuitInput), /// Values extraction input - ValuesExtraction(ValuesExtractionInput), + ValuesExtraction(ValuesExtractionInput), /// Block extraction necessary input BlockExtraction(block_extraction::CircuitInput), /// Final extraction input @@ -77,17 +81,25 @@ pub enum CircuitInput #[derive(Serialize, Deserialize)] /// Parameters defining all the circuits employed for the pre-processing stage of LPN -pub struct PublicParameters { +pub struct PublicParameters +where + [(); MAX_COLUMNS - 2]:, + [(); MAX_COLUMNS - 1]:, + [(); MAX_COLUMNS - 0]:, +{ contract_extraction: contract_extraction::PublicParameters, length_extraction: length_extraction::PublicParameters, - values_extraction: ValuesExtractionParameters, + values_extraction: ValuesExtractionParameters, block_extraction: block_extraction::PublicParameters, final_extraction: final_extraction::PublicParameters, tree_creation: verifiable_db::api::PublicParameters>, } -impl - PublicParameters +impl PublicParameters +where + [(); MAX_COLUMNS - 2]:, + [(); MAX_COLUMNS - 1]:, + [(); MAX_COLUMNS - 0]:, { pub fn get_params_info(&self) -> Result> { 
self.tree_creation.get_params_info() @@ -101,8 +113,12 @@ impl /// Instantiate the circuits employed for the pre-processing stage of LPN, /// returning their corresponding parameters -pub fn build_circuits_params( -) -> PublicParameters { +pub fn build_circuits_params() -> PublicParameters +where + [(); MAX_COLUMNS - 2]:, + [(); MAX_COLUMNS - 1]:, + [(); MAX_COLUMNS - 0]:, +{ log::info!("Building contract_extraction parameters..."); let contract_extraction = contract_extraction::build_circuits_params(); log::info!("Building length_extraction parameters..."); @@ -135,10 +151,15 @@ pub fn build_circuits_params( - params: &PublicParameters, - input: CircuitInput, -) -> Result> { +pub fn generate_proof( + params: &PublicParameters, + input: CircuitInput, +) -> Result> +where + [(); MAX_COLUMNS - 2]:, + [(); MAX_COLUMNS - 1]:, + [(); MAX_COLUMNS - 0]:, +{ match input { CircuitInput::ContractExtraction(input) => { contract_extraction::generate_proof(¶ms.contract_extraction, input) @@ -198,7 +219,7 @@ pub fn generate_proof( pub type MetadataHash = HashOutput; /// Enumeration to be employed to provide input slots for metadata hash computation -#[derive(Debug)] +#[derive(Debug, Clone)] pub enum SlotInputs { /// Slots of a set of simple variables or Struct /// The slot number should be same for the fields of one Struct. @@ -216,7 +237,70 @@ pub enum SlotInputs { MappingWithLength(Vec, u8), } -#[derive(Clone, Debug, Default, Eq, PartialEq, Hash, Serialize, Deserialize)] +impl SlotInputs { + pub fn to_column_metadata( + &self, + contract_address: &Address, + chain_id: u64, + extra: Vec, + ) -> ColumnMetadata { + let (slot, extracted_columns) = match self { + SlotInputs::Simple(ref inner) + | SlotInputs::Mapping(ref inner) + | SlotInputs::MappingOfMappings(ref inner) + | SlotInputs::MappingWithLength(ref inner, ..) 
=> ( + inner[0].slot, + compute_table_info(inner.to_vec(), contract_address, chain_id, extra.clone()), + ), + }; + + let num_mapping_keys = match self { + SlotInputs::Simple(..) => 0usize, + SlotInputs::Mapping(..) | SlotInputs::MappingWithLength(..) => 1, + SlotInputs::MappingOfMappings(..) => 2, + }; + + let input_columns = match num_mapping_keys { + 0 => vec![], + 1 => { + let identifier = compute_id_with_prefix( + KEY_ID_PREFIX, + slot, + contract_address, + chain_id, + extra.clone(), + ); + let input_column = InputColumnInfo::new(&[slot], identifier, KEY_ID_PREFIX, 32); + vec![input_column] + } + 2 => { + let outer_identifier = compute_id_with_prefix( + OUTER_KEY_ID_PREFIX, + slot, + contract_address, + chain_id, + extra.clone(), + ); + let inner_identifier = compute_id_with_prefix( + INNER_KEY_ID_PREFIX, + slot, + contract_address, + chain_id, + extra.clone(), + ); + vec![ + InputColumnInfo::new(&[slot], outer_identifier, OUTER_KEY_ID_PREFIX, 32), + InputColumnInfo::new(&[slot], inner_identifier, INNER_KEY_ID_PREFIX, 32), + ] + } + _ => vec![], + }; + + ColumnMetadata::new(input_columns, extracted_columns) + } +} + +#[derive(Clone, Debug, Default, Eq, PartialEq, Hash, Serialize, Deserialize, Copy)] pub struct SlotInput { /// Slot information of the variable pub(crate) slot: u8, @@ -230,14 +314,31 @@ pub struct SlotInput { pub(crate) evm_word: u32, } -impl From<&ColumnInfo> for SlotInput { - fn from(column_info: &ColumnInfo) -> Self { - let slot = u8::try_from(column_info.slot.to_canonical_u64()).unwrap(); - let [byte_offset, length] = [column_info.byte_offset, column_info.length] - .map(|f| usize::try_from(f.to_canonical_u64()).unwrap()); - let evm_word = u32::try_from(column_info.evm_word.to_canonical_u64()).unwrap(); +impl From for SlotInput { + fn from(value: ExtractedColumnInfo) -> Self { + let extraction_id = value.extraction_id(); + let slot = extraction_id[0].0 as u8; - SlotInput::new(slot, byte_offset, length, evm_word) + SlotInput { + slot, + 
byte_offset: value.byte_offset().0 as usize, + length: value.length().0 as usize, + evm_word: value.location_offset().0 as u32, + } + } +} + +impl From<&ExtractedColumnInfo> for SlotInput { + fn from(value: &ExtractedColumnInfo) -> Self { + let extraction_id = value.extraction_id(); + let slot = extraction_id[0].0 as u8; + + SlotInput { + slot, + byte_offset: value.byte_offset().0 as usize, + length: value.length().0 as usize, + evm_word: value.location_offset().0 as u32, + } } } @@ -270,21 +371,15 @@ impl SlotInput { /// Compute metadata hash for a "merge" table. Right now it supports only merging tables from the /// same address. -pub fn merge_metadata_hash( +pub fn merge_metadata_hash( contract: Address, chain_id: u64, extra: Vec, table_a: SlotInputs, table_b: SlotInputs, ) -> MetadataHash { - let md_a = value_metadata::( - table_a, - &contract, - chain_id, - extra.clone(), - ); - let md_b = - value_metadata::(table_b, &contract, chain_id, extra); + let md_a = value_metadata(table_a, &contract, chain_id, extra.clone()); + let md_b = value_metadata(table_b, &contract, chain_id, extra); let combined = map_to_curve_point(&md_a.to_fields()) + map_to_curve_point(&md_b.to_fields()); let contract_digest = contract_metadata_digest(&contract); // the block id is only added at the index tree level, the rest is combined at the final @@ -294,36 +389,22 @@ pub fn merge_metadata_hash( - inputs: SlotInputs, - contract: &Address, - chain_id: u64, - extra: Vec, -) -> Digest { - match inputs { - SlotInputs::Simple(inputs) => metadata_digest_simple::( - inputs, contract, chain_id, extra, - ), - SlotInputs::Mapping(inputs) => metadata_digest_mapping::( - inputs, contract, chain_id, extra, - ), - SlotInputs::MappingOfMappings(inputs) => metadata_digest_mapping_of_mappings::< - MAX_COLUMNS, - MAX_FIELD_PER_EVM, - >(inputs, contract, chain_id, extra), +fn value_metadata(inputs: SlotInputs, contract: &Address, chain_id: u64, extra: Vec) -> Digest { + let column_metadata = 
inputs.to_column_metadata(contract, chain_id, extra.clone()); + + let md = column_metadata.digest(); + + let length_digest = match inputs { + SlotInputs::Simple(..) | SlotInputs::Mapping(..) | SlotInputs::MappingOfMappings(..) => { + Digest::NEUTRAL + } SlotInputs::MappingWithLength(mapping_inputs, length_slot) => { assert!(!mapping_inputs.is_empty()); let mapping_slot = mapping_inputs[0].slot; - let mapping_digest = metadata_digest_mapping::( - mapping_inputs, - contract, - chain_id, - extra, - ); - let length_digest = length_metadata_digest(length_slot, mapping_slot); - mapping_digest + length_digest + length_metadata_digest(length_slot, mapping_slot) } - } + }; + md + length_digest } /// Compute the table information for the value columns. @@ -332,17 +413,16 @@ pub fn compute_table_info( address: &Address, chain_id: u64, extra: Vec, -) -> Vec { +) -> Vec { inputs .into_iter() .map(|input| { let id = identifier_for_value_column(&input, address, chain_id, extra.clone()); - ColumnInfo::new( - input.slot, + ExtractedColumnInfo::new( + &[input.slot], id, input.byte_offset, - 0, // bit_offset input.length, input.evm_word, ) @@ -350,60 +430,7 @@ pub fn compute_table_info( .collect_vec() } -fn metadata_digest_simple( - inputs: Vec, - contract: &Address, - chain_id: u64, - extra: Vec, -) -> Digest { - let table_info = compute_table_info(inputs, contract, chain_id, extra); - compute_leaf_single_metadata_digest::(table_info) -} - -fn metadata_digest_mapping( - inputs: Vec, - contract: &Address, - chain_id: u64, - extra: Vec, -) -> Digest { - assert!(!inputs.is_empty()); - let slot = inputs[0].slot; - - // Ensure the slot numbers must be same for mapping type. 
- let slots_equal = inputs[1..].iter().all(|input| input.slot == slot); - assert!(slots_equal); - - let table_info = compute_table_info(inputs, contract, chain_id, extra.clone()); - let key_id = identifier_for_mapping_key_column(slot, contract, chain_id, extra); - compute_leaf_mapping_metadata_digest::(table_info, slot, key_id) -} - -fn metadata_digest_mapping_of_mappings( - inputs: Vec, - contract: &Address, - chain_id: u64, - extra: Vec, -) -> Digest { - assert!(!inputs.is_empty()); - let slot = inputs[0].slot; - - // Ensure the slot numbers must be same for mapping type. - let slots_equal = inputs[1..].iter().all(|input| input.slot == slot); - assert!(slots_equal); - - let table_info = compute_table_info(inputs, contract, chain_id, extra.clone()); - let outer_key_id = - identifier_for_outer_mapping_key_column(slot, contract, chain_id, extra.clone()); - let inner_key_id = identifier_for_inner_mapping_key_column(slot, contract, chain_id, extra); - compute_leaf_mapping_of_mappings_metadata_digest::( - table_info, - slot, - outer_key_id, - inner_key_id, - ) -} - -fn combine_digest_and_block(digest: Digest) -> HashOutput { +pub fn combine_digest_and_block(digest: Digest) -> HashOutput { let block_id = identifier_block_column(); let inputs = digest .to_fields() @@ -414,19 +441,14 @@ fn combine_digest_and_block(digest: Digest) -> HashOutput { } /// Compute metadata hash for a table related to the provided inputs slots of the contract with /// address `contract_address` -pub fn metadata_hash( +pub fn metadata_hash( slot_input: SlotInputs, contract_address: &Address, chain_id: u64, extra: Vec, ) -> MetadataHash { // closure to compute the metadata digest associated to a mapping variable - let value_digest = value_metadata::( - slot_input, - contract_address, - chain_id, - extra, - ); + let value_digest = value_metadata(slot_input, contract_address, chain_id, extra); // Correspond to the computation of final extraction base circuit. 
let value_digest = map_to_curve_point(&value_digest.to_fields()); // add contract digest diff --git a/mp2-v1/src/lib.rs b/mp2-v1/src/lib.rs index e1defbc81..5ca55964f 100644 --- a/mp2-v1/src/lib.rs +++ b/mp2-v1/src/lib.rs @@ -8,7 +8,7 @@ #![feature(generic_arg_infer)] // stylistic feature #![feature(async_closure)] -use mp2_common::mpt_sequential::PAD_LEN; +use mp2_common::{array::L32, mpt_sequential::PAD_LEN}; pub const MAX_BRANCH_NODE_LEN: usize = 532; pub const MAX_BRANCH_NODE_LEN_PADDED: usize = PAD_LEN(532); @@ -17,6 +17,9 @@ pub const MAX_BRANCH_NODE_LEN_PADDED: usize = PAD_LEN(532); pub const MAX_EXTENSION_NODE_LEN: usize = 69; pub const MAX_EXTENSION_NODE_LEN_PADDED: usize = PAD_LEN(69); pub const MAX_LEAF_NODE_LEN: usize = MAX_EXTENSION_NODE_LEN; +pub const MAX_LEAF_NODE_LEN_PADDED: usize = PAD_LEN(MAX_LEAF_NODE_LEN); +pub const MAX_LEAF_VALUE_LEN: usize = 32; +pub const L32_LEAF_VALUE_LEN: usize = L32(MAX_LEAF_VALUE_LEN); pub const MAX_RECEIPT_LEAF_NODE_LEN: usize = 512; pub mod api; diff --git a/mp2-v1/src/values_extraction/api.rs b/mp2-v1/src/values_extraction/api.rs index 268f1c10e..1ffb27522 100644 --- a/mp2-v1/src/values_extraction/api.rs +++ b/mp2-v1/src/values_extraction/api.rs @@ -3,15 +3,18 @@ use super::{ branch::{BranchCircuit, BranchWires}, extension::{ExtensionNodeCircuit, ExtensionNodeWires}, - gadgets::{column_gadget::filter_table_column_identifiers, metadata_gadget::ColumnsMetadata}, + gadgets::{ + column_info::{ExtractedColumnInfo, InputColumnInfo}, + metadata_gadget::TableMetadata, + }, leaf_mapping::{LeafMappingCircuit, LeafMappingWires}, leaf_mapping_of_mappings::{LeafMappingOfMappingsCircuit, LeafMappingOfMappingsWires}, leaf_receipt::{ReceiptLeafCircuit, ReceiptLeafWires}, leaf_single::{LeafSingleCircuit, LeafSingleWires}, public_inputs::PublicInputs, - ColumnId, ColumnInfo, MappingKey, + INNER_KEY_ID_PREFIX, KEY_ID_PREFIX, OUTER_KEY_ID_PREFIX, }; -use crate::{api::InputNode, MAX_BRANCH_NODE_LEN, MAX_LEAF_NODE_LEN, 
MAX_RECEIPT_LEAF_NODE_LEN}; +use crate::{api::InputNode, MAX_BRANCH_NODE_LEN}; use anyhow::{bail, ensure, Result}; use log::debug; use mp2_common::{ @@ -24,11 +27,7 @@ use mp2_common::{ C, D, F, }; use paste::paste; -use plonky2::{ - field::types::{Field, PrimeField64}, - hash::hash_types::HashOut, - plonk::config::Hasher, -}; +use plonky2::{field::types::PrimeField64, hash::hash_types::HashOut, plonk::config::Hasher}; #[cfg(test)] use recursion_framework::framework_testing::{ new_universal_circuit_builder_for_testing, TestingRecursiveCircuits, @@ -47,36 +46,36 @@ const NUM_IO: usize = PublicInputs::::TOTAL_LEN; /// CircuitInput is a wrapper around the different specialized circuits that can /// be used to prove a MPT node recursively. #[derive(Serialize, Deserialize)] -pub enum CircuitInput< - const NODE_LEN: usize, - const MAX_COLUMNS: usize, - const MAX_FIELD_PER_EVM: usize, -> where +pub enum CircuitInput +where [(); PAD_LEN(NODE_LEN)]:, + [(); MAX_COLUMNS - 2]:, + [(); MAX_COLUMNS - 1]:, + [(); MAX_COLUMNS - 0]:, { - LeafSingle(LeafSingleCircuit), - LeafMapping(LeafMappingCircuit), - LeafMappingOfMappings(LeafMappingOfMappingsCircuit), - LeafReceipt(ReceiptLeafCircuit), + LeafSingle(LeafSingleCircuit), + LeafMapping(LeafMappingCircuit), + LeafMappingOfMappings(LeafMappingOfMappingsCircuit), + LeafReceipt(ReceiptLeafCircuit), Extension(ExtensionInput), Branch(BranchInput), } -impl - CircuitInput +impl CircuitInput where [(); PAD_LEN(NODE_LEN)]:, + [(); MAX_COLUMNS - 2]:, + [(); MAX_COLUMNS - 1]:, + [(); MAX_COLUMNS - 0]:, { /// Create a circuit input for proving a leaf MPT node of single variable. 
pub fn new_single_variable_leaf( node: Vec, slot: u8, evm_word: u32, - table_info: Vec, + table_info: Vec, ) -> Self { - let extracted_column_identifiers = - filter_table_column_identifiers(&table_info, slot, evm_word); - let metadata = ColumnsMetadata::new(table_info, &extracted_column_identifiers, evm_word); + let metadata = TableMetadata::::new(&[], &table_info); let slot = SimpleSlot::new(slot); @@ -84,6 +83,7 @@ where node, slot, metadata, + offset: evm_word, }) } @@ -94,20 +94,19 @@ where mapping_key: Vec, key_id: u64, evm_word: u32, - table_info: Vec, + table_info: Vec, ) -> Self { - let extracted_column_identifiers = - filter_table_column_identifiers(&table_info, slot, evm_word); - let metadata = ColumnsMetadata::new(table_info, &extracted_column_identifiers, evm_word); + let input_column = InputColumnInfo::new(&[slot], key_id, KEY_ID_PREFIX, 32); + + let metadata = TableMetadata::::new(&[input_column], &table_info); let slot = MappingSlot::new(slot, mapping_key); - let key_id = F::from_canonical_u64(key_id); CircuitInput::LeafMapping(LeafMappingCircuit { node, slot, - key_id, metadata, + offset: evm_word, }) } @@ -119,23 +118,26 @@ where outer_key_data: (MappingKey, ColumnId), inner_key_data: (MappingKey, ColumnId), evm_word: u32, - table_info: Vec, + table_info: Vec, ) -> Self { - let extracted_column_identifiers = - filter_table_column_identifiers(&table_info, slot, evm_word); - let metadata = ColumnsMetadata::new(table_info, &extracted_column_identifiers, evm_word); + let outer_input_column = + InputColumnInfo::new(&[slot], outer_key_id, OUTER_KEY_ID_PREFIX, 32); + let inner_input_column = + InputColumnInfo::new(&[slot], inner_key_id, INNER_KEY_ID_PREFIX, 32); + + let metadata = TableMetadata::::new( + &[outer_input_column, inner_input_column], + &table_info, + ); - let slot = MappingSlot::new(slot, outer_key_data.0); - let [outer_key_id, inner_key_id] = - [outer_key_data.1, inner_key_data.1].map(F::from_canonical_u64); + let slot = 
MappingSlot::new(slot, outer_key); CircuitInput::LeafMappingOfMappings(LeafMappingOfMappingsCircuit { node, slot, - inner_key: inner_key_data.0, - outer_key_id, - inner_key_id, + inner_key, metadata, + evm_word: evm_word as u8, }) } @@ -143,9 +145,12 @@ where pub fn new_receipt_leaf( info: &ReceiptProofInfo, query: &ReceiptQuery, - ) -> Self { + ) -> Self + where + [(); 7 - 2 - NO_TOPICS - MAX_DATA]:, + { CircuitInput::LeafReceipt( - ReceiptLeafCircuit::::new::(info, query) + ReceiptLeafCircuit::::new::(info, query) .expect("Could not construct Receipt Leaf Circuit"), ) } @@ -172,35 +177,18 @@ where /// Most notably, it holds them in a way to use the recursion framework allowing /// us to specialize circuits according to the situation. #[derive(Eq, PartialEq, Serialize, Deserialize)] -pub struct PublicParameters< - const NODE_LEN: usize, - const MAX_COLUMNS: usize, - const MAX_FIELD_PER_EVM: usize, -> where +pub struct PublicParameters +where [(); PAD_LEN(NODE_LEN)]:, + [(); MAX_COLUMNS - 2]:, + [(); MAX_COLUMNS - 1]:, + [(); MAX_COLUMNS - 0]:, { - leaf_single: CircuitWithUniversalVerifier< - F, - C, - D, - 0, - LeafSingleWires, - >, - leaf_mapping: CircuitWithUniversalVerifier< - F, - C, - D, - 0, - LeafMappingWires, - >, - leaf_mapping_of_mappings: CircuitWithUniversalVerifier< - F, - C, - D, - 0, - LeafMappingOfMappingsWires, - >, - leaf_receipt: CircuitWithUniversalVerifier>, + leaf_single: CircuitWithUniversalVerifier>, + leaf_mapping: CircuitWithUniversalVerifier>, + leaf_mapping_of_mappings: + CircuitWithUniversalVerifier>, + leaf_receipt: CircuitWithUniversalVerifier>, extension: CircuitWithUniversalVerifier, #[cfg(not(test))] branches: BranchCircuits, @@ -214,13 +202,13 @@ pub struct PublicParameters< /// Public API employed to build the MPT circuits, which are returned in /// serialized form. 
-pub fn build_circuits_params< - const NODE_LEN: usize, - const MAX_COLUMNS: usize, - const MAX_FIELD_PER_EVM: usize, ->() -> PublicParameters +pub fn build_circuits_params( +) -> PublicParameters where [(); PAD_LEN(NODE_LEN)]:, + [(); MAX_COLUMNS - 2]:, + [(); MAX_COLUMNS - 1]:, + [(); MAX_COLUMNS - 0]:, { PublicParameters::build() } @@ -228,16 +216,15 @@ where /// Public API employed to generate a proof for the circuit specified by /// `CircuitInput`, employing the `circuit_params` generated with the /// `build_circuits_params` API. -pub fn generate_proof< - const NODE_LEN: usize, - const MAX_COLUMNS: usize, - const MAX_FIELD_PER_EVM: usize, ->( - circuit_params: &PublicParameters, - circuit_type: CircuitInput, +pub fn generate_proof( + circuit_params: &PublicParameters, + circuit_type: CircuitInput, ) -> Result> where [(); PAD_LEN(NODE_LEN)]:, + [(); MAX_COLUMNS - 2]:, + [(); MAX_COLUMNS - 1]:, + [(); MAX_COLUMNS - 0]:, { circuit_params.generate_proof(circuit_type)?.serialize() } @@ -393,11 +380,13 @@ impl_branch_circuits!(TestBranchCircuits, 1, 4, 9); /// 3 branch circuits + 1 extension + 1 leaf single + 1 leaf mapping + 1 leaf mapping of mappings + 1 leaf receipt const MAPPING_CIRCUIT_SET_SIZE: usize = 8; -impl - PublicParameters +impl PublicParameters where [(); PAD_LEN(NODE_LEN)]:, [(); >::HASH_SIZE]:, + [(); MAX_COLUMNS - 2]:, + [(); MAX_COLUMNS - 1]:, + [(); MAX_COLUMNS - 0]:, { /// Generates the circuit parameters for the MPT circuits. 
fn build() -> Self { @@ -414,24 +403,17 @@ where ); debug!("Building leaf single circuit"); - let leaf_single = circuit_builder - .build_circuit::>(()); + let leaf_single = circuit_builder.build_circuit::>(()); debug!("Building leaf mapping circuit"); - let leaf_mapping = circuit_builder.build_circuit::>(()); + let leaf_mapping = circuit_builder.build_circuit::>(()); debug!("Building leaf mapping of mappings circuit"); let leaf_mapping_of_mappings = - circuit_builder.build_circuit:: - >(()); + circuit_builder.build_circuit::>(()); debug!("Building leaf receipt circuit"); - let leaf_receipt = circuit_builder.build_circuit::>(()); + let leaf_receipt = circuit_builder.build_circuit::>(()); debug!("Building extension circuit"); let extension = circuit_builder.build_circuit::(()); @@ -468,7 +450,7 @@ where fn generate_proof( &self, - circuit_type: CircuitInput, + circuit_type: CircuitInput, ) -> Result { let set = &self.get_circuit_set(); match circuit_type { @@ -520,779 +502,806 @@ where } } -#[cfg(test)] -mod tests { - use super::{ - super::{public_inputs, StorageSlotInfo}, - *, - }; - use crate::{ - tests::{TEST_MAX_COLUMNS, TEST_MAX_FIELD_PER_EVM}, - values_extraction::{ - compute_leaf_mapping_metadata_digest, compute_leaf_mapping_of_mappings_metadata_digest, - compute_leaf_mapping_of_mappings_values_digest, compute_leaf_mapping_values_digest, - compute_leaf_single_metadata_digest, compute_leaf_single_values_digest, - identifier_raw_extra, - }, - MAX_LEAF_NODE_LEN, - }; - use alloy::primitives::Address; - use eth_trie::{EthTrie, MemoryDB, Trie}; - use itertools::Itertools; - use log::info; - use mp2_common::{ - eth::{StorageSlot, StorageSlotNode}, - group_hashing::weierstrass_to_point, - mpt_sequential::utils::bytes_to_nibbles, - types::MAPPING_LEAF_VALUE_LEN, - }; - use mp2_test::{ - mpt_sequential::{generate_random_storage_mpt, generate_receipt_test_info}, - utils::random_vector, - }; - use plonky2::field::types::Field; - use plonky2_ecgfp5::curve::curve::Point; - 
use rand::{thread_rng, Rng}; - use std::{str::FromStr, sync::Arc}; - - type CircuitInput = - super::CircuitInput; - type PublicParameters = - super::PublicParameters; - - #[derive(Debug)] - struct TestEthTrie { - trie: EthTrie, - mpt_keys: Vec>, - } - - #[test] - fn test_values_extraction_api_single_variable() { - const TEST_SLOTS: [u8; 2] = [5, 10]; - - let _ = env_logger::try_init(); - - let storage_slot1 = StorageSlot::Simple(TEST_SLOTS[0] as usize); - let storage_slot2 = StorageSlot::Simple(TEST_SLOTS[1] as usize); - - let table_info = TEST_SLOTS - .into_iter() - .map(|slot| { - let mut col_info = ColumnInfo::sample(); - col_info.slot = F::from_canonical_u8(slot); - col_info.evm_word = F::ZERO; - - col_info - }) - .collect_vec(); - - let test_slots = [ - StorageSlotInfo::new(storage_slot1, table_info.clone()), - StorageSlotInfo::new(storage_slot2, table_info), - ]; - - test_api(test_slots); - } - - #[test] - fn test_values_extraction_api_single_struct() { - const TEST_SLOT: u8 = 2; - const TEST_EVM_WORDS: [u32; 2] = [10, 20]; - - let _ = env_logger::try_init(); - - let parent_slot = StorageSlot::Simple(TEST_SLOT as usize); - let storage_slot1 = StorageSlot::Node(StorageSlotNode::new_struct( - parent_slot.clone(), - TEST_EVM_WORDS[0], - )); - let storage_slot2 = - StorageSlot::Node(StorageSlotNode::new_struct(parent_slot, TEST_EVM_WORDS[1])); - - let table_info = TEST_EVM_WORDS - .into_iter() - .map(|evm_word| { - let mut col_info = ColumnInfo::sample(); - col_info.slot = F::from_canonical_u8(TEST_SLOT); - col_info.evm_word = F::from_canonical_u32(evm_word); - - col_info - }) - .collect_vec(); - - let test_slots = [ - StorageSlotInfo::new(storage_slot1, table_info.clone()), - StorageSlotInfo::new(storage_slot2, table_info), - ]; - - test_api(test_slots); - } - - #[test] - fn test_values_extraction_api_mapping_variable() { - const TEST_SLOT: u8 = 2; - - let _ = env_logger::try_init(); - - let mapping_key1 = vec![10]; - let mapping_key2 = vec![20]; - let 
storage_slot1 = StorageSlot::Mapping(mapping_key1, TEST_SLOT as usize); - let storage_slot2 = StorageSlot::Mapping(mapping_key2, TEST_SLOT as usize); - - // The first and second column infos are same (only for testing). - let table_info = [0; 2] - .into_iter() - .map(|_| { - let mut col_info = ColumnInfo::sample(); - col_info.slot = F::from_canonical_u8(TEST_SLOT); - col_info.evm_word = F::ZERO; - - col_info - }) - .collect_vec(); - - let test_slots = [ - StorageSlotInfo::new(storage_slot1, table_info.clone()), - StorageSlotInfo::new(storage_slot2, table_info), - ]; - - test_api(test_slots); - } - - #[test] - fn test_values_extraction_api_mapping_struct() { - const TEST_SLOT: u8 = 2; - const TEST_EVM_WORDS: [u32; 2] = [10, 20]; - - let _ = env_logger::try_init(); - - let parent_slot = StorageSlot::Mapping(vec![10, 20], TEST_SLOT as usize); - let storage_slot1 = StorageSlot::Node(StorageSlotNode::new_struct( - parent_slot.clone(), - TEST_EVM_WORDS[0], - )); - let storage_slot2 = - StorageSlot::Node(StorageSlotNode::new_struct(parent_slot, TEST_EVM_WORDS[1])); - - let table_info = TEST_EVM_WORDS - .into_iter() - .map(|evm_word| { - let mut col_info = ColumnInfo::sample(); - col_info.slot = F::from_canonical_u8(TEST_SLOT); - col_info.evm_word = F::from_canonical_u32(evm_word); - - col_info - }) - .collect_vec(); - - let test_slots = [ - StorageSlotInfo::new(storage_slot1, table_info.clone()), - StorageSlotInfo::new(storage_slot2, table_info), - ]; - - test_api(test_slots); - } - - #[test] - fn test_values_extraction_api_mapping_of_mappings() { - const TEST_SLOT: u8 = 2; - const TEST_EVM_WORDS: [u32; 2] = [10, 20]; - - let _ = env_logger::try_init(); - - let grand_slot = StorageSlot::Mapping(vec![10, 20], TEST_SLOT as usize); - let parent_slot = - StorageSlot::Node(StorageSlotNode::new_mapping(grand_slot, vec![30, 40]).unwrap()); - let storage_slot1 = StorageSlot::Node(StorageSlotNode::new_struct( - parent_slot.clone(), - TEST_EVM_WORDS[0], - )); - let storage_slot2 = 
- StorageSlot::Node(StorageSlotNode::new_struct(parent_slot, TEST_EVM_WORDS[1])); - - let table_info = TEST_EVM_WORDS - .into_iter() - .map(|evm_word| { - let mut col_info = ColumnInfo::sample(); - col_info.slot = F::from_canonical_u8(TEST_SLOT); - col_info.evm_word = F::from_canonical_u32(evm_word); - - col_info - }) - .collect_vec(); - - let test_slots = [ - StorageSlotInfo::new(storage_slot1, table_info.clone()), - StorageSlotInfo::new(storage_slot2, table_info), - ]; - - test_api(test_slots); - } - - #[test] - fn test_values_extraction_api_branch_with_multiple_children() { - const TEST_SLOT: u8 = 2; - const NUM_CHILDREN: usize = 6; - - let _ = env_logger::try_init(); - - let storage_slot = StorageSlot::Simple(TEST_SLOT as usize); - let table_info = { - let mut col_info = ColumnInfo::sample(); - col_info.slot = F::from_canonical_u8(TEST_SLOT); - col_info.evm_word = F::ZERO; - - vec![col_info] - }; - let test_slot = StorageSlotInfo::new(storage_slot, table_info); - - test_branch_with_multiple_children(NUM_CHILDREN, test_slot); - } - - #[test] - fn test_values_extraction_api_serialization() { - const TEST_SLOT: u8 = 10; - const TEST_EVM_WORD: u32 = 5; - const TEST_OUTER_KEY: [u8; 2] = [10, 20]; - const TEST_INNER_KEY: [u8; 3] = [30, 40, 50]; - - let _ = env_logger::try_init(); - - let rng = &mut thread_rng(); - - // Test serialization for public parameters. - let params = PublicParameters::build(); - let encoded = bincode::serialize(¶ms).unwrap(); - let decoded_params: PublicParameters = bincode::deserialize(&encoded).unwrap(); - assert!(decoded_params == params); - - let test_circuit_input = |input: CircuitInput| { - // Test circuit input serialization. - let encoded_input = bincode::serialize(&input).unwrap(); - let decoded_input: CircuitInput = bincode::deserialize(&encoded_input).unwrap(); - - // Test proof serialization. 
- let proof = params.generate_proof(decoded_input).unwrap(); - let encoded_proof = bincode::serialize(&proof).unwrap(); - let decoded_proof: ProofWithVK = bincode::deserialize(&encoded_proof).unwrap(); - assert_eq!(proof, decoded_proof); - - encoded_proof - }; - - // Construct the table info for testing. - let table_info = { - let mut col_info = ColumnInfo::sample(); - col_info.slot = F::from_canonical_u8(TEST_SLOT); - col_info.evm_word = F::from_canonical_u32(TEST_EVM_WORD); - - vec![col_info] - }; - - // Test for single variable leaf. - let parent_slot = StorageSlot::Simple(TEST_SLOT as usize); - let storage_slot = StorageSlot::Node(StorageSlotNode::new_struct( - parent_slot.clone(), - TEST_EVM_WORD, - )); - let test_slot = StorageSlotInfo::new(storage_slot, table_info.clone()); - let mut test_trie = generate_test_trie(1, &test_slot); - let proof = test_trie.trie.get_proof(&test_trie.mpt_keys[0]).unwrap(); - test_circuit_input(CircuitInput::new_single_variable_leaf( - proof.last().unwrap().to_vec(), - TEST_SLOT, - TEST_EVM_WORD, - table_info.clone(), - )); - - // Test for mapping variable leaf. - let parent_slot = StorageSlot::Mapping(TEST_OUTER_KEY.to_vec(), TEST_SLOT as usize); - let storage_slot = StorageSlot::Node(StorageSlotNode::new_struct( - parent_slot.clone(), - TEST_EVM_WORD, - )); - let test_slot = StorageSlotInfo::new(storage_slot, table_info.clone()); - let mut test_trie = generate_test_trie(1, &test_slot); - let proof = test_trie.trie.get_proof(&test_trie.mpt_keys[0]).unwrap(); - let key_id = rng.gen(); - test_circuit_input(CircuitInput::new_mapping_variable_leaf( - proof.last().unwrap().to_vec(), - TEST_SLOT, - TEST_OUTER_KEY.to_vec(), - key_id, - TEST_EVM_WORD, - table_info.clone(), - )); - - // Test for mapping of mappings leaf. 
- let grand_slot = StorageSlot::Mapping(TEST_OUTER_KEY.to_vec(), TEST_SLOT as usize); - let parent_slot = StorageSlot::Node( - StorageSlotNode::new_mapping(grand_slot, TEST_INNER_KEY.to_vec()).unwrap(), - ); - let storage_slot = - StorageSlot::Node(StorageSlotNode::new_struct(parent_slot, TEST_EVM_WORD)); - let test_slot = StorageSlotInfo::new(storage_slot, table_info.clone()); - let mut test_trie = generate_test_trie(2, &test_slot); - let proof = test_trie.trie.get_proof(&test_trie.mpt_keys[0]).unwrap(); - let outer_key_id = rng.gen(); - let inner_key_id = rng.gen(); - let encoded = test_circuit_input(CircuitInput::new_mapping_of_mappings_leaf( - proof.last().unwrap().to_vec(), - TEST_SLOT, - (TEST_OUTER_KEY.to_vec(), outer_key_id), - (TEST_INNER_KEY.to_vec(), inner_key_id), - TEST_EVM_WORD, - table_info, - )); - - // Test for branch. - let branch_node = proof[proof.len() - 2].to_vec(); - test_circuit_input(CircuitInput::Branch(BranchInput { - input: InputNode { - node: branch_node.clone(), - }, - serialized_child_proofs: vec![encoded], - })); - } - - fn test_api(test_slots: [StorageSlotInfo; 2]) { - info!("Generating MPT proofs"); - let memdb = Arc::new(MemoryDB::new(true)); - let mut trie = EthTrie::new(memdb.clone()); - let mpt_keys = test_slots - .iter() - .map(|test_slot| { - let mpt_key = test_slot.slot.mpt_key(); - let value = random_vector(MAPPING_LEAF_VALUE_LEN); - trie.insert(&mpt_key, &rlp::encode(&value)).unwrap(); - mpt_key - }) - .collect_vec(); - trie.root_hash().unwrap(); - let mpt_proofs = mpt_keys - .into_iter() - .map(|key| trie.get_proof(&key).unwrap()) - .collect_vec(); - // Get the branch node. - let node_len = mpt_proofs[0].len(); - // Ensure both are located in the same branch. 
- assert_eq!(node_len, mpt_proofs[1].len()); - let branch_node = mpt_proofs[0][node_len - 2].clone(); - assert_eq!(branch_node, mpt_proofs[1][node_len - 2]); - - info!("Generating parameters"); - let params = build_circuits_params(); - - let leaf_proofs = test_slots - .into_iter() - .zip_eq(mpt_proofs) - .enumerate() - .map(|(i, (test_slot, mut leaf_proof))| { - info!("Proving leaf {i}"); - prove_leaf(¶ms, leaf_proof.pop().unwrap(), test_slot) - }) - .collect(); - - info!("Proving branch"); - let _branch_proof = prove_branch(¶ms, branch_node, leaf_proofs); - } - - /// Generate a branch proof. - fn prove_branch( - params: &PublicParameters, - node: Vec, - leaf_proofs: Vec>, - ) -> Vec { - let input = CircuitInput::new_branch(node, leaf_proofs); - generate_proof(params, input).unwrap() - } - #[test] - fn test_receipt_api() { - let receipt_proof_infos = generate_receipt_test_info::<1, 0>(); - let receipt_proofs = receipt_proof_infos.proofs(); - let query = receipt_proof_infos.query(); - // We check that we have enough receipts and then take the second and third info - // (the MPT proof for the first node is different). - // Then check that the node above both is a branch. 
- assert!(receipt_proofs.len() > 3); - let second_info = &receipt_proofs[1]; - let third_info = &receipt_proofs[2]; - - let proof_length_1 = second_info.mpt_proof.len(); - let proof_length_2 = third_info.mpt_proof.len(); - - let list_one = rlp::decode_list::>(&second_info.mpt_proof[proof_length_1 - 2]); - let list_two = rlp::decode_list::>(&third_info.mpt_proof[proof_length_2 - 2]); - - assert!(list_one == list_two); - assert!(list_one.len() == 17); - - println!("Generating params..."); - let params = build_circuits_params(); - - println!("Proving leaf 1..."); - let leaf_input_1 = CircuitInput::new_receipt_leaf(second_info, query); - let now = std::time::Instant::now(); - let leaf_proof1 = generate_proof(¶ms, leaf_input_1).unwrap(); - { - let lp = ProofWithVK::deserialize(&leaf_proof1).unwrap(); - let pub1 = PublicInputs::new(&lp.proof.public_inputs); - let (_, ptr) = pub1.mpt_key_info(); - println!("pointer: {}", ptr); - } - println!( - "Proof for leaf 1 generated in {} ms", - now.elapsed().as_millis() - ); - - println!("Proving leaf 2..."); - let leaf_input_2 = CircuitInput::new_receipt_leaf(third_info, query); - let now = std::time::Instant::now(); - let leaf_proof2 = generate_proof(¶ms, leaf_input_2).unwrap(); - println!( - "Proof for leaf 2 generated in {} ms", - now.elapsed().as_millis() - ); - - // The branch case for receipts is identical to that of a mapping so we use the same api. - println!("Proving branch..."); - let branch_input = CircuitInput::new_branch( - second_info.mpt_proof[proof_length_1 - 2].clone(), - vec![leaf_proof1, leaf_proof2], - ); - - let now = std::time::Instant::now(); - generate_proof(¶ms, branch_input).unwrap(); - println!( - "Proof for branch node generated in {} ms", - now.elapsed().as_millis() - ); - } - - /// Generate a leaf proof. 
- fn prove_leaf(params: &PublicParameters, node: Vec, test_slot: StorageSlotInfo) -> Vec { - // RLP(RLP(compact(partial_key_in_nibble)), RLP(value)) - let leaf_tuple: Vec> = rlp::decode_list(&node); - assert_eq!(leaf_tuple.len(), 2); - let value = leaf_tuple[1][1..].to_vec().try_into().unwrap(); - - let evm_word = test_slot.evm_word(); - let table_info = test_slot.table_info(); - let metadata = test_slot.metadata::(); - let extracted_column_identifiers = metadata.extracted_column_identifiers(); - - // Build the identifier extra data, it's used to compute the key IDs. - const TEST_CONTRACT_ADDRESS: &str = "0x105dD0eF26b92a3698FD5AaaF688577B9Cafd970"; - const TEST_CHAIN_ID: u64 = 1000; - let id_extra = identifier_raw_extra( - &Address::from_str(TEST_CONTRACT_ADDRESS).unwrap(), - TEST_CHAIN_ID, - vec![], - ); - - let (expected_metadata_digest, expected_values_digest, circuit_input) = match &test_slot - .slot - { - // Simple variable slot - StorageSlot::Simple(slot) => { - let metadata_digest = compute_leaf_single_metadata_digest::< - TEST_MAX_COLUMNS, - TEST_MAX_FIELD_PER_EVM, - >(table_info.to_vec()); - - let values_digest = compute_leaf_single_values_digest::( - table_info.to_vec(), - &extracted_column_identifiers, - value, - ); - - let circuit_input = CircuitInput::new_single_variable_leaf( - node, - *slot as u8, - evm_word, - table_info.to_vec(), - ); - - (metadata_digest, values_digest, circuit_input) - } - // Mapping variable - StorageSlot::Mapping(mapping_key, slot) => { - let outer_key_id = test_slot.outer_key_id_raw(id_extra).unwrap(); - let metadata_digest = compute_leaf_mapping_metadata_digest::< - TEST_MAX_COLUMNS, - TEST_MAX_FIELD_PER_EVM, - >( - table_info.to_vec(), *slot as u8, outer_key_id - ); - - let values_digest = compute_leaf_mapping_values_digest::( - table_info.to_vec(), - &extracted_column_identifiers, - value, - mapping_key.clone(), - evm_word, - outer_key_id, - ); - - let circuit_input = CircuitInput::new_mapping_variable_leaf( - node, - 
*slot as u8, - mapping_key.clone(), - outer_key_id, - evm_word, - table_info.to_vec(), - ); - - (metadata_digest, values_digest, circuit_input) - } - StorageSlot::Node(StorageSlotNode::Struct(parent, _)) => match *parent.clone() { - // Simple Struct - StorageSlot::Simple(slot) => { - let metadata_digest = compute_leaf_single_metadata_digest::< - TEST_MAX_COLUMNS, - TEST_MAX_FIELD_PER_EVM, - >(table_info.to_vec()); - - let values_digest = compute_leaf_single_values_digest::( - table_info.to_vec(), - &extracted_column_identifiers, - value, - ); - - let circuit_input = CircuitInput::new_single_variable_leaf( - node, - slot as u8, - evm_word, - table_info.to_vec(), - ); - - (metadata_digest, values_digest, circuit_input) - } - // Mapping Struct - StorageSlot::Mapping(mapping_key, slot) => { - let outer_key_id = test_slot.outer_key_id_raw(id_extra).unwrap(); - let metadata_digest = - compute_leaf_mapping_metadata_digest::< - TEST_MAX_COLUMNS, - TEST_MAX_FIELD_PER_EVM, - >(table_info.to_vec(), slot as u8, outer_key_id); - - let values_digest = compute_leaf_mapping_values_digest::( - table_info.to_vec(), - &extracted_column_identifiers, - value, - mapping_key.clone(), - evm_word, - outer_key_id, - ); - - let circuit_input = CircuitInput::new_mapping_variable_leaf( - node, - slot as u8, - mapping_key, - outer_key_id, - evm_word, - table_info.to_vec(), - ); - - (metadata_digest, values_digest, circuit_input) - } - // Mapping of mappings Struct - StorageSlot::Node(StorageSlotNode::Mapping(grand, inner_mapping_key)) => { - match *grand { - StorageSlot::Mapping(outer_mapping_key, slot) => { - let outer_key_id = - test_slot.outer_key_id_raw(id_extra.clone()).unwrap(); - let inner_key_id = test_slot.inner_key_id_raw(id_extra).unwrap(); - let metadata_digest = - compute_leaf_mapping_of_mappings_metadata_digest::< - TEST_MAX_COLUMNS, - TEST_MAX_FIELD_PER_EVM, - >( - table_info.to_vec(), slot as u8, outer_key_id, inner_key_id - ); - - let values_digest = 
compute_leaf_mapping_of_mappings_values_digest::< - TEST_MAX_FIELD_PER_EVM, - >( - table_info.to_vec(), - &extracted_column_identifiers, - value, - evm_word, - (outer_mapping_key.clone(), outer_key_id), - (inner_mapping_key.clone(), inner_key_id), - ); - - let circuit_input = CircuitInput::new_mapping_of_mappings_leaf( - node, - slot as u8, - (outer_mapping_key, outer_key_id), - (inner_mapping_key, inner_key_id), - evm_word, - table_info.to_vec(), - ); - - (metadata_digest, values_digest, circuit_input) - } - _ => unreachable!(), - } - } - _ => unreachable!(), - }, - _ => unreachable!(), - }; - - let proof = generate_proof(params, circuit_input).unwrap(); - - // Check the metadata digest of public inputs. - let decoded_proof = ProofWithVK::deserialize(&proof).unwrap(); - let pi = PublicInputs::new(&decoded_proof.proof.public_inputs); - assert_eq!( - pi.metadata_digest(), - expected_metadata_digest.to_weierstrass() - ); - assert_eq!(pi.values_digest(), expected_values_digest.to_weierstrass()); - - proof - } - - /// Generate a MPT trie with sepcified number of children. - fn generate_test_trie(num_children: usize, storage_slot: &StorageSlotInfo) -> TestEthTrie { - let (mut trie, _) = generate_random_storage_mpt::<3, 32>(); - - let mut mpt_key = storage_slot.slot.mpt_key_vec(); - let mpt_len = mpt_key.len(); - let last_byte = mpt_key[mpt_len - 1]; - let first_nibble = last_byte & 0xF0; - let second_nibble = last_byte & 0x0F; - - // Generate the test MPT keys. - let mut mpt_keys = Vec::new(); - for i in 0..num_children { - // Only change the last nibble. - mpt_key[mpt_len - 1] = first_nibble + ((second_nibble + i as u8) & 0x0F); - mpt_keys.push(mpt_key.clone()); - } - - // Add the MPT keys to the trie. 
- let value = rlp::encode(&random_vector(32)).to_vec(); - mpt_keys - .iter() - .for_each(|key| trie.insert(key, &value).unwrap()); - trie.root_hash().unwrap(); - - TestEthTrie { trie, mpt_keys } - } - - /// Test the proof generation of one branch with the specified number of children. - fn test_branch_with_multiple_children(num_children: usize, test_slot: StorageSlotInfo) { - info!("Generating test trie"); - let mut test_trie = generate_test_trie(num_children, &test_slot); - - let mpt_key1 = test_trie.mpt_keys[0].as_slice(); - let mpt_key2 = test_trie.mpt_keys[1].as_slice(); - let proof1 = test_trie.trie.get_proof(mpt_key1).unwrap(); - let proof2 = test_trie.trie.get_proof(mpt_key2).unwrap(); - let node_len = proof1.len(); - // Get the branch node. - let branch_node = proof1[node_len - 2].clone(); - // Ensure both are located in the same branch. - assert_eq!(node_len, proof2.len()); - assert_eq!(branch_node, proof2[node_len - 2]); - - info!("Generating parameters"); - let params = build_circuits_params(); - - // Generate the branch proof with one leaf. - println!("Generating leaf proof"); - let leaf_proof_buf1 = prove_leaf(¶ms, proof1[node_len - 1].clone(), test_slot); - let leaf_proof1 = ProofWithVK::deserialize(&leaf_proof_buf1).unwrap(); - let pub1 = leaf_proof1.proof.public_inputs[..NUM_IO].to_vec(); - let pi1 = PublicInputs::new(&pub1); - assert_eq!(pi1.proof_inputs.len(), NUM_IO); - let (_, comp_ptr) = pi1.mpt_key_info(); - assert_eq!(comp_ptr, F::from_canonical_usize(63)); - println!("Generating branch proof with one leaf"); - let branch_proof = - prove_branch(¶ms, branch_node.clone(), vec![leaf_proof_buf1.clone()]); - let branch_proof = ProofWithVK::deserialize(&branch_proof).unwrap(); - let exp_vk = params.branches.b1.get_verifier_data(); - assert_eq!(branch_proof.verifier_data(), exp_vk); - - // Generate a fake proof for testing branch circuit. 
- let gen_fake_proof = |mpt_key| { - let mut pub2 = pub1.clone(); - assert_eq!(pub2.len(), NUM_IO); - pub2[public_inputs::K_RANGE].copy_from_slice( - &bytes_to_nibbles(mpt_key) - .into_iter() - .map(F::from_canonical_u8) - .collect_vec(), - ); - assert_eq!(pub2.len(), pub1.len()); - - let pi2 = PublicInputs::new(&pub2); - { - let (k1, p1) = pi1.mpt_key_info(); - let (k2, p2) = pi2.mpt_key_info(); - let (pt1, pt2) = ( - p1.to_canonical_u64() as usize, - p2.to_canonical_u64() as usize, - ); - assert!(pt1 < k1.len() && pt2 < k2.len()); - assert!(p1 == p2); - assert!(k1[..pt1] == k2[..pt2]); - } - let fake_proof = params - .set - .generate_input_proofs([pub2.clone().try_into().unwrap()]) - .unwrap(); - let vk = params.set.verifier_data_for_input_proofs::<1>()[0].clone(); - ProofWithVK::from((fake_proof[0].clone(), vk)) - .serialize() - .unwrap() - }; - - // Check the public input of branch proof. - let check_branch_public_inputs = |num_children, branch_proof: &ProofWithVK| { - let [leaf_pi, branch_pi] = [&leaf_proof1, branch_proof] - .map(|proof| PublicInputs::new(&proof.proof().public_inputs[..NUM_IO])); - - let leaf_metadata_digest = leaf_pi.metadata_digest(); - let leaf_values_digest = weierstrass_to_point(&leaf_pi.values_digest()); - let branch_values_digest = - (0..num_children).fold(Point::NEUTRAL, |acc, _| acc + leaf_values_digest); - assert_eq!(branch_pi.metadata_digest(), leaf_metadata_digest); - assert_eq!( - branch_pi.values_digest(), - branch_values_digest.to_weierstrass() - ); - assert_eq!(branch_pi.n(), F::from_canonical_usize(num_children)); - }; - - info!("Generating branch with two leaves"); - let leaf_proof_buf2 = gen_fake_proof(mpt_key2); - let branch_proof = prove_branch( - ¶ms, - branch_node.clone(), - vec![leaf_proof_buf1.clone(), leaf_proof_buf2.clone()], - ); - let branch_proof = ProofWithVK::deserialize(&branch_proof).unwrap(); - let exp_vk = params.branches.b4.get_verifier_data().clone(); - assert_eq!(branch_proof.verifier_data(), &exp_vk); - 
check_branch_public_inputs(2, &branch_proof); - - // Generate `num_children - 2`` fake proofs. - let mut leaf_proofs = vec![leaf_proof_buf1, leaf_proof_buf2]; - for i in 2..num_children { - let leaf_proof = gen_fake_proof(test_trie.mpt_keys[i].as_slice()); - leaf_proofs.push(leaf_proof); - } - info!("Generating branch proof with {num_children} leaves"); - let branch_proof = prove_branch(¶ms, branch_node, leaf_proofs); - let branch_proof = ProofWithVK::deserialize(&branch_proof).unwrap(); - let exp_vk = params.branches.b9.get_verifier_data().clone(); - assert_eq!(branch_proof.verifier_data(), &exp_vk); - check_branch_public_inputs(num_children, &branch_proof); - } -} +// #[cfg(test)] +// mod tests { +// use super::{ +// super::{public_inputs, StorageSlotInfo}, +// *, +// }; +// use crate::{ +// tests::{TEST_MAX_COLUMNS, TEST_MAX_FIELD_PER_EVM}, +// values_extraction::{ +// compute_leaf_mapping_metadata_digest, compute_leaf_mapping_of_mappings_metadata_digest, +// compute_leaf_mapping_of_mappings_values_digest, compute_leaf_mapping_values_digest, +// compute_leaf_single_metadata_digest, compute_leaf_single_values_digest, +// identifier_raw_extra, +// }, +// MAX_LEAF_NODE_LEN, +// }; +// use alloy::primitives::Address; +// use eth_trie::{EthTrie, MemoryDB, Trie}; +// use itertools::Itertools; +// use log::info; +// use mp2_common::{ +// eth::{StorageSlot, StorageSlotNode}, +// group_hashing::weierstrass_to_point, +// mpt_sequential::utils::bytes_to_nibbles, +// types::MAPPING_LEAF_VALUE_LEN, +// }; +// use mp2_test::{ +// mpt_sequential::{generate_random_storage_mpt, generate_receipt_test_info}, +// utils::random_vector, +// }; +// use plonky2::field::types::Field; +// use plonky2_ecgfp5::curve::curve::Point; +// use rand::{thread_rng, Rng}; +// use std::{str::FromStr, sync::Arc}; + +// type CircuitInput = super::CircuitInput; +// type PublicParameters = super::PublicParameters; + +// #[derive(Debug)] +// struct TestEthTrie { +// trie: EthTrie, +// mpt_keys: Vec>, +// 
} + +// #[test] +// fn test_values_extraction_api_single_variable() { +// const TEST_SLOTS: [u8; 2] = [5, 10]; + +// let _ = env_logger::try_init(); + +// let storage_slot1 = StorageSlot::Simple(TEST_SLOTS[0] as usize); +// let storage_slot2 = StorageSlot::Simple(TEST_SLOTS[1] as usize); + +// let table_info = TEST_SLOTS +// .into_iter() +// .map(|slot| { +// let mut col_info = ColumnInfo::sample(); +// col_info.slot = F::from_canonical_u8(slot); +// col_info.evm_word = F::ZERO; + +// col_info +// }) +// .collect_vec(); + +// let test_slots = [ +// StorageSlotInfo::new(storage_slot1, table_info.clone()), +// StorageSlotInfo::new(storage_slot2, table_info), +// ]; + +// test_api(test_slots); +// } + +// #[test] +// fn test_values_extraction_api_single_struct() { +// const TEST_SLOT: u8 = 2; +// const TEST_EVM_WORDS: [u32; 2] = [10, 20]; + +// let _ = env_logger::try_init(); + +// let parent_slot = StorageSlot::Simple(TEST_SLOT as usize); +// let storage_slot1 = StorageSlot::Node(StorageSlotNode::new_struct( +// parent_slot.clone(), +// TEST_EVM_WORDS[0], +// )); +// let storage_slot2 = +// StorageSlot::Node(StorageSlotNode::new_struct(parent_slot, TEST_EVM_WORDS[1])); + +// let table_info = TEST_EVM_WORDS +// .into_iter() +// .map(|evm_word| { +// let mut col_info = ColumnInfo::sample(); +// col_info.slot = F::from_canonical_u8(TEST_SLOT); +// col_info.evm_word = F::from_canonical_u32(evm_word); + +// col_info +// }) +// .collect_vec(); + +// let test_slots = [ +// StorageSlotInfo::new(storage_slot1, table_info.clone()), +// StorageSlotInfo::new(storage_slot2, table_info), +// ]; + +// test_api(test_slots); +// } + +// #[test] +// fn test_values_extraction_api_mapping_variable() { +// const TEST_SLOT: u8 = 2; + +// let _ = env_logger::try_init(); + +// let mapping_key1 = vec![10]; +// let mapping_key2 = vec![20]; +// let storage_slot1 = StorageSlot::Mapping(mapping_key1, TEST_SLOT as usize); +// let storage_slot2 = StorageSlot::Mapping(mapping_key2, TEST_SLOT as 
usize); + +// // The first and second column infos are same (only for testing). +// let table_info = [0; 2] +// .into_iter() +// .map(|_| { +// let mut col_info = ColumnInfo::sample(); +// col_info.slot = F::from_canonical_u8(TEST_SLOT); +// col_info.evm_word = F::ZERO; + +// col_info +// }) +// .collect_vec(); + +// let test_slots = [ +// StorageSlotInfo::new(storage_slot1, table_info.clone()), +// StorageSlotInfo::new(storage_slot2, table_info), +// ]; + +// test_api(test_slots); +// } + +// #[test] +// fn test_values_extraction_api_mapping_struct() { +// const TEST_SLOT: u8 = 2; +// const TEST_EVM_WORDS: [u32; 2] = [10, 20]; + +// let _ = env_logger::try_init(); + +// let parent_slot = StorageSlot::Mapping(vec![10, 20], TEST_SLOT as usize); +// let storage_slot1 = StorageSlot::Node(StorageSlotNode::new_struct( +// parent_slot.clone(), +// TEST_EVM_WORDS[0], +// )); +// let storage_slot2 = +// StorageSlot::Node(StorageSlotNode::new_struct(parent_slot, TEST_EVM_WORDS[1])); + +// let table_info = TEST_EVM_WORDS +// .into_iter() +// .map(|evm_word| { +// let mut col_info = ColumnInfo::sample(); +// col_info.slot = F::from_canonical_u8(TEST_SLOT); +// col_info.evm_word = F::from_canonical_u32(evm_word); + +// col_info +// }) +// .collect_vec(); + +// let test_slots = [ +// StorageSlotInfo::new(storage_slot1, table_info.clone()), +// StorageSlotInfo::new(storage_slot2, table_info), +// ]; + +// test_api(test_slots); +// } + +// #[test] +// fn test_values_extraction_api_mapping_of_mappings() { +// const TEST_SLOT: u8 = 2; +// const TEST_EVM_WORDS: [u32; 2] = [10, 20]; + +// let _ = env_logger::try_init(); + +// let grand_slot = StorageSlot::Mapping(vec![10, 20], TEST_SLOT as usize); +// let parent_slot = +// StorageSlot::Node(StorageSlotNode::new_mapping(grand_slot, vec![30, 40]).unwrap()); +// let storage_slot1 = StorageSlot::Node(StorageSlotNode::new_struct( +// parent_slot.clone(), +// TEST_EVM_WORDS[0], +// )); +// let storage_slot2 = +// 
StorageSlot::Node(StorageSlotNode::new_struct(parent_slot, TEST_EVM_WORDS[1])); + +// let table_info = TEST_EVM_WORDS +// .into_iter() +// .map(|evm_word| { +// let mut col_info = ColumnInfo::sample(); +// col_info.slot = F::from_canonical_u8(TEST_SLOT); +// col_info.evm_word = F::from_canonical_u32(evm_word); + +// col_info +// }) +// .collect_vec(); + +// let test_slots = [ +// StorageSlotInfo::new(storage_slot1, table_info.clone()), +// StorageSlotInfo::new(storage_slot2, table_info), +// ]; + +// test_api(test_slots); +// } + +// #[test] +// fn test_values_extraction_api_branch_with_multiple_children() { +// const TEST_SLOT: u8 = 2; +// const NUM_CHILDREN: usize = 6; + +// let _ = env_logger::try_init(); + +// let storage_slot = StorageSlot::Simple(TEST_SLOT as usize); +// let table_info = { +// let mut col_info = ColumnInfo::sample(); +// col_info.slot = F::from_canonical_u8(TEST_SLOT); +// col_info.evm_word = F::ZERO; + +// vec![col_info] +// }; +// let test_slot = StorageSlotInfo::new(storage_slot, table_info); + +// test_branch_with_multiple_children(NUM_CHILDREN, test_slot); +// } + +// #[test] +// fn test_values_extraction_api_serialization() { +// const TEST_SLOT: u8 = 10; +// const TEST_EVM_WORD: u32 = 5; +// const TEST_OUTER_KEY: [u8; 2] = [10, 20]; +// const TEST_INNER_KEY: [u8; 3] = [30, 40, 50]; + +// let _ = env_logger::try_init(); + +// let rng = &mut thread_rng(); + +// // Test serialization for public parameters. +// let params = PublicParameters::build(); +// let encoded = bincode::serialize(¶ms).unwrap(); +// let decoded_params: PublicParameters = bincode::deserialize(&encoded).unwrap(); +// assert!(decoded_params == params); + +// let test_circuit_input = |input: CircuitInput| { +// // Test circuit input serialization. +// let encoded_input = bincode::serialize(&input).unwrap(); +// let decoded_input: CircuitInput = bincode::deserialize(&encoded_input).unwrap(); + +// // Test proof serialization. 
+// let proof = params.generate_proof(decoded_input).unwrap(); +// let encoded_proof = bincode::serialize(&proof).unwrap(); +// let decoded_proof: ProofWithVK = bincode::deserialize(&encoded_proof).unwrap(); +// assert_eq!(proof, decoded_proof); + +// encoded_proof +// }; + +// // Construct the table info for testing. +// let table_info = { +// vec![ExtractedColumnInfo::sample( +// true, +// &[ +// F::ZERO, +// F::ZERO, +// F::ZERO, +// F::ZERO, +// F::ZERO, +// F::ZERO, +// F::ZERO, +// F::from_canonical_u8(TEST_SLOT), +// ], +// F::from_canonical_u32(TEST_EVM_WORD), +// )] +// }; + +// // Test for single variable leaf. +// let parent_slot = StorageSlot::Simple(TEST_SLOT as usize); +// let storage_slot = StorageSlot::Node(StorageSlotNode::new_struct( +// parent_slot.clone(), +// TEST_EVM_WORD, +// )); +// let test_slot = StorageSlotInfo::new(storage_slot, table_info.clone()); +// let mut test_trie = generate_test_trie(1, &test_slot); +// let proof = test_trie.trie.get_proof(&test_trie.mpt_keys[0]).unwrap(); +// test_circuit_input(CircuitInput::new_single_variable_leaf( +// proof.last().unwrap().to_vec(), +// TEST_SLOT, +// TEST_EVM_WORD, +// table_info.clone(), +// )); + +// // Test for mapping variable leaf. +// let parent_slot = StorageSlot::Mapping(TEST_OUTER_KEY.to_vec(), TEST_SLOT as usize); +// let storage_slot = StorageSlot::Node(StorageSlotNode::new_struct( +// parent_slot.clone(), +// TEST_EVM_WORD, +// )); +// let test_slot = StorageSlotInfo::new(storage_slot, table_info.clone()); +// let mut test_trie = generate_test_trie(1, &test_slot); +// let proof = test_trie.trie.get_proof(&test_trie.mpt_keys[0]).unwrap(); +// let key_id = rng.gen(); +// test_circuit_input(CircuitInput::new_mapping_variable_leaf( +// proof.last().unwrap().to_vec(), +// TEST_SLOT, +// TEST_OUTER_KEY.to_vec(), +// key_id, +// TEST_EVM_WORD, +// table_info.clone(), +// )); + +// // Test for mapping of mappings leaf. 
+// let grand_slot = StorageSlot::Mapping(TEST_OUTER_KEY.to_vec(), TEST_SLOT as usize); +// let parent_slot = StorageSlot::Node( +// StorageSlotNode::new_mapping(grand_slot, TEST_INNER_KEY.to_vec()).unwrap(), +// ); +// let storage_slot = +// StorageSlot::Node(StorageSlotNode::new_struct(parent_slot, TEST_EVM_WORD)); +// let test_slot = StorageSlotInfo::new(storage_slot, table_info.clone()); +// let mut test_trie = generate_test_trie(2, &test_slot); +// let proof = test_trie.trie.get_proof(&test_trie.mpt_keys[0]).unwrap(); +// let outer_key_id = rng.gen(); +// let inner_key_id = rng.gen(); +// let encoded = test_circuit_input(CircuitInput::new_mapping_of_mappings_leaf( +// proof.last().unwrap().to_vec(), +// TEST_SLOT, +// TEST_OUTER_KEY.to_vec(), +// TEST_INNER_KEY.to_vec(), +// outer_key_id, +// inner_key_id, +// TEST_EVM_WORD, +// table_info, +// )); + +// // Test for branch. +// let branch_node = proof[proof.len() - 2].to_vec(); +// test_circuit_input(CircuitInput::Branch(BranchInput { +// input: InputNode { +// node: branch_node.clone(), +// }, +// serialized_child_proofs: vec![encoded], +// })); +// } + +// fn test_api(test_slots: [StorageSlotInfo; 2]) { +// info!("Generating MPT proofs"); +// let memdb = Arc::new(MemoryDB::new(true)); +// let mut trie = EthTrie::new(memdb.clone()); +// let mpt_keys = test_slots +// .iter() +// .map(|test_slot| { +// let mpt_key = test_slot.slot.mpt_key(); +// let value = random_vector(MAPPING_LEAF_VALUE_LEN); +// trie.insert(&mpt_key, &rlp::encode(&value)).unwrap(); +// mpt_key +// }) +// .collect_vec(); +// trie.root_hash().unwrap(); +// let mpt_proofs = mpt_keys +// .into_iter() +// .map(|key| trie.get_proof(&key).unwrap()) +// .collect_vec(); +// // Get the branch node. +// let node_len = mpt_proofs[0].len(); +// // Ensure both are located in the same branch. 
+// assert_eq!(node_len, mpt_proofs[1].len()); +// let branch_node = mpt_proofs[0][node_len - 2].clone(); +// assert_eq!(branch_node, mpt_proofs[1][node_len - 2]); + +// info!("Generating parameters"); +// let params = build_circuits_params(); + +// let leaf_proofs = test_slots +// .into_iter() +// .zip_eq(mpt_proofs) +// .enumerate() +// .map(|(i, (test_slot, mut leaf_proof))| { +// info!("Proving leaf {i}"); +// prove_leaf(¶ms, leaf_proof.pop().unwrap(), test_slot) +// }) +// .collect(); + +// info!("Proving branch"); +// let _branch_proof = prove_branch(¶ms, branch_node, leaf_proofs); +// } + +// /// Generate a branch proof. +// fn prove_branch( +// params: &PublicParameters, +// node: Vec, +// leaf_proofs: Vec>, +// ) -> Vec { +// let input = CircuitInput::new_branch(node, leaf_proofs); +// generate_proof(params, input).unwrap() +// } +// #[test] +// fn test_receipt_api() { +// let receipt_proof_infos = generate_receipt_test_info::<1, 0>(); +// let receipt_proofs = receipt_proof_infos.proofs(); +// let query = receipt_proof_infos.query(); +// // We need two nodes that are children of the same branch so we compare the last but two nodes for each of them until we find a case that works +// let (info_one, info_two) = if let Some((one, two)) = receipt_proofs +// .iter() +// .enumerate() +// .find_map(|(i, current_proof)| { +// let current_node_second_to_last = +// current_proof.mpt_proof[current_proof.mpt_proof.len() - 2].clone(); +// receipt_proofs +// .iter() +// .skip(i + 1) +// .find(|find_info| { +// find_info.mpt_proof[find_info.mpt_proof.len() - 2].clone() +// == current_node_second_to_last +// }) +// .map(|matching| (current_proof, matching)) +// }) { +// (one, two) +// } else { +// panic!("No relevant events with same branch node parent") +// }; + +// let proof_length_1 = info_one.mpt_proof.len(); +// let proof_length_2 = info_two.mpt_proof.len(); + +// let list_one = rlp::decode_list::>(&info_one.mpt_proof[proof_length_1 - 2]); +// let list_two = 
rlp::decode_list::>(&info_two.mpt_proof[proof_length_2 - 2]); + +// assert_eq!(list_one, list_two); +// assert!(list_one.len() == 17); + +// println!("Generating params..."); +// let params = build_circuits_params(); + +// println!("Proving leaf 1..."); +// let leaf_input_1 = CircuitInput::new_receipt_leaf(info_one, query); +// let now = std::time::Instant::now(); +// let leaf_proof1 = generate_proof(¶ms, leaf_input_1).unwrap(); +// { +// let lp = ProofWithVK::deserialize(&leaf_proof1).unwrap(); +// let pub1 = PublicInputs::new(&lp.proof.public_inputs); +// let (_, ptr) = pub1.mpt_key_info(); +// println!("pointer: {}", ptr); +// } +// println!( +// "Proof for leaf 1 generated in {} ms", +// now.elapsed().as_millis() +// ); + +// println!("Proving leaf 2..."); +// let leaf_input_2 = CircuitInput::new_receipt_leaf(info_two, query); +// let now = std::time::Instant::now(); +// let leaf_proof2 = generate_proof(¶ms, leaf_input_2).unwrap(); +// println!( +// "Proof for leaf 2 generated in {} ms", +// now.elapsed().as_millis() +// ); + +// // The branch case for receipts is identical to that of a mapping so we use the same api. +// println!("Proving branch..."); +// let branch_input = CircuitInput::new_branch( +// info_two.mpt_proof[proof_length_1 - 2].clone(), +// vec![leaf_proof1, leaf_proof2], +// ); + +// let now = std::time::Instant::now(); +// generate_proof(¶ms, branch_input).unwrap(); +// println!( +// "Proof for branch node generated in {} ms", +// now.elapsed().as_millis() +// ); +// } + +// /// Generate a leaf proof. 
+// fn prove_leaf(params: &PublicParameters, node: Vec, test_slot: StorageSlotInfo) -> Vec { +// // RLP(RLP(compact(partial_key_in_nibble)), RLP(value)) +// let leaf_tuple: Vec> = rlp::decode_list(&node); +// assert_eq!(leaf_tuple.len(), 2); +// let value = leaf_tuple[1][1..].to_vec().try_into().unwrap(); + +// let evm_word = test_slot.evm_word(); +// let table_info = test_slot.table_info(); +// let metadata = test_slot.metadata::(); +// let extracted_column_identifiers = metadata.extracted_column_identifiers(); + +// // Build the identifier extra data, it's used to compute the key IDs. +// const TEST_CONTRACT_ADDRESS: &str = "0x105dD0eF26b92a3698FD5AaaF688577B9Cafd970"; +// const TEST_CHAIN_ID: u64 = 1000; +// let id_extra = identifier_raw_extra( +// &Address::from_str(TEST_CONTRACT_ADDRESS).unwrap(), +// TEST_CHAIN_ID, +// vec![], +// ); + +// let (expected_metadata_digest, expected_values_digest, circuit_input) = match &test_slot +// .slot +// { +// // Simple variable slot +// StorageSlot::Simple(slot) => { +// let metadata_digest = compute_leaf_single_metadata_digest::< +// TEST_MAX_COLUMNS, +// TEST_MAX_FIELD_PER_EVM, +// >(table_info.to_vec()); + +// let values_digest = compute_leaf_single_values_digest::( +// table_info.to_vec(), +// &extracted_column_identifiers, +// value, +// ); + +// let circuit_input = CircuitInput::new_single_variable_leaf( +// node, +// *slot as u8, +// evm_word, +// table_info.to_vec(), +// ); + +// (metadata_digest, values_digest, circuit_input) +// } +// // Mapping variable +// StorageSlot::Mapping(mapping_key, slot) => { +// let outer_key_id = test_slot.outer_key_id_raw(id_extra).unwrap(); +// let metadata_digest = compute_leaf_mapping_metadata_digest::< +// TEST_MAX_COLUMNS, +// TEST_MAX_FIELD_PER_EVM, +// >( +// table_info.to_vec(), *slot as u8, outer_key_id +// ); + +// let values_digest = compute_leaf_mapping_values_digest::( +// table_info.to_vec(), +// &extracted_column_identifiers, +// value, +// mapping_key.clone(), +// 
evm_word, +// outer_key_id, +// ); + +// let circuit_input = CircuitInput::new_mapping_variable_leaf( +// node, +// *slot as u8, +// mapping_key.clone(), +// outer_key_id, +// evm_word, +// table_info.to_vec(), +// ); + +// (metadata_digest, values_digest, circuit_input) +// } +// StorageSlot::Node(StorageSlotNode::Struct(parent, _)) => match *parent.clone() { +// // Simple Struct +// StorageSlot::Simple(slot) => { +// let metadata_digest = compute_leaf_single_metadata_digest::< +// TEST_MAX_COLUMNS, +// TEST_MAX_FIELD_PER_EVM, +// >(table_info.to_vec()); + +// let values_digest = compute_leaf_single_values_digest::( +// table_info.to_vec(), +// &extracted_column_identifiers, +// value, +// ); + +// let circuit_input = CircuitInput::new_single_variable_leaf( +// node, +// slot as u8, +// evm_word, +// table_info.to_vec(), +// ); + +// (metadata_digest, values_digest, circuit_input) +// } +// // Mapping Struct +// StorageSlot::Mapping(mapping_key, slot) => { +// let outer_key_id = test_slot.outer_key_id_raw(id_extra).unwrap(); +// let metadata_digest = +// compute_leaf_mapping_metadata_digest::< +// TEST_MAX_COLUMNS, +// TEST_MAX_FIELD_PER_EVM, +// >(table_info.to_vec(), slot as u8, outer_key_id); + +// let values_digest = compute_leaf_mapping_values_digest::( +// table_info.to_vec(), +// &extracted_column_identifiers, +// value, +// mapping_key.clone(), +// evm_word, +// outer_key_id, +// ); + +// let circuit_input = CircuitInput::new_mapping_variable_leaf( +// node, +// slot as u8, +// mapping_key, +// outer_key_id, +// evm_word, +// table_info.to_vec(), +// ); + +// (metadata_digest, values_digest, circuit_input) +// } +// // Mapping of mappings Struct +// StorageSlot::Node(StorageSlotNode::Mapping(grand, inner_mapping_key)) => { +// match *grand { +// StorageSlot::Mapping(outer_mapping_key, slot) => { +// let outer_key_id = +// test_slot.outer_key_id_raw(id_extra.clone()).unwrap(); +// let inner_key_id = test_slot.inner_key_id_raw(id_extra).unwrap(); +// let 
metadata_digest = +// compute_leaf_mapping_of_mappings_metadata_digest::< +// TEST_MAX_COLUMNS, +// TEST_MAX_FIELD_PER_EVM, +// >( +// table_info.to_vec(), slot as u8, outer_key_id, inner_key_id +// ); + +// let values_digest = compute_leaf_mapping_of_mappings_values_digest::< +// TEST_MAX_FIELD_PER_EVM, +// >( +// table_info.to_vec(), +// &extracted_column_identifiers, +// value, +// evm_word, +// outer_mapping_key.clone(), +// inner_mapping_key.clone(), +// outer_key_id, +// inner_key_id, +// ); + +// let circuit_input = CircuitInput::new_mapping_of_mappings_leaf( +// node, +// slot as u8, +// outer_mapping_key, +// inner_mapping_key, +// outer_key_id, +// inner_key_id, +// evm_word, +// table_info.to_vec(), +// ); + +// (metadata_digest, values_digest, circuit_input) +// } +// _ => unreachable!(), +// } +// } +// _ => unreachable!(), +// }, +// _ => unreachable!(), +// }; + +// let proof = generate_proof(params, circuit_input).unwrap(); + +// // Check the metadata digest of public inputs. +// let decoded_proof = ProofWithVK::deserialize(&proof).unwrap(); +// let pi = PublicInputs::new(&decoded_proof.proof.public_inputs); +// assert_eq!( +// pi.metadata_digest(), +// expected_metadata_digest.to_weierstrass() +// ); +// assert_eq!(pi.values_digest(), expected_values_digest.to_weierstrass()); + +// proof +// } + +// /// Generate a MPT trie with sepcified number of children. +// fn generate_test_trie(num_children: usize, storage_slot: &StorageSlotInfo) -> TestEthTrie { +// let (mut trie, _) = generate_random_storage_mpt::<3, 32>(); + +// let mut mpt_key = storage_slot.slot.mpt_key_vec(); +// let mpt_len = mpt_key.len(); +// let last_byte = mpt_key[mpt_len - 1]; +// let first_nibble = last_byte & 0xF0; +// let second_nibble = last_byte & 0x0F; + +// // Generate the test MPT keys. +// let mut mpt_keys = Vec::new(); +// for i in 0..num_children { +// // Only change the last nibble. 
+// mpt_key[mpt_len - 1] = first_nibble + ((second_nibble + i as u8) & 0x0F); +// mpt_keys.push(mpt_key.clone()); +// } + +// // Add the MPT keys to the trie. +// let value = rlp::encode(&random_vector(32)).to_vec(); +// mpt_keys +// .iter() +// .for_each(|key| trie.insert(key, &value).unwrap()); +// trie.root_hash().unwrap(); + +// TestEthTrie { trie, mpt_keys } +// } + +// /// Test the proof generation of one branch with the specified number of children. +// fn test_branch_with_multiple_children(num_children: usize, test_slot: StorageSlotInfo) { +// info!("Generating test trie"); +// let mut test_trie = generate_test_trie(num_children, &test_slot); + +// let mpt_key1 = test_trie.mpt_keys[0].as_slice(); +// let mpt_key2 = test_trie.mpt_keys[1].as_slice(); +// let proof1 = test_trie.trie.get_proof(mpt_key1).unwrap(); +// let proof2 = test_trie.trie.get_proof(mpt_key2).unwrap(); +// let node_len = proof1.len(); +// // Get the branch node. +// let branch_node = proof1[node_len - 2].clone(); +// // Ensure both are located in the same branch. +// assert_eq!(node_len, proof2.len()); +// assert_eq!(branch_node, proof2[node_len - 2]); + +// info!("Generating parameters"); +// let params = build_circuits_params(); + +// // Generate the branch proof with one leaf. 
+// println!("Generating leaf proof"); +// let leaf_proof_buf1 = prove_leaf(¶ms, proof1[node_len - 1].clone(), test_slot); +// let leaf_proof1 = ProofWithVK::deserialize(&leaf_proof_buf1).unwrap(); +// let pub1 = leaf_proof1.proof.public_inputs[..NUM_IO].to_vec(); +// let pi1 = PublicInputs::new(&pub1); +// assert_eq!(pi1.proof_inputs.len(), NUM_IO); +// let (_, comp_ptr) = pi1.mpt_key_info(); +// assert_eq!(comp_ptr, F::from_canonical_usize(63)); +// println!("Generating branch proof with one leaf"); +// let branch_proof = +// prove_branch(¶ms, branch_node.clone(), vec![leaf_proof_buf1.clone()]); +// let branch_proof = ProofWithVK::deserialize(&branch_proof).unwrap(); +// let exp_vk = params.branches.b1.get_verifier_data(); +// assert_eq!(branch_proof.verifier_data(), exp_vk); + +// // Generate a fake proof for testing branch circuit. +// let gen_fake_proof = |mpt_key| { +// let mut pub2 = pub1.clone(); +// assert_eq!(pub2.len(), NUM_IO); +// pub2[public_inputs::K_RANGE].copy_from_slice( +// &bytes_to_nibbles(mpt_key) +// .into_iter() +// .map(F::from_canonical_u8) +// .collect_vec(), +// ); +// assert_eq!(pub2.len(), pub1.len()); + +// let pi2 = PublicInputs::new(&pub2); +// { +// let (k1, p1) = pi1.mpt_key_info(); +// let (k2, p2) = pi2.mpt_key_info(); +// let (pt1, pt2) = ( +// p1.to_canonical_u64() as usize, +// p2.to_canonical_u64() as usize, +// ); +// assert!(pt1 < k1.len() && pt2 < k2.len()); +// assert!(p1 == p2); +// assert!(k1[..pt1] == k2[..pt2]); +// } +// let fake_proof = params +// .set +// .generate_input_proofs([pub2.clone().try_into().unwrap()]) +// .unwrap(); +// let vk = params.set.verifier_data_for_input_proofs::<1>()[0].clone(); +// ProofWithVK::from((fake_proof[0].clone(), vk)) +// .serialize() +// .unwrap() +// }; + +// // Check the public input of branch proof. 
+// let check_branch_public_inputs = |num_children, branch_proof: &ProofWithVK| { +// let [leaf_pi, branch_pi] = [&leaf_proof1, branch_proof] +// .map(|proof| PublicInputs::new(&proof.proof().public_inputs[..NUM_IO])); + +// let leaf_metadata_digest = leaf_pi.metadata_digest(); +// let leaf_values_digest = weierstrass_to_point(&leaf_pi.values_digest()); +// let branch_values_digest = +// (0..num_children).fold(Point::NEUTRAL, |acc, _| acc + leaf_values_digest); +// assert_eq!(branch_pi.metadata_digest(), leaf_metadata_digest); +// assert_eq!( +// branch_pi.values_digest(), +// branch_values_digest.to_weierstrass() +// ); +// assert_eq!(branch_pi.n(), F::from_canonical_usize(num_children)); +// }; + +// info!("Generating branch with two leaves"); +// let leaf_proof_buf2 = gen_fake_proof(mpt_key2); +// let branch_proof = prove_branch( +// ¶ms, +// branch_node.clone(), +// vec![leaf_proof_buf1.clone(), leaf_proof_buf2.clone()], +// ); +// let branch_proof = ProofWithVK::deserialize(&branch_proof).unwrap(); +// let exp_vk = params.branches.b4.get_verifier_data().clone(); +// assert_eq!(branch_proof.verifier_data(), &exp_vk); +// check_branch_public_inputs(2, &branch_proof); + +// // Generate `num_children - 2`` fake proofs. 
+// let mut leaf_proofs = vec![leaf_proof_buf1, leaf_proof_buf2]; +// for i in 2..num_children { +// let leaf_proof = gen_fake_proof(test_trie.mpt_keys[i].as_slice()); +// leaf_proofs.push(leaf_proof); +// } +// info!("Generating branch proof with {num_children} leaves"); +// let branch_proof = prove_branch(¶ms, branch_node, leaf_proofs); +// let branch_proof = ProofWithVK::deserialize(&branch_proof).unwrap(); +// let exp_vk = params.branches.b9.get_verifier_data().clone(); +// assert_eq!(branch_proof.verifier_data(), &exp_vk); +// check_branch_public_inputs(num_children, &branch_proof); +// } +// } diff --git a/mp2-v1/src/values_extraction/gadgets/column_info.rs b/mp2-v1/src/values_extraction/gadgets/column_info.rs index 8ef2c176c..6def2b067 100644 --- a/mp2-v1/src/values_extraction/gadgets/column_info.rs +++ b/mp2-v1/src/values_extraction/gadgets/column_info.rs @@ -1,111 +1,237 @@ //! Column information for values extraction -use crate::api::SlotInput; use itertools::{zip_eq, Itertools}; use mp2_common::{ - group_hashing::map_to_curve_point, + eth::{left_pad, left_pad32}, + group_hashing::{map_to_curve_point, CircuitBuilderGroupHashing}, poseidon::H, types::{CBuilder, MAPPING_LEAF_VALUE_LEN}, - F, + utils::{Endianness, Packer}, + CHasher, F, }; use plonky2::{ field::types::{Field, Sample}, - hash::hash_types::HashOut, + hash::hash_types::{HashOut, HashOutTarget}, iop::{target::Target, witness::WitnessWrite}, plonk::config::Hasher, }; -use plonky2_ecgfp5::curve::curve::Point; +use plonky2_ecgfp5::{curve::curve::Point, gadgets::curve::CurveTarget}; use rand::{thread_rng, Rng}; use serde::{Deserialize, Serialize}; use std::{array, iter::once}; +/// Trait defining common functionality between [`InputColumnInfo`] and [`ExtractedColumnInfo`] +pub trait ColumnInfo { + /// Getter for the column identifier as a field element + fn identifier_field(&self) -> F; + + /// Getter for the identifier as a [`u64`] + fn identifier(&self) -> u64; +} + /// Column info #[derive(Clone, 
Debug, Default, Eq, PartialEq, Hash, Serialize, Deserialize)] -pub struct ColumnInfo { - /// Slot information of the variable - pub(crate) slot: F, +pub struct InputColumnInfo { + /// This is the information used to identify the data relative to the contract, + /// for storage extraction its the slot, for receipts its the event signature for example + pub extraction_identifier: [F; 8], /// Column identifier - pub(crate) identifier: F, - /// The offset in bytes where to extract this column in a given EVM word - pub(crate) byte_offset: F, - /// The starting offset in `byte_offset` of the bits to be extracted for this column. - /// The column bits will start at `byte_offset * 8 + bit_offset`. - pub(crate) bit_offset: F, + pub identifier: F, + /// Prefix used in computing mpt metadata + pub metadata_prefix: [u8; 32], /// The length (in bits) of the field to extract in the EVM word - pub(crate) length: F, - /// At which EVM word is this column extracted from. For simple variables, - /// this value should always be 0. For structs that spans more than one EVM word - // that value should be depending on which section of the struct we are in. 
- pub(crate) evm_word: F, + pub length: F, } -impl ColumnInfo { +impl InputColumnInfo { + /// Construct a new instance of [`ColumnInfo`] pub fn new( - slot: u8, + extraction_identifier: &[u8], identifier: u64, - byte_offset: usize, - bit_offset: usize, + metadata_prefix: &[u8], length: usize, - evm_word: u32, ) -> Self { - let slot = F::from_canonical_u8(slot); + let mut extraction_vec = extraction_identifier.pack(Endianness::Little); + extraction_vec.resize(8, 0u32); + extraction_vec.reverse(); + let extraction_identifier = extraction_vec + .into_iter() + .map(F::from_canonical_u32) + .collect::>() + .try_into() + .expect("This should never fail"); let identifier = F::from_canonical_u64(identifier); - let [byte_offset, bit_offset, length] = - [byte_offset, bit_offset, length].map(F::from_canonical_usize); - let evm_word = F::from_canonical_u32(evm_word); + let length = F::from_canonical_usize(length); Self { - slot, + extraction_identifier, identifier, - byte_offset, - bit_offset, + metadata_prefix: left_pad::<32>(metadata_prefix), length, - evm_word, } } - pub fn new_from_slot_input(identifier: u64, slot_input: &SlotInput) -> Self { - Self::new( - slot_input.slot, + /// Compute the MPT metadata. + pub fn mpt_metadata(&self) -> HashOut { + // key_column_md = H( "\0KEY" || slot) + let inputs = [ + self.metadata_prefix().as_slice(), + self.extraction_id().as_slice(), + ] + .concat(); + H::hash_no_pad(&inputs) + } + + /// Compute the column information digest. 
+ pub fn digest(&self) -> Point { + let metadata = self.mpt_metadata(); + + // digest = D(mpt_metadata || info.identifier) + let inputs = [metadata.elements.as_slice(), &[self.identifier()]].concat(); + + map_to_curve_point(&inputs) + } + + pub fn extraction_id(&self) -> [F; 8] { + self.extraction_identifier + } + + pub fn identifier(&self) -> F { + self.identifier + } + + pub fn metadata_prefix(&self) -> Vec { + self.metadata_prefix + .as_slice() + .pack(Endianness::Big) + .into_iter() + .map(F::from_canonical_u32) + .collect() + } + + pub fn length(&self) -> F { + self.length + } + + pub fn value_digest(&self, value: &[u8]) -> Point { + let bytes = left_pad32(value); + + let inputs = once(self.identifier()) + .chain( + bytes + .pack(Endianness::Big) + .into_iter() + .map(F::from_canonical_u32), + ) + .collect_vec(); + map_to_curve_point(&inputs) + } +} + +/// Column info +#[derive(Clone, Debug, Default, Eq, PartialEq, Hash, Serialize, Deserialize, Copy)] +pub struct ExtractedColumnInfo { + /// This is the information used to identify the data relative to the contract, + /// for storage extraction its the slot, for receipts its the event signature for example + pub extraction_identifier: [F; 8], + /// Column identifier + pub identifier: F, + /// The offset in bytes where to extract this column from some predetermined start point, + /// for storage this would be the byte offset from the start of the given EVM word, for Receipts + /// this would be either the offset from the start of the receipt or from the start of the + /// relevant log + pub byte_offset: F, + /// The length (in bits) of the field to extract in the EVM word + pub length: F, + /// For storage this is the EVM word, for receipts this is either 1 or 0 and indicates whether to + /// use the relevant log offset or not. 
+ pub location_offset: F, +} + +impl PartialOrd for ExtractedColumnInfo { + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.cmp(other)) + } +} + +impl Ord for ExtractedColumnInfo { + fn cmp(&self, other: &Self) -> std::cmp::Ordering { + self.location_offset + .0 + .cmp(&other.location_offset.0) + .then(self.byte_offset.0.cmp(&other.byte_offset.0)) + } +} + +impl ExtractedColumnInfo { + /// Construct a new instance of [`ColumnInfo`] + pub fn new( + extraction_identifier: &[u8], + identifier: u64, + byte_offset: usize, + length: usize, + location_offset: u32, + ) -> Self { + let mut extraction_vec = extraction_identifier.pack(Endianness::Little); + extraction_vec.resize(8, 0u32); + extraction_vec.reverse(); + let extraction_identifier = extraction_vec + .into_iter() + .map(F::from_canonical_u32) + .collect::>() + .try_into() + .expect("This should never fail"); + let identifier = F::from_canonical_u64(identifier); + let [byte_offset, length] = [byte_offset, length].map(F::from_canonical_usize); + let location_offset = F::from_canonical_u32(location_offset); + + Self { + extraction_identifier, identifier, - slot_input.byte_offset, - // TODO: Will remove this bit_offset from the internal data structures and the circuit. - 0, - slot_input.length, - slot_input.evm_word, - ) + byte_offset, + length, + location_offset, + } } /// Create a sample column info. It could be used in integration tests. 
- pub fn sample() -> Self { + pub fn sample_storage(extraction_identifier: &[F; 8], location_offset: F) -> Self { let rng = &mut thread_rng(); - let bit_offset = F::from_canonical_u8(rng.gen_range(0..8)); let length: usize = rng.gen_range(1..=MAPPING_LEAF_VALUE_LEN); - let max_byte_offset = MAPPING_LEAF_VALUE_LEN - length.div_ceil(8); + let max_byte_offset = MAPPING_LEAF_VALUE_LEN - length; let byte_offset = F::from_canonical_usize(rng.gen_range(0..=max_byte_offset)); let length = F::from_canonical_usize(length); - let [slot, identifier, evm_word] = array::from_fn(|_| F::rand()); + let identifier = F::rand(); Self { - slot, + extraction_identifier: *extraction_identifier, identifier, byte_offset, - bit_offset, length, - evm_word, + location_offset, + } + } + + /// Sample a ne [`ExtractedColumnInfo`] at random, if `flag` is `true` then it will be for storage extraction, + /// if false it will be for receipt extraction. + pub fn sample(flag: bool, extraction_identifier: &[F; 8], location_offset: F) -> Self { + if flag { + ExtractedColumnInfo::sample_storage(extraction_identifier, location_offset) + } else { + unimplemented!() } } + /// Compute the MPT metadata. 
pub fn mpt_metadata(&self) -> HashOut { - // metadata = H(info.slot || info.evm_word || info.byte_offset || info.bit_offset || info.length) - let inputs = vec![ - self.slot, - self.evm_word, - self.byte_offset, - self.bit_offset, - self.length, - ]; + // metadata = H(info.extraction_id || info.location_offset || info.byte_offset || info.length) + let inputs = [ + self.extraction_id().as_slice(), + &[self.location_offset(), self.byte_offset(), self.length()], + ] + .concat(); + H::hash_no_pad(&inputs) } @@ -114,16 +240,13 @@ impl ColumnInfo { let metadata = self.mpt_metadata(); // digest = D(mpt_metadata || info.identifier) - let inputs = metadata - .elements - .into_iter() - .chain(once(self.identifier)) - .collect_vec(); + let inputs = [metadata.elements.as_slice(), &[self.identifier()]].concat(); + map_to_curve_point(&inputs) } - pub fn slot(&self) -> F { - self.slot + pub fn extraction_id(&self) -> [F; 8] { + self.extraction_identifier } pub fn identifier(&self) -> F { @@ -134,138 +257,302 @@ impl ColumnInfo { self.byte_offset } - pub fn bit_offset(&self) -> F { - self.bit_offset - } - pub fn length(&self) -> F { self.length } - pub fn evm_word(&self) -> F { - self.evm_word + pub fn location_offset(&self) -> F { + self.location_offset + } + + pub fn extract_value(&self, value: &[u8]) -> [u8; 32] { + left_pad32( + &value[self.byte_offset().0 as usize + ..self.byte_offset().0 as usize + self.length.0 as usize], + ) + } + + pub fn value_digest(&self, value: &[u8]) -> Point { + if self.identifier().0 == 0 { + Point::NEUTRAL + } else { + let bytes = left_pad32( + &value[self.byte_offset().0 as usize + ..self.byte_offset().0 as usize + self.length.0 as usize], + ); + + let inputs = once(self.identifier()) + .chain( + bytes + .pack(Endianness::Big) + .into_iter() + .map(F::from_canonical_u32), + ) + .collect_vec(); + map_to_curve_point(&inputs) + } + } + + pub fn receipt_value_digest(&self, value: &[u8], offset: usize) -> Point { + if self.identifier().0 == 0 { + 
Point::NEUTRAL + } else { + let start = offset + self.byte_offset().0 as usize; + let bytes = left_pad32(&value[start..start + self.length.0 as usize]); + + let inputs = once(self.identifier()) + .chain( + bytes + .pack(Endianness::Big) + .into_iter() + .map(F::from_canonical_u32), + ) + .collect_vec(); + map_to_curve_point(&inputs) + } + } +} + +impl ColumnInfo for InputColumnInfo { + fn identifier_field(&self) -> F { + self.identifier + } + + fn identifier(&self) -> u64 { + self.identifier.0 } } -/// Column info target -#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)] -pub struct ColumnInfoTarget { - pub(crate) slot: Target, +impl ColumnInfo for ExtractedColumnInfo { + fn identifier_field(&self) -> F { + self.identifier + } + + fn identifier(&self) -> u64 { + self.identifier.0 + } +} +/// Column info +#[derive(Clone, Debug, Default, Eq, PartialEq, Hash, Serialize, Deserialize, Copy)] +pub struct ExtractedColumnInfoTarget { + /// This is the information used to identify the data relative to the contract, + /// for storage extraction its the slot, for receipts its the event signature for example + pub(crate) extraction_identifier: [Target; 8], + /// Column identifier pub(crate) identifier: Target, + /// The offset in bytes where to extract this column from some predetermined start point, + /// for storage this would be the byte offset from the start of the given EVM word, for Receipts + /// this would be either the offset from the start of the receipt or from the start of the + /// relevant log pub(crate) byte_offset: Target, - pub(crate) bit_offset: Target, + /// The length (in bits) of the field to extract in the EVM word pub(crate) length: Target, - pub(crate) evm_word: Target, + /// For storage this is the EVM word, for receipts this is either 1 or 0 and indicates whether to + /// use the relevant log offset or not. + pub(crate) location_offset: Target, +} + +impl ExtractedColumnInfoTarget { + /// Compute the MPT metadata. 
+ pub fn mpt_metadata(&self, b: &mut CBuilder) -> HashOutTarget { + // metadata = H(info.extraction_id || info.location_offset || info.byte_offset || info.length) + let inputs = [ + self.extraction_id().as_slice(), + &[self.location_offset(), self.byte_offset(), self.length()], + ] + .concat(); + + b.hash_n_to_hash_no_pad::(inputs) + } + + /// Compute the column information digest. + pub fn digest(&self, b: &mut CBuilder) -> CurveTarget { + let metadata = self.mpt_metadata(b); + + // digest = D(mpt_metadata || info.identifier) + let inputs = [metadata.elements.as_slice(), &[self.identifier()]].concat(); + + b.map_to_curve_point(&inputs) + } + + pub fn extraction_id(&self) -> [Target; 8] { + self.extraction_identifier + } + + pub fn identifier(&self) -> Target { + self.identifier + } + + pub fn byte_offset(&self) -> Target { + self.byte_offset + } + + pub fn length(&self) -> Target { + self.length + } + + pub fn location_offset(&self) -> Target { + self.location_offset + } +} + +/// Column info +#[derive(Clone, Debug, Default, Eq, PartialEq, Hash, Serialize, Deserialize)] +pub struct InputColumnInfoTarget { + /// This is the information used to identify the data relative to the contract, + /// for storage extraction its the slot, for receipts its the event signature for example + pub extraction_identifier: [Target; 8], + /// Column identifier + pub identifier: Target, + /// Prefix used in computing mpt metadata + pub metadata_prefix: [Target; 8], + /// The length of the field to extract in the EVM word + pub length: Target, +} + +impl InputColumnInfoTarget { + /// Compute the MPT metadata. + pub fn mpt_metadata(&self, b: &mut CBuilder) -> HashOutTarget { + // key_column_md = H( "\0KEY" || slot) + let inputs = [self.metadata_prefix(), self.extraction_id().as_slice()].concat(); + + b.hash_n_to_hash_no_pad::(inputs) + } + + /// Compute the column information digest. 
+ pub fn digest(&self, b: &mut CBuilder) -> CurveTarget { + let metadata = self.mpt_metadata(b); + + // digest = D(mpt_metadata || info.identifier) + let inputs = [metadata.elements.as_slice(), &[self.identifier()]].concat(); + + b.map_to_curve_point(&inputs) + } + + pub fn extraction_id(&self) -> [Target; 8] { + self.extraction_identifier + } + + pub fn identifier(&self) -> Target { + self.identifier + } + + pub fn metadata_prefix(&self) -> &[Target] { + self.metadata_prefix.as_slice() + } + + pub fn length(&self) -> Target { + self.length + } } pub trait CircuitBuilderColumnInfo { - /// Add a virtual column info target. - fn add_virtual_column_info(&mut self) -> ColumnInfoTarget; + /// Add a virtual extracted column info target. + fn add_virtual_extracted_column_info(&mut self) -> ExtractedColumnInfoTarget; + + /// Add a virtual input column info target. + fn add_virtual_input_column_info(&mut self) -> InputColumnInfoTarget; } impl CircuitBuilderColumnInfo for CBuilder { - fn add_virtual_column_info(&mut self) -> ColumnInfoTarget { - let [slot, identifier, byte_offset, bit_offset, length, evm_word] = + fn add_virtual_extracted_column_info(&mut self) -> ExtractedColumnInfoTarget { + let extraction_identifier: [Target; 8] = array::from_fn(|_| self.add_virtual_target()); + let [identifier, byte_offset, length, location_offset] = array::from_fn(|_| self.add_virtual_target()); - ColumnInfoTarget { - slot, + ExtractedColumnInfoTarget { + extraction_identifier, identifier, byte_offset, - bit_offset, length, - evm_word, + location_offset, + } + } + + fn add_virtual_input_column_info(&mut self) -> InputColumnInfoTarget { + let extraction_identifier: [Target; 8] = array::from_fn(|_| self.add_virtual_target()); + let metadata_prefix: [Target; 8] = array::from_fn(|_| self.add_virtual_target()); + let [identifier, length] = array::from_fn(|_| self.add_virtual_target()); + + InputColumnInfoTarget { + extraction_identifier, + identifier, + metadata_prefix, + length, } } } pub 
trait WitnessWriteColumnInfo { - fn set_column_info_target(&mut self, target: &ColumnInfoTarget, value: &ColumnInfo); + fn set_extracted_column_info_target( + &mut self, + target: &ExtractedColumnInfoTarget, + value: &ExtractedColumnInfo, + ); + + fn set_extracted_column_info_target_arr( + &mut self, + targets: &[ExtractedColumnInfoTarget], + values: &[ExtractedColumnInfo], + ) { + zip_eq(targets, values) + .for_each(|(target, value)| self.set_extracted_column_info_target(target, value)); + } - fn set_column_info_target_arr(&mut self, targets: &[ColumnInfoTarget], values: &[ColumnInfo]) { + fn set_input_column_info_target( + &mut self, + target: &InputColumnInfoTarget, + value: &InputColumnInfo, + ); + + fn set_input_column_info_target_arr( + &mut self, + targets: &[InputColumnInfoTarget], + values: &[InputColumnInfo], + ) { zip_eq(targets, values) - .for_each(|(target, value)| self.set_column_info_target(target, value)); + .for_each(|(target, value)| self.set_input_column_info_target(target, value)); } } impl> WitnessWriteColumnInfo for T { - fn set_column_info_target(&mut self, target: &ColumnInfoTarget, value: &ColumnInfo) { + fn set_extracted_column_info_target( + &mut self, + target: &ExtractedColumnInfoTarget, + value: &ExtractedColumnInfo, + ) { + target + .extraction_identifier + .iter() + .zip(value.extraction_identifier.iter()) + .for_each(|(t, v)| self.set_target(*t, *v)); [ - (target.slot, value.slot), (target.identifier, value.identifier), (target.byte_offset, value.byte_offset), - (target.bit_offset, value.bit_offset), (target.length, value.length), - (target.evm_word, value.evm_word), + (target.location_offset, value.location_offset), ] .into_iter() .for_each(|(t, v)| self.set_target(t, v)); } -} - -#[cfg(test)] -pub(crate) mod tests { - use super::*; - use mp2_common::{C, D}; - use mp2_test::circuit::{run_circuit, UserCircuit}; - use plonky2::iop::witness::PartialWitness; - - impl ColumnInfo { - fn to_vec(&self) -> Vec { - vec![ - self.slot, - 
self.identifier, - self.byte_offset, - self.bit_offset, - self.length, - self.evm_word, - ] - } - } - - impl ColumnInfoTarget { - fn to_vec(&self) -> Vec { - vec![ - self.slot, - self.identifier, - self.byte_offset, - self.bit_offset, - self.length, - self.evm_word, - ] - } - } - - #[derive(Clone, Debug)] - struct TestColumnInfoCircuit { - column_info: ColumnInfo, - } - - impl UserCircuit for TestColumnInfoCircuit { - type Wires = ColumnInfoTarget; - - fn build(b: &mut CBuilder) -> Self::Wires { - let column_info = b.add_virtual_column_info(); - - // Register as public inputs to check equivalence. - b.register_public_inputs(&column_info.to_vec()); - - column_info - } - - fn prove(&self, pw: &mut PartialWitness, column_info_target: &ColumnInfoTarget) { - pw.set_column_info_target(column_info_target, &self.column_info); - } - } - - #[test] - fn test_values_extraction_column_info() { - let column_info = ColumnInfo::sample(); - let expected_pi = column_info.to_vec(); - - let test_circuit = TestColumnInfoCircuit { column_info }; - let proof = run_circuit::(test_circuit); - assert_eq!(proof.public_inputs, expected_pi); + fn set_input_column_info_target( + &mut self, + target: &InputColumnInfoTarget, + value: &InputColumnInfo, + ) { + target + .extraction_identifier + .iter() + .zip(value.extraction_identifier.iter()) + .for_each(|(t, v)| self.set_target(*t, *v)); + target + .metadata_prefix + .iter() + .zip(value.metadata_prefix().iter()) + .for_each(|(t, v)| self.set_target(*t, *v)); + + self.set_target(target.length, value.length()); + self.set_target(target.identifier, value.identifier()); } } diff --git a/mp2-v1/src/values_extraction/gadgets/metadata_gadget.rs b/mp2-v1/src/values_extraction/gadgets/metadata_gadget.rs index 892be71dd..cac385112 100644 --- a/mp2-v1/src/values_extraction/gadgets/metadata_gadget.rs +++ b/mp2-v1/src/values_extraction/gadgets/metadata_gadget.rs @@ -1,25 +1,38 @@ //! 
The metadata gadget is used to ensure the correct extraction from the set of all identifiers. +use crate::values_extraction::{DATA_PREFIX, GAS_USED_PREFIX, TOPIC_PREFIX, TX_INDEX_PREFIX}; + use super::column_info::{ - CircuitBuilderColumnInfo, ColumnInfo, ColumnInfoTarget, WitnessWriteColumnInfo, + CircuitBuilderColumnInfo, ExtractedColumnInfo, ExtractedColumnInfoTarget, InputColumnInfo, + InputColumnInfoTarget, WitnessWriteColumnInfo, +}; +use alloy::{ + primitives::{Log, B256}, + rlp::Decodable, }; use itertools::Itertools; use mp2_common::{ + array::{Array, Targetable, L32}, + eth::EventLogInfo, group_hashing::CircuitBuilderGroupHashing, - serialization::{ - deserialize_array, deserialize_long_array, serialize_array, serialize_long_array, + poseidon::H, + serialization::{deserialize_long_array, serialize_array, serialize_long_array}, + types::{CBuilder, HashOutput}, + u256::{CircuitBuilderU256, UInt256Target}, + utils::{ + less_than_or_equal_to_unsafe, Endianness, FromTargets, Packer, TargetsConnector, ToFields, }, - types::CBuilder, - utils::less_than_or_equal_to_unsafe, CHasher, F, }; use plonky2::{ field::types::{Field, PrimeField64}, iop::{ - target::{BoolTarget, Target}, + target::Target, witness::{PartialWitness, WitnessWrite}, }, + plonk::config::Hasher, }; +use plonky2_crypto::u32::arithmetic_u32::U32Target; use plonky2_ecgfp5::{ curve::curve::Point, gadgets::curve::{CircuitBuilderEcGFp5, CurveTarget}, @@ -28,299 +41,673 @@ use rand::{thread_rng, Rng}; use serde::{Deserialize, Serialize}; use std::{array, iter::once}; -#[derive(Clone, Debug, Eq, PartialEq, Hash, Serialize, Deserialize)] -pub struct ColumnsMetadata { +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct TableMetadata +where + [(); MAX_COLUMNS - INPUT_COLUMNS]:, +{ + /// Columns that aren't extracted from the node, like the mapping keys #[serde( serialize_with = "serialize_long_array", deserialize_with = "deserialize_long_array" )] - /// Information about all columns of the table - 
pub(crate) table_info: [ColumnInfo; MAX_COLUMNS], + pub(crate) input_columns: [InputColumnInfo; INPUT_COLUMNS], + /// The extracted column info + #[serde( + serialize_with = "serialize_long_array", + deserialize_with = "deserialize_long_array" + )] + pub(crate) extracted_columns: [ExtractedColumnInfo; MAX_COLUMNS - INPUT_COLUMNS], /// Actual column number pub(crate) num_actual_columns: usize, - /// Column number to be extracted - pub(crate) num_extracted_columns: usize, - /// EVM word that should be the same for all extracted columns - pub(crate) evm_word: u32, } -impl - ColumnsMetadata +impl TableMetadata +where + [(); MAX_COLUMNS - INPUT_COLUMNS]:, { - /// Create a new MPT metadata. + /// Create a new instance of [`TableColumns`] from a slice of [`ColumnInfo`] we assume that the columns are sorted into a predetermined order. pub fn new( - mut table_info: Vec, - extracted_column_identifiers: &[u64], - evm_word: u32, - ) -> Self { - let num_actual_columns = table_info.len(); + input_columns: &[InputColumnInfo; INPUT_COLUMNS], + extracted_columns: &[ExtractedColumnInfo], + ) -> TableMetadata { + let num_actual_columns = extracted_columns.len() + INPUT_COLUMNS; + // Check that we don't have too many columns assert!(num_actual_columns <= MAX_COLUMNS); - let num_extracted_columns = extracted_column_identifiers.len(); - assert!(num_extracted_columns <= MAX_FIELD_PER_EVM); - - // Move the extracted columns to the front the vector of column information. - table_info.sort_by_key(|column_info| { - !extracted_column_identifiers.contains(&column_info.identifier.to_canonical_u64()) - }); - - // Extend the column information vector with the last element. 
- let last_column_info = table_info.last().cloned().unwrap_or(ColumnInfo::default()); - table_info.resize(MAX_COLUMNS, last_column_info); - let table_info = table_info.try_into().unwrap(); + // We order the columns so that the location_offset increases, then if two columns have the same location offset + // they are ordered by increasing byte offset. Then if byte offset is the same they are ordered such that if `self.is_extracted` is + // false they appear first. + let mut table_info = [ExtractedColumnInfo::default(); { MAX_COLUMNS - INPUT_COLUMNS }]; + table_info + .iter_mut() + .zip(extracted_columns.into_iter()) + .for_each(|(ti, &column)| *ti = column); - Self { - table_info, + TableMetadata:: { + input_columns: input_columns.clone(), + extracted_columns: table_info, num_actual_columns, - num_extracted_columns, - evm_word, } } - /// Get the actual column information. - pub fn actual_table_info(&self) -> &[ColumnInfo] { - &self.table_info[..self.num_actual_columns] - } + /// Create a sample MPT metadata. It could be used in integration tests. + pub fn sample( + flag: bool, + input_prefixes: &[&[u8]; INPUT_COLUMNS], + extraction_identifier: &[u8], + location_offset: F, + ) -> Self { + let rng = &mut thread_rng(); - /// Get the extracted column information. - pub fn extracted_table_info(&self) -> &[ColumnInfo] { - &self.table_info[..self.num_extracted_columns] - } + let input_columns = input_prefixes + .map(|prefix| InputColumnInfo::new(extraction_identifier, rng.gen(), prefix, 32)); - /// Get the extracted column identifiers. - pub fn extracted_column_identifiers(&self) -> Vec { - self.table_info[..self.num_extracted_columns] - .iter() - .map(|column_info| column_info.identifier.to_canonical_u64()) - .collect_vec() - } + let num_actual_columns = rng.gen_range(1..=MAX_COLUMNS - INPUT_COLUMNS); - /// Create a sample MPT metadata. It could be used in integration tests. 
- pub fn sample(slot: u8, evm_word: u32) -> Self { - let rng = &mut thread_rng(); + let mut extraction_vec = extraction_identifier.pack(Endianness::Little); + extraction_vec.resize(8, 0u32); + extraction_vec.reverse(); + let extraction_id: [F; 8] = extraction_vec + .into_iter() + .map(F::from_canonical_u32) + .collect::>() + .try_into() + .expect("This should never fail"); - let mut table_info = array::from_fn(|_| ColumnInfo::sample()); - let num_actual_columns = rng.gen_range(1..=MAX_COLUMNS); - let max_extracted_columns = num_actual_columns.min(MAX_FIELD_PER_EVM); - let num_extracted_columns = rng.gen_range(1..=max_extracted_columns); + let extracted_columns = (0..num_actual_columns) + .map(|_| ExtractedColumnInfo::sample(flag, &extraction_id, location_offset)) + .collect::>(); - // if is_extracted: - // evm_word == info.evm_word && slot == info.slot - let evm_word_field = F::from_canonical_u32(evm_word); - let slot_field = F::from_canonical_u8(slot); - table_info[..num_extracted_columns] - .iter_mut() - .for_each(|column_info| { - column_info.evm_word = evm_word_field; - column_info.slot = slot_field; - }); + TableMetadata::::new(&input_columns, &extracted_columns) + } - Self { - table_info, - num_actual_columns, - num_extracted_columns, - evm_word, - } + /// Get the input columns + pub fn input_columns(&self) -> &[InputColumnInfo] { + self.input_columns.as_slice() + } + + /// Get the columns we actually extract from + pub fn extracted_columns(&self) -> &[ExtractedColumnInfo] { + &self.extracted_columns[..self.num_actual_columns - INPUT_COLUMNS] } /// Compute the metadata digest. 
pub fn digest(&self) -> Point { - self.table_info[..self.num_actual_columns] + let input_iter = self + .input_columns() .iter() - .fold(Point::NEUTRAL, |acc, info| acc + info.digest()) + .map(|column| column.digest()) + .collect::>(); + + let extracted_iter = self + .extracted_columns() + .iter() + .map(|column| column.digest()) + .collect::>(); + + input_iter + .into_iter() + .chain(extracted_iter) + .fold(Point::NEUTRAL, |acc, b| acc + b) } - pub fn table_info(&self) -> &[ColumnInfo; MAX_COLUMNS] { - &self.table_info + /// Computes the value digest for a provided value array and the unique row_id + pub fn input_value_digest( + &self, + input_vals: &[&[u8; 32]; INPUT_COLUMNS], + ) -> (Point, HashOutput) { + let point = self + .input_columns() + .iter() + .zip(input_vals.iter()) + .fold(Point::NEUTRAL, |acc, (column, value)| { + acc + column.value_digest(value.as_slice()) + }); + + let row_id_input = input_vals + .map(|key| { + key.pack(Endianness::Big) + .into_iter() + .map(F::from_canonical_u32) + }) + .into_iter() + .flatten() + .collect::>(); + + (point, H::hash_no_pad(&row_id_input).into()) } - pub fn num_actual_columns(&self) -> usize { - self.num_actual_columns + pub fn extracted_value_digest( + &self, + value: &[u8], + extraction_id: &[u8], + location_offset: F, + ) -> Point { + let mut extraction_vec = extraction_id.pack(Endianness::Little); + extraction_vec.resize(8, 0u32); + extraction_vec.reverse(); + let extraction_id: [F; 8] = extraction_vec + .into_iter() + .map(F::from_canonical_u32) + .collect::>() + .try_into() + .expect("This should never fail"); + + self.extracted_columns() + .iter() + .fold(Point::NEUTRAL, |acc, column| { + let correct_id = extraction_id == column.extraction_id(); + let correct_offset = location_offset == column.location_offset(); + let correct_location = correct_id && correct_offset; + + if correct_location { + acc + column.value_digest(value) + } else { + acc + } + }) } - pub fn num_extracted_columns(&self) -> usize { - 
self.num_extracted_columns + pub fn extracted_receipt_value_digest( + &self, + value: &[u8], + event: &EventLogInfo, + ) -> Point { + // Convert to Rlp form so we can use provided methods. + let node_rlp = rlp::Rlp::new(value); + + // The actual receipt data is item 1 in the list + let (receipt_rlp, receipt_off) = node_rlp.at_with_offset(1).unwrap(); + // The rlp encoded Receipt is not a list but a string that is formed of the `tx_type` followed by the remaining receipt + // data rlp encoded as a list. We retrieve the payload info so that we can work out relevant offsets later. + let receipt_str_payload = receipt_rlp.payload_info().unwrap(); + + // We make a new `Rlp` struct that should be the encoding of the inner list representing the `ReceiptEnvelope` + let receipt_list = rlp::Rlp::new(&receipt_rlp.data().unwrap()[1..]); + + // The logs themselves start are the item at index 3 in this list + let (logs_rlp, logs_off) = receipt_list.at_with_offset(3).unwrap(); + + // We calculate the offset the that the logs are at from the start of the node + let logs_offset = receipt_off + receipt_str_payload.header_len + 1 + logs_off; + + // Now we produce an iterator over the logs with each logs offset. 
+ let relevant_log_offset = std::iter::successors(Some(0usize), |i| Some(i + 1)) + .map_while(|i| logs_rlp.at_with_offset(i).ok()) + .find_map(|(log_rlp, log_off)| { + let mut bytes = log_rlp.as_raw(); + let log = Log::decode(&mut bytes).expect("Couldn't decode log"); + + if log.address == event.address + && log + .data + .topics() + .contains(&B256::from(event.event_signature)) + { + Some(logs_offset + log_off) + } else { + Some(0usize) + } + }) + .expect("No relevant log in the provided value"); + + self.extracted_columns() + .iter() + .fold(Point::NEUTRAL, |acc, column| { + acc + column.receipt_value_digest(value, relevant_log_offset) + }) } - pub fn evm_word(&self) -> u32 { - self.evm_word + pub fn num_actual_columns(&self) -> usize { + self.num_actual_columns } } -pub struct MetadataGadget; +pub struct TableMetadataGadget; -impl - MetadataGadget +impl + TableMetadataGadget +where + [(); MAX_COLUMNS - INPUT_COLUMNS]:, { - pub(crate) fn build(b: &mut CBuilder) -> MetadataTarget { - let table_info = array::from_fn(|_| b.add_virtual_column_info()); - let [is_actual_columns, is_extracted_columns] = - array::from_fn(|_| array::from_fn(|_| b.add_virtual_bool_target_safe())); - let evm_word = b.add_virtual_target(); - - MetadataTarget { - table_info, - is_actual_columns, - is_extracted_columns, - evm_word, + pub(crate) fn build(b: &mut CBuilder) -> TableMetadataTarget { + TableMetadataTarget { + input_columns: array::from_fn(|_| b.add_virtual_input_column_info()), + extracted_columns: array::from_fn(|_| b.add_virtual_extracted_column_info()), + num_actual_columns: b.add_virtual_target(), } } pub(crate) fn assign( pw: &mut PartialWitness, - columns_metadata: &ColumnsMetadata, - metadata_target: &MetadataTarget, + columns_metadata: &TableMetadata, + metadata_target: &TableMetadataTarget, ) { - pw.set_column_info_target_arr(&metadata_target.table_info, &columns_metadata.table_info); - metadata_target - .is_actual_columns + pw.set_input_column_info_target_arr( + 
metadata_target.input_columns.as_slice(), + columns_metadata.input_columns.as_slice(), + ); + + pw.set_extracted_column_info_target_arr( + metadata_target.extracted_columns.as_slice(), + columns_metadata.extracted_columns.as_slice(), + ); + pw.set_target( + metadata_target.num_actual_columns, + F::from_canonical_usize(columns_metadata.num_actual_columns), + ); + } +} + +impl + From> for TableMetadata +where + [(); MAX_COLUMNS - 2 - NO_TOPICS - MAX_DATA]:, +{ + fn from(event: EventLogInfo) -> Self { + let extraction_id = event.event_signature; + + let tx_index_input = [ + event.address.as_slice(), + event.event_signature.as_slice(), + TX_INDEX_PREFIX, + ] + .concat() + .into_iter() + .map(F::from_canonical_u8) + .collect::>(); + let tx_index_column_id = H::hash_no_pad(&tx_index_input).elements[0].to_canonical_u64(); + + let gas_used_input = [ + event.address.as_slice(), + event.event_signature.as_slice(), + GAS_USED_PREFIX, + ] + .concat() + .into_iter() + .map(F::from_canonical_u8) + .collect::>(); + let gas_used_column_id = H::hash_no_pad(&gas_used_input).elements[0].to_canonical_u64(); + + let tx_index_input_column = InputColumnInfo::new( + extraction_id.as_slice(), + tx_index_column_id, + TX_INDEX_PREFIX, + 32, + ); + let gas_used_index_column = InputColumnInfo::new( + extraction_id.as_slice(), + gas_used_column_id, + GAS_USED_PREFIX, + 32, + ); + + let topic_columns = event + .topics .iter() .enumerate() - .for_each(|(i, t)| pw.set_bool_target(*t, i < columns_metadata.num_actual_columns)); - metadata_target - .is_extracted_columns + .map(|(j, &offset)| { + let input = [ + event.address.as_slice(), + event.event_signature.as_slice(), + TOPIC_PREFIX, + &[j as u8 + 1], + ] + .concat() + .into_iter() + .map(F::from_canonical_u8) + .collect::>(); + + let topic_id = H::hash_no_pad(&input).elements[0].to_canonical_u64(); + ExtractedColumnInfo::new(extraction_id.as_slice(), topic_id, offset, 32, 0) + }) + .collect::>(); + + let data_columns = event + .data .iter() 
.enumerate() - .for_each(|(i, t)| pw.set_bool_target(*t, i < columns_metadata.num_extracted_columns)); - pw.set_target( - metadata_target.evm_word, - F::from_canonical_u32(columns_metadata.evm_word), - ); + .map(|(j, &offset)| { + let input = [ + event.address.as_slice(), + event.event_signature.as_slice(), + DATA_PREFIX, + &[j as u8 + 1], + ] + .concat() + .into_iter() + .map(F::from_canonical_u8) + .collect::>(); + + let data_id = H::hash_no_pad(&input).elements[0].to_canonical_u64(); + ExtractedColumnInfo::new(extraction_id.as_slice(), data_id, offset, 32, 0) + }) + .collect::>(); + + let extracted_columns = [topic_columns, data_columns].concat(); + + TableMetadata::::new( + &[tx_index_input_column, gas_used_index_column], + &extracted_columns, + ) } } +// impl TryFrom for TableMetadata { +// type Error = anyhow::Error; +// fn try_from(value: StorageSlot) -> Result { +// match value { +// StorageSlot::Node(inner_slot) => {match inner_slot { +// StorageSlotNode:: +// }} +// } +// } +// } + #[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)] -pub(crate) struct MetadataTarget { +pub(crate) struct TableMetadataTarget +where + [(); MAX_COLUMNS - INPUT_COLUMNS]:, +{ #[serde( serialize_with = "serialize_long_array", deserialize_with = "deserialize_long_array" )] - /// Information about all columns of the table - pub(crate) table_info: [ColumnInfoTarget; MAX_COLUMNS], - #[serde( - serialize_with = "serialize_array", - deserialize_with = "deserialize_array" - )] - /// Boolean flags specifying whether the i-th column is actual or not - pub(crate) is_actual_columns: [BoolTarget; MAX_COLUMNS], + /// Information about all input columns of the table + pub(crate) input_columns: [InputColumnInfoTarget; INPUT_COLUMNS], #[serde( - serialize_with = "serialize_array", - deserialize_with = "deserialize_array" + serialize_with = "serialize_long_array", + deserialize_with = "deserialize_long_array" )] - /// Boolean flags specifying whether the i-th field being processed has 
to be extracted into a column or not - pub(crate) is_extracted_columns: [BoolTarget; MAX_COLUMNS], - /// EVM word that should be the same for all columns we’re extracting here - pub(crate) evm_word: Target, + /// Information about all extracted columns of the table + pub(crate) extracted_columns: [ExtractedColumnInfoTarget; MAX_COLUMNS - INPUT_COLUMNS], + /// The number of actual columns + pub(crate) num_actual_columns: Target, } -impl - MetadataTarget +type ReceiptExtractedOutput = ( + Array, + Array, + CurveTarget, + CurveTarget, +); + +impl + TableMetadataTarget +where + [(); MAX_COLUMNS - INPUT_COLUMNS]:, { - /// Compute the metadata digest and number of actual columns. - pub(crate) fn digest_info(&self, b: &mut CBuilder, slot: Target) -> (CurveTarget, Target) { + pub fn metadata_digest(&self, b: &mut CBuilder) -> CurveTarget { + let input_points = self + .input_columns + .iter() + .map(|column| column.digest(b)) + .collect::>(); let zero = b.zero(); + let curve_zero = b.curve_zero(); + let extracted_points = self + .extracted_columns + .iter() + .map(|column| { + let selector = b.is_equal(zero, column.identifier()); + let poss_digest = column.digest(b); + b.select_curve_point(selector, curve_zero, poss_digest) + }) + .collect::>(); - let mut partial = b.curve_zero(); - let mut non_extracted_column_found = b._false(); - let mut num_extracted_columns = zero; - let mut num_actual_columns = zero; - - for i in 0..MAX_COLUMNS { - let info = &self.table_info[i]; - let is_actual = self.is_actual_columns[i]; - let is_extracted = self.is_extracted_columns[i]; - - // If the current column has to be extracted, we check that: - // - The EVM word associated to this column is the same as the EVM word we are extracting data from. - // - The slot associated to this column is the same as the slot we are extracting data from. - // - Ensure that we extract only from non-dummy columns. 
- // if is_extracted: - // evm_word == info.evm_word && slot == info.slot && is_actual - let is_evm_word_eq = b.is_equal(self.evm_word, info.evm_word); - let is_slot_eq = b.is_equal(slot, info.slot); - let acc = [is_extracted, is_actual, is_evm_word_eq, is_slot_eq] - .into_iter() - .reduce(|acc, flag| b.and(acc, flag)) - .unwrap(); - b.connect(acc.target, is_extracted.target); - - // Ensure that once we found a non-extracted column, then there are no - // extracted columns left. - // if non_extracted_column_found: - // is_extracted == false - // => non_extracted_column_found == non_extracted_column_found * (1 - is_extracted) - let acc = b.arithmetic( - F::NEG_ONE, - F::ONE, - is_extracted.target, - non_extracted_column_found.target, - non_extracted_column_found.target, - ); - b.connect(acc, non_extracted_column_found.target); - - // non_extracted_column_found |= not is_extracted - // => non_extracted_column_found = - // non_extracted_column_found + (1 - is_extracted) - - // non_extracted_column_found * (1 - is_extracted) - // => non_extracted_column_found = - // 1 - is_extracted + non_extracted_column_found * is_extracted - let acc = b.arithmetic( - F::ONE, - F::NEG_ONE, - non_extracted_column_found.target, - is_extracted.target, - is_extracted.target, - ); - let acc = b.add_const(acc, F::ONE); - non_extracted_column_found = BoolTarget::new_unsafe(acc); - // num_extracted_columns += is_extracted - num_extracted_columns = b.add(num_extracted_columns, is_extracted.target); - num_actual_columns = b.add(num_actual_columns, is_actual.target); - - // Compute the partial digest of all columns. 
- // mpt_metadata = H(info.slot || info.evm_word || info.byte_offset || info.bit_offset || info.length) - let inputs = vec![ - info.slot, - info.evm_word, - info.byte_offset, - info.bit_offset, - info.length, - ]; - let mpt_metadata = b.hash_n_to_hash_no_pad::(inputs); - // mpt_digest = D(mpt_metadata || info.identifier) - let inputs = mpt_metadata - .elements - .into_iter() - .chain(once(info.identifier)) - .collect_vec(); - let mpt_digest = b.map_to_curve_point(&inputs); - // acc = partial + mpt_digest - let acc = b.add_curve_point(&[partial, mpt_digest]); - // partial = is_actual ? acc : partial - partial = b.curve_select(is_actual, acc, partial); - } + let points = [input_points, extracted_points].concat(); + + b.add_curve_point(&points) + } + + /// Computes the value digest and metadata digest for the input columns from the supplied inputs. + /// Outputs are ordered as `(MetadataDigest, ValueDigest)`. + pub(crate) fn inputs_digests( + &self, + b: &mut CBuilder, + input_values: &[Array; INPUT_COLUMNS], + ) -> (CurveTarget, CurveTarget) { + let (metadata_points, value_points): (Vec, Vec) = self + .input_columns + .iter() + .zip(input_values.iter()) + .map(|(column, input_val)| { + let inputs = once(column.identifier) + .chain(input_val.arr.iter().map(|t| t.to_target())) + .collect_vec(); + (column.digest(b), b.map_to_curve_point(&inputs)) + }) + .unzip(); + + ( + b.add_curve_point(&metadata_points), + b.add_curve_point(&value_points), + ) + } - // num_extracted_columns <= MAX_FIELD_PER_EVM - let max_field_per_evm = b.constant(F::from_canonical_usize(MAX_FIELD_PER_EVM)); - let num_extracted_lt_or_eq_max = - less_than_or_equal_to_unsafe(b, num_extracted_columns, max_field_per_evm, 8); - b.assert_one(num_extracted_lt_or_eq_max.target); + /// Computes the value digest and metadata digest for the extracted columns from the supplied value + /// Outputs are ordered as `(MetadataDigest, ValueDigest)`. 
+ pub(crate) fn extracted_digests( + &self, + b: &mut CBuilder, + value: &Array, + location_no_offset: &UInt256Target, + location: &UInt256Target, + extraction_id: &[Target; 8], + ) -> (CurveTarget, CurveTarget) { + let zero = b.zero(); + let one = b.one(); + + let curve_zero = b.curve_zero(); + + let ex_id_arr = Array::::from(*extraction_id); + + let (metadata_points, value_points): (Vec, Vec) = self + .extracted_columns + .into_iter() + .map(|column| { + // Calculate the column digest + let column_digest = column.digest(b); + // The column is real if the identifier is non-zero so we use it as a selector + let selector = b.is_equal(zero, column.identifier()); + + // Now we work out if the column is to be extracted, if it is we will take the value we recover from `value[column.byte_offset..column.byte_offset + column.length]` + // left padded. + let loc_offset_u256 = + UInt256Target::new_from_target_unsafe(b, column.location_offset()); + let (sum, _) = b.add_u256(&loc_offset_u256, location_no_offset); + let correct_offset = b.is_equal_u256(&sum, location); + + // We check that we have the correct base extraction id + let column_ex_id_arr = Array::::from(column.extraction_id()); + let correct_extraction_id = column_ex_id_arr.equals(b, &ex_id_arr); + + // We only extract if we are in the correct location AND `column.is_extracted` is true + let correct_location = b.and(correct_offset, correct_extraction_id); + + // last_byte_found lets us know whether we continue extracting or not. 
+ // Hence if we want to extract values `extract` will be true so `last_byte_found` should be false + let mut last_byte_found = b.not(correct_location); + + // Even if the constant `VALUE_LEN` is larger than 32 this is the maximum size in bytes + // of data that we extract per column + let mut result_bytes = [zero; 32]; + + // We iterate over the result bytes in reverse order, the first element that we want to access + // from `value` is `value[MAPPING_LEAF_VALUE_LEN - column.byte_offset - column.length]` and then + // we keep extracting until we reach `value[column.byte_offset]`. + // let mapping_leaf_val_len = b.constant(F::from_canonical_usize(VALUE_LEN)); + let last_byte_offset = b.add(column.byte_offset, column.length); + // let to_sub = b.sub(mapping_leaf_val_len, last_byte_offset); + // let last_index = b.constant(F::from_canonical_usize(VALUE_LEN - 1)); + let start = b.sub(last_byte_offset, one); + + result_bytes + .iter_mut() + .rev() + .enumerate() + .for_each(|(i, out_byte)| { + // offset = info.byte_offset + i + let index = b.constant(F::from_canonical_usize(i)); + let offset = b.sub(start, index); + // Set to 0 if found the last byte. + let offset = b.select(last_byte_found, zero, offset); + + // Since VALUE_LEN is a constant that is determined at compile time this conditional won't + // cause any issues with the circuit. 
+ let byte = if VALUE_LEN < 64 { + b.random_access(offset, value.arr.to_vec()) + } else { + value.random_access_large_array(b, offset) + }; + + // Now if `last_byte_found` is true we add zero, otherwise add `byte` + let to_add = b.select(last_byte_found, zero, byte); + + *out_byte = b.add(*out_byte, to_add); + // is_last_byte = offset == last_byte_offset + let is_last_byte = b.is_equal(offset, column.byte_offset); + // last_byte_found |= is_last_byte + last_byte_found = b.or(last_byte_found, is_last_byte); + }); + + let result_arr = Array::::from_array(result_bytes); + + let result_packed: Array = + Array::::pack(&result_arr, b, Endianness::Big); + + let inputs = once(column.identifier) + .chain(result_packed.arr.iter().map(|t| t.to_target())) + .collect_vec(); + let value_digest = b.map_to_curve_point(&inputs); + let negated = b.not(correct_location); + let value_selector = b.or(negated, selector); + ( + b.curve_select(selector, curve_zero, column_digest), + b.curve_select(value_selector, curve_zero, value_digest), + ) + }) + .unzip(); + + ( + b.add_curve_point(&metadata_points), + b.add_curve_point(&value_points), + ) + } - (partial, num_actual_columns) + /// Computes the value digest and metadata digest for the extracted columns from the supplied value + /// Outputs are ordered as `(MetadataDigest, ValueDigest)`. 
+ pub(crate) fn extracted_receipt_digests( + &self, + b: &mut CBuilder, + value: &Array, + log_offset: Target, + address_offset: Target, + signature_offset: Target, + ) -> ReceiptExtractedOutput { + let zero = b.zero(); + let one = b.one(); + let curve_zero = b.curve_zero(); + + let address_start = b.add(log_offset, address_offset); + let address = value.extract_array_large::<_, _, 20>(b, address_start); + + let signature_start = b.add(log_offset, signature_offset); + let signature = value.extract_array_large::<_, _, 32>(b, signature_start); + + let (metadata_points, value_points): (Vec, Vec) = self + .extracted_columns + .into_iter() + .map(|column| { + // Calculate the column digest + let column_digest = column.digest(b); + // The column is real if the identifier is non-zero so we use it as a selector + let selector = b.is_equal(zero, column.identifier()); + + let location = b.add(log_offset, column.byte_offset()); + + // last_byte_found lets us know whether we continue extracting or not. + // If `selector` is false then we have data to extract + let mut last_byte_found = selector; + + // Even if the constant `VALUE_LEN` is larger than 32 this is the maximum size in bytes + // of data that we extract per column + let mut result_bytes = [zero; 32]; + + // We iterate over the result bytes in reverse order, the first element that we want to access + // from `value` is `value[location + column.length - 1]` and then + // we keep extracting until we reach `value[location]`. + + let last_byte_offset = b.add(location, column.length); + + let start = b.sub(last_byte_offset, one); + + result_bytes + .iter_mut() + .rev() + .enumerate() + .for_each(|(i, out_byte)| { + // offset = info.byte_offset + i + let index = b.constant(F::from_canonical_usize(i)); + let offset = b.sub(start, index); + // Set to 0 if found the last byte. 
+ let offset = b.select(last_byte_found, zero, offset); + + // Since VALUE_LEN is a constant that is determined at compile time this conditional won't + // cause any issues with the circuit. + let byte = if VALUE_LEN < 64 { + b.random_access(offset, value.arr.to_vec()) + } else { + value.random_access_large_array(b, offset) + }; + + // Now if `last_byte_found` is true we add zero, otherwise add `byte` + let to_add = b.select(last_byte_found, zero, byte); + + *out_byte = b.add(*out_byte, to_add); + // is_last_byte = offset == last_byte_offset + let is_last_byte = b.is_equal(offset, column.byte_offset); + // last_byte_found |= is_last_byte + last_byte_found = b.or(last_byte_found, is_last_byte); + }); + + let result_arr = Array::::from_array(result_bytes); + + let result_packed: Array = + Array::::pack(&result_arr, b, Endianness::Big); + + let inputs = once(column.identifier) + .chain(result_packed.arr.iter().map(|t| t.to_target())) + .collect_vec(); + let value_digest = b.map_to_curve_point(&inputs); + ( + b.curve_select(selector, curve_zero, column_digest), + b.curve_select(selector, curve_zero, value_digest), + ) + }) + .unzip(); + + ( + address, + signature, + b.add_curve_point(&metadata_points), + b.add_curve_point(&value_points), + ) } } #[cfg(test)] pub(crate) mod tests { use super::*; - use crate::tests::{TEST_MAX_COLUMNS, TEST_MAX_FIELD_PER_EVM}; + use crate::tests::TEST_MAX_COLUMNS; use mp2_common::{C, D}; use mp2_test::circuit::{run_circuit, UserCircuit}; use plonky2_ecgfp5::gadgets::curve::PartialWitnessCurve; #[derive(Clone, Debug)] struct TestMedataCircuit { - columns_metadata: ColumnsMetadata, + columns_metadata: TableMetadata, slot: u8, expected_num_actual_columns: usize, expected_metadata_digest: Point, @@ -329,21 +716,26 @@ pub(crate) mod tests { impl UserCircuit for TestMedataCircuit { // Metadata target + slot + expected number of actual columns + expected metadata digest type Wires = ( - MetadataTarget, + TableMetadataTarget, Target, Target, 
CurveTarget, ); fn build(b: &mut CBuilder) -> Self::Wires { - let metadata_target = MetadataGadget::build(b); + let metadata_target = TableMetadataGadget::build(b); let slot = b.add_virtual_target(); let expected_num_actual_columns = b.add_virtual_target(); let expected_metadata_digest = b.add_virtual_curve_target(); - let (metadata_digest, num_actual_columns) = metadata_target.digest_info(b, slot); + let metadata_digest = metadata_target.metadata_digest(b); + b.connect_curve_points(metadata_digest, expected_metadata_digest); - b.connect(num_actual_columns, expected_num_actual_columns); + + b.connect( + metadata_target.num_actual_columns, + expected_num_actual_columns, + ); ( metadata_target, @@ -354,7 +746,8 @@ pub(crate) mod tests { } fn prove(&self, pw: &mut PartialWitness, wires: &Self::Wires) { - MetadataGadget::assign(pw, &self.columns_metadata, &wires.0); + TableMetadataGadget::assign(pw, &self.columns_metadata, &wires.0); + pw.set_target(wires.1, F::from_canonical_u8(self.slot)); pw.set_target( wires.2, @@ -371,12 +764,18 @@ pub(crate) mod tests { let slot = rng.gen(); let evm_word = rng.gen(); - let metadata_gadget = ColumnsMetadata::sample(slot, evm_word); - let expected_num_actual_columns = metadata_gadget.num_actual_columns(); - let expected_metadata_digest = metadata_gadget.digest(); + let metadata = TableMetadata::::sample( + true, + &[], + &[slot], + F::from_canonical_u32(evm_word), + ); + + let expected_num_actual_columns = metadata.num_actual_columns(); + let expected_metadata_digest = metadata.digest(); let test_circuit = TestMedataCircuit { - columns_metadata: metadata_gadget, + columns_metadata: metadata, slot, expected_num_actual_columns, expected_metadata_digest, diff --git a/mp2-v1/src/values_extraction/gadgets/mod.rs b/mp2-v1/src/values_extraction/gadgets/mod.rs index 08059cda0..c8a4684de 100644 --- a/mp2-v1/src/values_extraction/gadgets/mod.rs +++ b/mp2-v1/src/values_extraction/gadgets/mod.rs @@ -1,3 +1,2 @@ -pub mod column_gadget; pub mod 
column_info; pub mod metadata_gadget; diff --git a/mp2-v1/src/values_extraction/leaf_mapping.rs b/mp2-v1/src/values_extraction/leaf_mapping.rs index ccb5660de..1f09f3b9c 100644 --- a/mp2-v1/src/values_extraction/leaf_mapping.rs +++ b/mp2-v1/src/values_extraction/leaf_mapping.rs @@ -1,17 +1,13 @@ //! Module handling the mapping entries inside a storage trie use crate::values_extraction::{ - gadgets::{ - column_gadget::ColumnGadget, - metadata_gadget::{ColumnsMetadata, MetadataTarget}, - }, public_inputs::{PublicInputs, PublicInputsArgs}, KEY_ID_PREFIX, }; use anyhow::Result; use itertools::Itertools; use mp2_common::{ - array::{Array, Vector, VectorWire}, + array::{Array, Targetable, Vector, VectorWire}, group_hashing::CircuitBuilderGroupHashing, keccak::{InputData, KeccakCircuit, KeccakWires}, mpt_sequential::{ @@ -21,7 +17,8 @@ use mp2_common::{ public_inputs::PublicInputCommon, storage_key::{MappingSlot, MappingStructSlotWires}, types::{CBuilder, GFp, MAPPING_LEAF_VALUE_LEN}, - utils::{Endianness, PackerTarget, ToTargets}, + u256::UInt256Target, + utils::{Endianness, ToTargets}, CHasher, D, F, }; use plonky2::{ @@ -32,130 +29,123 @@ use plonky2::{ }, plonk::proof::ProofWithPublicInputsTarget, }; + use plonky2_ecdsa::gadgets::nonnative::CircuitBuilderNonNative; use plonky2_ecgfp5::gadgets::curve::CircuitBuilderEcGFp5; use recursion_framework::circuit_builder::CircuitLogicWires; use serde::{Deserialize, Serialize}; -use std::{iter, iter::once}; +use std::iter::once; -use super::gadgets::metadata_gadget::MetadataGadget; +use super::gadgets::metadata_gadget::{TableMetadata, TableMetadataGadget, TableMetadataTarget}; #[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)] -pub struct LeafMappingWires< - const NODE_LEN: usize, - const MAX_COLUMNS: usize, - const MAX_FIELD_PER_EVM: usize, -> where - [(); PAD_LEN(NODE_LEN)]:, +pub struct LeafMappingWires +where + [(); MAX_COLUMNS - 1]:, { /// Full node from the MPT proof - pub(crate) node: VectorWire, + pub(crate) 
node: VectorWire, /// Leaf value - pub(crate) value: Array, + pub(crate) value: Array, /// MPT root - pub(crate) root: KeccakWires<{ PAD_LEN(NODE_LEN) }>, + pub(crate) root: KeccakWires<{ PAD_LEN(69) }>, /// Storage mapping variable slot pub(crate) slot: MappingStructSlotWires, - /// Identifier of the column of the table storing the key of the current mapping entry - pub(crate) key_id: Target, /// MPT metadata - metadata: MetadataTarget, + metadata: TableMetadataTarget, + /// The offset from the base slot + offset: Target, } /// Circuit to prove the correct derivation of the MPT key from a mapping slot #[derive(Clone, Debug, Serialize, Deserialize)] -pub struct LeafMappingCircuit< - const NODE_LEN: usize, - const MAX_COLUMNS: usize, - const MAX_FIELD_PER_EVM: usize, -> where - [(); PAD_LEN(NODE_LEN)]:, +pub struct LeafMappingCircuit +where + [(); MAX_COLUMNS - 1]:, { pub(crate) node: Vec, pub(crate) slot: MappingSlot, - pub(crate) key_id: F, - pub(crate) metadata: ColumnsMetadata, + pub(crate) metadata: TableMetadata, + pub(crate) offset: u32, } -impl - LeafMappingCircuit +impl LeafMappingCircuit where - [(); PAD_LEN(NODE_LEN)]:, + [(); MAX_COLUMNS - 1]:, { - pub fn build(b: &mut CBuilder) -> LeafMappingWires { + pub fn build(b: &mut CBuilder) -> LeafMappingWires { let zero = b.zero(); - let one = b.one(); - let key_id = b.add_virtual_target(); - let metadata = MetadataGadget::build(b); - let slot = MappingSlot::build_struct(b, metadata.evm_word); + let metadata = TableMetadataGadget::build(b); + let offset = b.add_virtual_target(); + let slot = MappingSlot::build_struct(b, offset); // Build the node wires. 
- let wires = - MPTLeafOrExtensionNode::build_and_advance_key::<_, D, NODE_LEN, MAX_LEAF_VALUE_LEN>( - b, - &slot.keccak_mpt.base.mpt_key, - ); + let wires = MPTLeafOrExtensionNode::build_and_advance_key::<_, D, 69, MAX_LEAF_VALUE_LEN>( + b, + &slot.keccak_mpt.base.mpt_key, + ); let node = wires.node; let root = wires.root; + let key_input_no_offset = slot + .keccak_mpt + .base + .keccak_location + .output + .pack(b, Endianness::Big); + let key_input_with_offset = slot.keccak_mpt.location_bytes.pack(b, Endianness::Big); + + let u256_no_off = + UInt256Target::new_from_be_limbs(key_input_no_offset.arr.as_slice()).unwrap(); + let u256_loc = + UInt256Target::new_from_be_limbs(key_input_with_offset.arr.as_slice()).unwrap(); + // Left pad the leaf value. - let value: Array = left_pad_leaf_value(b, &wires.value); - - // Compute the metadata digest and number of actual columns. - let (metadata_digest, num_actual_columns) = metadata.digest_info(b, slot.mapping_slot); - // We add key column to number of actual columns. - let num_actual_columns = b.add(num_actual_columns, one); - - // key_column_md = H( "\0KEY" || slot) - let key_id_prefix = b.constant(F::from_canonical_u32(u32::from_be_bytes( - KEY_ID_PREFIX.try_into().unwrap(), - ))); - let inputs = vec![key_id_prefix, slot.mapping_slot]; - let key_column_md = b.hash_n_to_hash_no_pad::(inputs); - // Add the information related to the key to the metadata. - // metadata_digest += D(key_column_md || key_id) - let inputs = key_column_md - .to_targets() - .into_iter() - .chain(once(key_id)) - .collect_vec(); - let metadata_key_digest = b.map_to_curve_point(&inputs); - let metadata_digest = b.add_curve_point(&[metadata_digest, metadata_key_digest]); - - // Compute the values digest. - let values_digest = ColumnGadget::::new( - &value.arr, - &metadata.table_info[..MAX_FIELD_PER_EVM], - &metadata.is_extracted_columns[..MAX_FIELD_PER_EVM], - ) - .build(b); - - // values_digest += evm_word == 0 ? 
D(key_id || pack(left_pad32(key))) : CURVE_ZERO - let packed_mapping_key = slot.mapping_key.arr.pack(b, Endianness::Big); - let inputs = iter::once(key_id) - .chain(packed_mapping_key.clone()) - .collect_vec(); - let values_key_digest = b.map_to_curve_point(&inputs); - let is_evm_word_zero = b.is_equal(metadata.evm_word, zero); + let value: Array = left_pad_leaf_value(b, &wires.value); + + // Compute the metadata digest and the value digest + let packed_mapping_key = Array::::pack(&slot.mapping_key, b, Endianness::Big); + + let (input_metadata_digest, input_value_digest) = + metadata.inputs_digests(b, &[packed_mapping_key.clone()]); + let (extracted_metadata_digest, extracted_value_digest) = metadata.extracted_digests( + b, + &value, + &u256_no_off, + &u256_loc, + &[zero, zero, zero, zero, zero, zero, zero, slot.mapping_slot], + ); + + let selector = b.is_equal(zero, offset); let curve_zero = b.curve_zero(); - let values_key_digest = b.curve_select(is_evm_word_zero, values_key_digest, curve_zero); - let values_digest = b.add_curve_point(&[values_digest, values_key_digest]); + let selected_input_value_digest = b.curve_select(selector, input_value_digest, curve_zero); + let value_digest = + b.add_curve_point(&[selected_input_value_digest, extracted_value_digest]); + let metadata_digest = + b.add_curve_point(&[input_metadata_digest, extracted_metadata_digest]); + // Compute the unique data to identify a row is the mapping key. 
// row_unique_data = H(pack(left_pad32(key)) - let row_unique_data = b.hash_n_to_hash_no_pad::(packed_mapping_key); + let row_unique_data = b.hash_n_to_hash_no_pad::( + packed_mapping_key + .arr + .iter() + .map(|t| t.to_target()) + .collect::>(), + ); // row_id = H2int(row_unique_data || num_actual_columns) let inputs = row_unique_data .to_targets() .into_iter() - .chain(once(num_actual_columns)) + .chain(once(metadata.num_actual_columns)) .collect(); let hash = b.hash_n_to_hash_no_pad::(inputs); let row_id = hash_to_int_target(b, hash); // values_digest = values_digest * row_id let row_id = b.biguint_to_nonnative(&row_id); - let values_digest = b.curve_scalar_mul(values_digest, &row_id); + let values_digest = b.curve_scalar_mul(value_digest, &row_id); // Only one leaf in this node. let n = b.one(); @@ -175,39 +165,34 @@ where value, root, slot, - key_id, metadata, + offset, } } - pub fn assign( - &self, - pw: &mut PartialWitness, - wires: &LeafMappingWires, - ) { + pub fn assign(&self, pw: &mut PartialWitness, wires: &LeafMappingWires) { let padded_node = - Vector::::from_vec(&self.node).expect("Invalid node"); + Vector::::from_vec(&self.node).expect("Invalid node"); wires.node.assign(pw, &padded_node); - KeccakCircuit::<{ PAD_LEN(NODE_LEN) }>::assign( + KeccakCircuit::<{ PAD_LEN(69) }>::assign( pw, &wires.root, &InputData::Assigned(&padded_node), ); - pw.set_target(wires.key_id, self.key_id); - self.slot - .assign_struct(pw, &wires.slot, self.metadata.evm_word); - MetadataGadget::assign(pw, &self.metadata, &wires.metadata); + + self.slot.assign_struct(pw, &wires.slot, self.offset); + TableMetadataGadget::assign(pw, &self.metadata, &wires.metadata); + pw.set_target(wires.offset, F::from_canonical_u32(self.offset)); } } /// Num of children = 0 -impl - CircuitLogicWires for LeafMappingWires +impl CircuitLogicWires for LeafMappingWires where - [(); PAD_LEN(NODE_LEN)]:, + [(); MAX_COLUMNS - 1]:, { type CircuitBuilderParams = (); - type Inputs = LeafMappingCircuit; + 
type Inputs = LeafMappingCircuit; const NUM_PUBLIC_INPUTS: usize = PublicInputs::::TOTAL_LEN; @@ -228,20 +213,15 @@ where #[cfg(test)] mod tests { use super::*; - use crate::{ - tests::{TEST_MAX_COLUMNS, TEST_MAX_FIELD_PER_EVM}, - values_extraction::{ - compute_leaf_mapping_metadata_digest, compute_leaf_mapping_values_digest, - }, - MAX_LEAF_NODE_LEN, - }; + use crate::tests::TEST_MAX_COLUMNS; use eth_trie::{Nibbles, Trie}; use mp2_common::{ array::Array, eth::{StorageSlot, StorageSlotNode}, mpt_sequential::utils::bytes_to_nibbles, + poseidon::{hash_to_int_value, H}, rlp::MAX_KEY_NIBBLE_LEN, - utils::{keccak256, Endianness, Packer}, + utils::{keccak256, Endianness, Packer, ToFields}, C, D, F, }; use mp2_test::{ @@ -251,13 +231,15 @@ mod tests { }; use plonky2::{ field::types::Field, + hash::hash_types::HashOut, iop::{target::Target, witness::PartialWitness}, + plonk::config::Hasher, }; + use plonky2_ecgfp5::curve::scalar_field::Scalar; use rand::{thread_rng, Rng}; - type LeafCircuit = - LeafMappingCircuit; - type LeafWires = LeafMappingWires; + type LeafCircuit = LeafMappingCircuit; + type LeafWires = LeafMappingWires; #[derive(Clone, Debug)] struct TestLeafMappingCircuit { @@ -285,9 +267,7 @@ mod tests { } } - fn test_circuit_for_storage_slot(mapping_key: Vec, storage_slot: StorageSlot) { - let rng = &mut thread_rng(); - + fn test_circuit_for_storage_slot(mapping_key: &[u8; 32], storage_slot: StorageSlot) { let (mut trie, _) = generate_random_storage_mpt::<3, MAPPING_LEAF_VALUE_LEN>(); let value = random_vector(MAPPING_LEAF_VALUE_LEN); let encoded_value: Vec = rlp::encode(&value).to_vec(); @@ -301,32 +281,47 @@ mod tests { let slot = storage_slot.slot(); let evm_word = storage_slot.evm_offset(); - let key_id = rng.gen(); - let metadata = - ColumnsMetadata::::sample(slot, evm_word); + // Compute the metadata digest. 
- let table_info = metadata.actual_table_info().to_vec(); - let extracted_column_identifiers = metadata.extracted_column_identifiers(); - let metadata_digest = compute_leaf_mapping_metadata_digest::< - TEST_MAX_COLUMNS, - TEST_MAX_FIELD_PER_EVM, - >(table_info.clone(), slot, key_id); - // Compute the values digest. - let values_digest = compute_leaf_mapping_values_digest::( - table_info, - &extracted_column_identifiers, - value.clone().try_into().unwrap(), - mapping_key.clone(), - evm_word, - key_id, + let table_metadata = TableMetadata::::sample( + true, + &[KEY_ID_PREFIX], + &[slot], + F::from_canonical_u32(evm_word), ); - let slot = MappingSlot::new(slot, mapping_key.clone()); - let c = LeafCircuit { + + let metadata_digest = table_metadata.digest(); + let (input_val_digest, row_unique_data) = table_metadata.input_value_digest(&[mapping_key]); + let extracted_val_digest = + table_metadata.extracted_value_digest(&value, &[slot], F::from_canonical_u32(evm_word)); + + let slot = MappingSlot::new(slot, mapping_key.to_vec()); + // row_id = H2int(row_unique_data || num_actual_columns) + let inputs = HashOut::from(row_unique_data) + .to_fields() + .into_iter() + .chain(once(F::from_canonical_usize( + table_metadata.num_actual_columns, + ))) + .collect_vec(); + let hash = H::hash_no_pad(&inputs); + let row_id = hash_to_int_value(hash); + + // values_digest = values_digest * row_id + let row_id = Scalar::from_noncanonical_biguint(row_id); + let values_digest = if evm_word == 0 { + (extracted_val_digest + input_val_digest) * row_id + } else { + extracted_val_digest * row_id + }; + + let c = LeafMappingCircuit:: { node: node.clone(), - slot, - key_id: F::from_canonical_u64(key_id), - metadata, + slot: slot.clone(), + metadata: table_metadata, + offset: evm_word, }; + let test_circuit = TestLeafMappingCircuit { c, exp_value: value.clone(), @@ -364,18 +359,20 @@ mod tests { #[test] fn test_values_extraction_leaf_mapping_variable() { - let mapping_key = random_vector(10); - 
let storage_slot = StorageSlot::Mapping(mapping_key.clone(), 2); + let rng = &mut thread_rng(); + let mapping_key: [u8; 32] = std::array::from_fn(|_| rng.gen()); + let storage_slot = StorageSlot::Mapping(mapping_key.to_vec(), 2); - test_circuit_for_storage_slot(mapping_key, storage_slot); + test_circuit_for_storage_slot(&mapping_key, storage_slot); } #[test] fn test_values_extraction_leaf_mapping_struct() { - let mapping_key = random_vector(20); - let parent = StorageSlot::Mapping(mapping_key.clone(), 5); + let rng = &mut thread_rng(); + let mapping_key: [u8; 32] = std::array::from_fn(|_| rng.gen()); + let parent = StorageSlot::Mapping(mapping_key.to_vec(), 5); let storage_slot = StorageSlot::Node(StorageSlotNode::new_struct(parent, 20)); - test_circuit_for_storage_slot(mapping_key, storage_slot); + test_circuit_for_storage_slot(&mapping_key, storage_slot); } } diff --git a/mp2-v1/src/values_extraction/leaf_mapping_of_mappings.rs b/mp2-v1/src/values_extraction/leaf_mapping_of_mappings.rs index b05797d53..7bd8ecef4 100644 --- a/mp2-v1/src/values_extraction/leaf_mapping_of_mappings.rs +++ b/mp2-v1/src/values_extraction/leaf_mapping_of_mappings.rs @@ -3,26 +3,22 @@ //! outer key, while the key for the mapping stored in the entry mapping is referred to as inner key. 
use crate::values_extraction::{ - gadgets::{ - column_gadget::ColumnGadget, - metadata_gadget::{ColumnsMetadata, MetadataTarget}, - }, + gadgets::metadata_gadget::{TableMetadataGadget, TableMetadataTarget}, public_inputs::{PublicInputs, PublicInputsArgs}, INNER_KEY_ID_PREFIX, OUTER_KEY_ID_PREFIX, }; use anyhow::Result; use itertools::Itertools; use mp2_common::{ - array::{Array, Vector, VectorWire}, + array::{Array, Targetable, Vector, VectorWire, L32}, group_hashing::CircuitBuilderGroupHashing, keccak::{InputData, KeccakCircuit, KeccakWires}, - mpt_sequential::{ - utils::left_pad_leaf_value, MPTLeafOrExtensionNode, MAX_LEAF_VALUE_LEN, PAD_LEN, - }, + mpt_sequential::{utils::left_pad_leaf_value, MPTLeafOrExtensionNode, PAD_LEN}, poseidon::hash_to_int_target, public_inputs::PublicInputCommon, storage_key::{MappingOfMappingsSlotWires, MappingSlot}, types::{CBuilder, GFp, MAPPING_LEAF_VALUE_LEN}, + u256::UInt256Target, utils::{Endianness, ToTargets}, CHasher, D, F, }; @@ -34,162 +30,127 @@ use plonky2::{ }, plonk::proof::ProofWithPublicInputsTarget, }; +use plonky2_crypto::u32::arithmetic_u32::U32Target; use plonky2_ecdsa::gadgets::nonnative::CircuitBuilderNonNative; -use plonky2_ecgfp5::gadgets::curve::CircuitBuilderEcGFp5; +use plonky2_ecgfp5::{curve::scalar_field::Scalar, gadgets::curve::CircuitBuilderEcGFp5}; use recursion_framework::circuit_builder::CircuitLogicWires; use serde::{Deserialize, Serialize}; -use std::{iter, iter::once}; +use std::iter::once; -use super::gadgets::metadata_gadget::MetadataGadget; +use super::gadgets::metadata_gadget::TableMetadata; #[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)] -pub struct LeafMappingOfMappingsWires< - const NODE_LEN: usize, - const MAX_COLUMNS: usize, - const MAX_FIELD_PER_EVM: usize, -> where - [(); PAD_LEN(NODE_LEN)]:, +pub struct LeafMappingOfMappingsWires +where + [(); MAX_COLUMNS - 2]:, { /// Full node from the MPT proof - pub(crate) node: VectorWire, + pub(crate) node: VectorWire, /// Leaf value 
- pub(crate) value: Array, + pub(crate) value: Array, /// MPT root - pub(crate) root: KeccakWires<{ PAD_LEN(NODE_LEN) }>, + pub(crate) root: KeccakWires<{ PAD_LEN(69) }>, /// Mapping slot associating wires including outer and inner mapping keys pub(crate) slot: MappingOfMappingsSlotWires, - /// Identifier of the column of the table storing the outer key of the current mapping entry - pub(crate) outer_key_id: Target, - /// Identifier of the column of the table storing the inner key of the indexed mapping entry - pub(crate) inner_key_id: Target, /// MPT metadata - metadata: MetadataTarget, + metadata: TableMetadataTarget, + offset: Target, } /// Circuit to prove the correct derivation of the MPT key from mappings where /// the value stored in each mapping entry is another mapping #[derive(Clone, Debug, Serialize, Deserialize)] -pub struct LeafMappingOfMappingsCircuit< - const NODE_LEN: usize, - const MAX_COLUMNS: usize, - const MAX_FIELD_PER_EVM: usize, -> where - [(); PAD_LEN(NODE_LEN)]:, +pub struct LeafMappingOfMappingsCircuit +where + [(); MAX_COLUMNS - 2]:, { pub(crate) node: Vec, pub(crate) slot: MappingSlot, pub(crate) inner_key: Vec, - pub(crate) outer_key_id: F, - pub(crate) inner_key_id: F, - pub(crate) metadata: ColumnsMetadata, + pub(crate) metadata: TableMetadata, + pub(crate) evm_word: u8, } -impl - LeafMappingOfMappingsCircuit +impl LeafMappingOfMappingsCircuit where - [(); PAD_LEN(NODE_LEN)]:, + [(); MAX_COLUMNS - 2]:, { - pub fn build( - b: &mut CBuilder, - ) -> LeafMappingOfMappingsWires { + pub fn build(b: &mut CBuilder) -> LeafMappingOfMappingsWires { + let offset = b.add_virtual_target(); + let metadata = TableMetadataGadget::::build(b); + let slot = MappingSlot::build_mapping_of_mappings(b, offset); + let zero = b.zero(); - let two = b.two(); - let [outer_key_id, inner_key_id] = b.add_virtual_target_arr(); - let metadata = MetadataGadget::build(b); - let slot = MappingSlot::build_mapping_of_mappings(b, metadata.evm_word); + let 
key_input_no_offset = slot + .keccak_mpt + .base + .keccak_location + .output + .pack(b, Endianness::Big); + let key_input_with_offset = slot.keccak_mpt.location_bytes.pack(b, Endianness::Big); + + let u256_no_off = + UInt256Target::new_from_be_limbs(key_input_no_offset.arr.as_slice()).unwrap(); + let u256_loc = + UInt256Target::new_from_be_limbs(key_input_with_offset.arr.as_slice()).unwrap(); // Build the node wires. - let wires = - MPTLeafOrExtensionNode::build_and_advance_key::<_, D, NODE_LEN, MAX_LEAF_VALUE_LEN>( - b, - &slot.keccak_mpt.base.mpt_key, - ); - let node = wires.node; + let wires = MPTLeafOrExtensionNode::build_and_advance_key::<_, D, 69, 33>( + b, + &slot.keccak_mpt.base.mpt_key, + ); + let node: VectorWire = wires.node; let root = wires.root; // Left pad the leaf value. - let value: Array = left_pad_leaf_value(b, &wires.value); - - // Compute the metadata digest and number of actual columns. - let (metadata_digest, num_actual_columns) = metadata.digest_info(b, slot.mapping_slot); - // Add inner key and outer key columns to the number of actual columns. - let num_actual_columns = b.add(num_actual_columns, two); - - // Compute the outer and inner key metadata digests. 
- let [outer_key_digest, inner_key_digest] = [ - (OUTER_KEY_ID_PREFIX, outer_key_id), - (INNER_KEY_ID_PREFIX, inner_key_id), - ] - .map(|(prefix, key_id)| { - let prefix = b.constant(F::from_canonical_u64(u64::from_be_bytes( - prefix.try_into().unwrap(), - ))); - - // key_column_md = H(KEY_ID_PREFIX || slot) - let inputs = vec![prefix, slot.mapping_slot]; - let key_column_md = b.hash_n_to_hash_no_pad::(inputs); - - // key_digest = D(key_column_md || key_id) - let inputs = key_column_md - .to_targets() - .into_iter() - .chain(once(key_id)) - .collect_vec(); - b.map_to_curve_point(&inputs) - }); + let value: Array = left_pad_leaf_value(b, &wires.value); + + // Compute the metadata digest and the value digest + let input_values: [Array; 2] = [&slot.outer_key, &slot.inner_key] + .map(|key| Array::::pack(key, b, Endianness::Big)); + + let (input_metadata_digest, input_value_digest) = metadata.inputs_digests(b, &input_values); + let (extracted_metadata_digest, extracted_value_digest) = metadata.extracted_digests( + b, + &value, + &u256_no_off, + &u256_loc, + &[zero, zero, zero, zero, zero, zero, zero, slot.mapping_slot], + ); - // Add the outer and inner key digests into the metadata digest. - // metadata_digest += outer_key_digest + inner_key_digest let metadata_digest = - b.add_curve_point(&[metadata_digest, inner_key_digest, outer_key_digest]); + b.add_curve_point(&[input_metadata_digest, extracted_metadata_digest]); - // Compute the values digest. - let values_digest = ColumnGadget::::new( - &value.arr, - &metadata.table_info[..MAX_FIELD_PER_EVM], - &metadata.is_extracted_columns[..MAX_FIELD_PER_EVM], - ) - .build(b); - - // Compute the outer and inner key values digests. 
+ let input_selector = b.is_equal(zero, offset); let curve_zero = b.curve_zero(); - let [packed_outer_key, packed_inner_key] = - [&slot.outer_key, &slot.inner_key].map(|key| key.pack(b, Endianness::Big).to_targets()); - let is_evm_word_zero = b.is_equal(metadata.evm_word, zero); - let [outer_key_digest, inner_key_digest] = [ - (outer_key_id, packed_outer_key.clone()), - (inner_key_id, packed_inner_key.clone()), - ] - .map(|(key_id, packed_key)| { - // D(key_id || pack(key)) - let inputs = iter::once(key_id).chain(packed_key).collect_vec(); - let key_digest = b.map_to_curve_point(&inputs); - // key_digest = evm_word == 0 ? key_digset : CURVE_ZERO - b.curve_select(is_evm_word_zero, key_digest, curve_zero) - }); - // values_digest += outer_key_digest + inner_key_digest - let values_digest = b.add_curve_point(&[values_digest, inner_key_digest, outer_key_digest]); + let input_value_digest = b.curve_select(input_selector, input_value_digest, curve_zero); + let value_digest = b.add_curve_point(&[input_value_digest, extracted_value_digest]); // Compute the unique data to identify a row is the mapping key: // row_unique_data = H(outer_key || inner_key) - let inputs = packed_outer_key - .into_iter() - .chain(packed_inner_key) - .collect(); + let inputs = input_values + .iter() + .flat_map(|arr| { + arr.arr + .iter() + .map(|t| t.to_target()) + .collect::>() + }) + .collect::>(); let row_unique_data = b.hash_n_to_hash_no_pad::(inputs); // row_id = H2int(row_unique_data || num_actual_columns) let inputs = row_unique_data .to_targets() .into_iter() - .chain(once(num_actual_columns)) + .chain(once(metadata.num_actual_columns)) .collect(); let hash = b.hash_n_to_hash_no_pad::(inputs); let row_id = hash_to_int_target(b, hash); // values_digest = values_digest * row_id let row_id = b.biguint_to_nonnative(&row_id); - let values_digest = b.curve_scalar_mul(values_digest, &row_id); + let values_digest = b.curve_scalar_mul(value_digest, &row_id); // Only one leaf in this node. 
let n = b.one(); @@ -209,46 +170,44 @@ where value, root, slot, - outer_key_id, - inner_key_id, metadata, + offset, } } pub fn assign( &self, pw: &mut PartialWitness, - wires: &LeafMappingOfMappingsWires, + wires: &LeafMappingOfMappingsWires, ) { let padded_node = - Vector::::from_vec(&self.node).expect("Invalid node"); + Vector::::from_vec(&self.node).expect("Invalid node"); wires.node.assign(pw, &padded_node); - KeccakCircuit::<{ PAD_LEN(NODE_LEN) }>::assign( + KeccakCircuit::<{ PAD_LEN(69) }>::assign( pw, &wires.root, &InputData::Assigned(&padded_node), ); - pw.set_target(wires.outer_key_id, self.outer_key_id); - pw.set_target(wires.inner_key_id, self.inner_key_id); + self.slot.assign_mapping_of_mappings( pw, &wires.slot, &self.inner_key, - self.metadata.evm_word, + self.evm_word as u32, ); - MetadataGadget::assign(pw, &self.metadata, &wires.metadata); + TableMetadataGadget::::assign(pw, &self.metadata, &wires.metadata); + pw.set_target(wires.offset, F::from_canonical_u8(self.evm_word)); } } /// Num of children = 0 -impl - CircuitLogicWires - for LeafMappingOfMappingsWires +impl CircuitLogicWires + for LeafMappingOfMappingsWires where - [(); PAD_LEN(NODE_LEN)]:, + [(); MAX_COLUMNS - 2]:, { type CircuitBuilderParams = (); - type Inputs = LeafMappingOfMappingsCircuit; + type Inputs = LeafMappingOfMappingsCircuit; const NUM_PUBLIC_INPUTS: usize = PublicInputs::::TOTAL_LEN; @@ -269,21 +228,15 @@ where #[cfg(test)] mod tests { use super::*; - use crate::{ - tests::{TEST_MAX_COLUMNS, TEST_MAX_FIELD_PER_EVM}, - values_extraction::{ - compute_leaf_mapping_of_mappings_metadata_digest, - compute_leaf_mapping_of_mappings_values_digest, - }, - MAX_LEAF_NODE_LEN, - }; + use crate::tests::TEST_MAX_COLUMNS; use eth_trie::{Nibbles, Trie}; use mp2_common::{ array::Array, eth::{StorageSlot, StorageSlotNode}, mpt_sequential::utils::bytes_to_nibbles, + poseidon::{hash_to_int_value, H}, rlp::MAX_KEY_NIBBLE_LEN, - utils::{keccak256, Endianness, Packer}, + utils::{keccak256, 
Endianness, Packer, ToFields}, C, D, F, }; use mp2_test::{ @@ -293,28 +246,28 @@ mod tests { }; use plonky2::{ field::types::Field, + hash::hash_types::HashOut, iop::{target::Target, witness::PartialWitness}, + plonk::config::Hasher, }; use rand::{thread_rng, Rng}; use std::array; - type LeafCircuit = - LeafMappingOfMappingsCircuit; - type LeafWires = - LeafMappingOfMappingsWires; - #[derive(Clone, Debug)] - struct TestLeafMappingOfMappingsCircuit { - c: LeafCircuit, + struct TestNewLeafMappingOfMappingsCircuit { + c: LeafMappingOfMappingsCircuit, exp_value: Vec, } - impl UserCircuit for TestLeafMappingOfMappingsCircuit { + impl UserCircuit for TestNewLeafMappingOfMappingsCircuit { // Leaf wires + expected extracted value - type Wires = (LeafWires, Array); + type Wires = ( + LeafMappingOfMappingsWires, + Array, + ); fn build(b: &mut CBuilder) -> Self::Wires { - let leaf_wires = LeafCircuit::build(b); + let leaf_wires = LeafMappingOfMappingsCircuit::::build(b); let exp_value = Array::::new(b); leaf_wires.value.enforce_equal(b, &exp_value); @@ -330,12 +283,10 @@ mod tests { } fn test_circuit_for_storage_slot( - outer_key: Vec, - inner_key: Vec, + outer_key: &[u8; 32], + inner_key: &[u8; 32], storage_slot: StorageSlot, ) { - let rng = &mut thread_rng(); - let (mut trie, _) = generate_random_storage_mpt::<3, MAPPING_LEAF_VALUE_LEN>(); let value = random_vector(MAPPING_LEAF_VALUE_LEN); let encoded_value: Vec = rlp::encode(&value).to_vec(); @@ -349,40 +300,55 @@ mod tests { let slot = storage_slot.slot(); let evm_word = storage_slot.evm_offset(); - let [outer_key_id, inner_key_id] = array::from_fn(|_| rng.gen()); - let metadata = - ColumnsMetadata::::sample(slot, evm_word); // Compute the metadata digest. 
- let table_info = metadata.actual_table_info().to_vec(); - let extracted_column_identifiers = metadata.extracted_column_identifiers(); - let metadata_digest = compute_leaf_mapping_of_mappings_metadata_digest::< - TEST_MAX_COLUMNS, - TEST_MAX_FIELD_PER_EVM, - >(table_info.clone(), slot, outer_key_id, inner_key_id); - // Compute the values digest. - let values_digest = compute_leaf_mapping_of_mappings_values_digest::( - table_info, - &extracted_column_identifiers, - value.clone().try_into().unwrap(), - evm_word, - (outer_key.clone(), outer_key_id), - (inner_key.clone(), inner_key_id), + let table_metadata = TableMetadata::::sample( + true, + &[OUTER_KEY_ID_PREFIX, INNER_KEY_ID_PREFIX], + &[slot], + F::from_canonical_u32(evm_word), ); - let slot = MappingSlot::new(slot, outer_key.clone()); - let c = LeafCircuit { + + let metadata_digest = table_metadata.digest(); + let (input_val_digest, row_unique_data) = + table_metadata.input_value_digest(&[outer_key, inner_key]); + let extracted_val_digest = + table_metadata.extracted_value_digest(&value, &[slot], F::from_canonical_u32(evm_word)); + + // row_id = H2int(row_unique_data || num_actual_columns) + let inputs = HashOut::from(row_unique_data) + .to_fields() + .into_iter() + .chain(once(F::from_canonical_usize( + table_metadata.num_actual_columns, + ))) + .collect_vec(); + let hash = H::hash_no_pad(&inputs); + let row_id = hash_to_int_value(hash); + + // values_digest = values_digest * row_id + let row_id = Scalar::from_noncanonical_biguint(row_id); + let values_digest = if evm_word == 0 { + (extracted_val_digest + input_val_digest) * row_id + } else { + extracted_val_digest * row_id + }; + + let slot = MappingSlot::new(slot, outer_key.to_vec()); + + let new_c = LeafMappingOfMappingsCircuit:: { node: node.clone(), - slot, - inner_key: inner_key.clone(), - outer_key_id: F::from_canonical_u64(outer_key_id), - inner_key_id: F::from_canonical_u64(inner_key_id), - metadata, + slot: slot.clone(), + inner_key: 
inner_key.to_vec(), + metadata: table_metadata, + evm_word: evm_word as u8, }; - let test_circuit = TestLeafMappingOfMappingsCircuit { - c, + + let new_test_circuit = TestNewLeafMappingOfMappingsCircuit { + c: new_c, exp_value: value.clone(), }; - let proof = run_circuit::(test_circuit); + let proof = run_circuit::(new_test_circuit); let pi = PublicInputs::new(&proof.public_inputs); // Check root hash { @@ -408,30 +374,33 @@ mod tests { assert_eq!(pi.n(), F::ONE); // Check metadata digest assert_eq!(pi.metadata_digest(), metadata_digest.to_weierstrass()); + // Check values digest assert_eq!(pi.values_digest(), values_digest.to_weierstrass()); } #[test] fn test_values_extraction_leaf_mapping_of_mappings_variable() { - let outer_key = random_vector(10); - let inner_key = random_vector(20); - let parent = StorageSlot::Mapping(outer_key.clone(), 2); + let rng = &mut thread_rng(); + let outer_key: [u8; 32] = array::from_fn(|_| rng.gen()); + let inner_key: [u8; 32] = array::from_fn(|_| rng.gen()); + let parent = StorageSlot::Mapping(outer_key.to_vec(), 2); let storage_slot = - StorageSlot::Node(StorageSlotNode::new_mapping(parent, inner_key.clone()).unwrap()); + StorageSlot::Node(StorageSlotNode::new_mapping(parent, inner_key.to_vec()).unwrap()); - test_circuit_for_storage_slot(outer_key, inner_key, storage_slot); + test_circuit_for_storage_slot(&outer_key, &inner_key, storage_slot); } #[test] fn test_values_extraction_leaf_mapping_of_mappings_struct() { - let outer_key = random_vector(10); - let inner_key = random_vector(20); - let grand = StorageSlot::Mapping(outer_key.clone(), 2); + let rng = &mut thread_rng(); + let outer_key: [u8; 32] = array::from_fn(|_| rng.gen()); + let inner_key: [u8; 32] = array::from_fn(|_| rng.gen()); + let grand = StorageSlot::Mapping(outer_key.to_vec(), 2); let parent = - StorageSlot::Node(StorageSlotNode::new_mapping(grand, inner_key.clone()).unwrap()); + StorageSlot::Node(StorageSlotNode::new_mapping(grand, inner_key.to_vec()).unwrap()); 
let storage_slot = StorageSlot::Node(StorageSlotNode::new_struct(parent, 30)); - test_circuit_for_storage_slot(outer_key, inner_key, storage_slot); + test_circuit_for_storage_slot(&outer_key, &inner_key, storage_slot); } } diff --git a/mp2-v1/src/values_extraction/leaf_receipt.rs b/mp2-v1/src/values_extraction/leaf_receipt.rs index 2d815bc4a..b3eb60309 100644 --- a/mp2-v1/src/values_extraction/leaf_receipt.rs +++ b/mp2-v1/src/values_extraction/leaf_receipt.rs @@ -1,10 +1,8 @@ //! Module handling the leaf node inside a Receipt Trie -use crate::MAX_RECEIPT_LEAF_NODE_LEN; - use super::{ + gadgets::metadata_gadget::{TableMetadata, TableMetadataGadget, TableMetadataTarget}, public_inputs::{PublicInputs, PublicInputsArgs}, - DATA_PREFIX, GAS_USED_PREFIX, LOG_NUMBER_PREFIX, TOPIC_PREFIX, TX_INDEX_PREFIX, }; use alloy::{ @@ -13,17 +11,17 @@ use alloy::{ }; use anyhow::{anyhow, Result}; use mp2_common::{ - array::{Array, Vector, VectorWire}, + array::{Array, Targetable, Vector, VectorWire}, eth::{EventLogInfo, ReceiptProofInfo, ReceiptQuery}, group_hashing::CircuitBuilderGroupHashing, keccak::{InputData, KeccakCircuit, KeccakWires, HASH_LEN}, mpt_sequential::{MPTKeyWire, MPTReceiptLeafNode, PAD_LEN}, - poseidon::H, + poseidon::{hash_to_int_target, H}, public_inputs::PublicInputCommon, rlp::MAX_KEY_NIBBLE_LEN, types::{CBuilder, GFp}, - utils::{less_than, less_than_or_equal_to_unsafe, Endianness, PackerTarget, ToTargets}, - D, F, + utils::{less_than, less_than_or_equal_to_unsafe, Endianness, ToTargets}, + CHasher, D, F, }; use plonky2::{ field::types::Field, @@ -34,32 +32,24 @@ use plonky2::{ plonk::{circuit_builder::CircuitBuilder, config::Hasher}, }; -use plonky2_ecgfp5::gadgets::curve::{CircuitBuilderEcGFp5, CurveTarget}; +use plonky2_crypto::u32::arithmetic_u32::{CircuitBuilderU32, U32Target}; +use plonky2_ecdsa::gadgets::nonnative::CircuitBuilderNonNative; +use plonky2_ecgfp5::gadgets::curve::CircuitBuilderEcGFp5; use 
recursion_framework::circuit_builder::CircuitLogicWires; use rlp::Encodable; use serde::{Deserialize, Serialize}; -use std::{array::from_fn, iter}; -/// Maximum number of logs per transaction we can process -const MAX_LOGS_PER_TX: usize = 1; +use std::iter; /// The number of bytes that `gas_used` could take up in the receipt. /// We set a max of 3 here because this would be over half the gas in the block for Ethereum. const MAX_GAS_SIZE: u64 = 3; -/// The size of a topic in bytes in the rlp encoded receipt -const TOPICS_SIZE: usize = 32; - -/// The maximum number of topics that aren't the event signature. -const MAX_TOPICS: usize = 3; - -/// The maximum number of additional pieces of data we allow in an event (each being 32 bytes long). -const MAX_ADDITIONAL_DATA: usize = 2; - #[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)] -pub struct ReceiptLeafWires +pub struct ReceiptLeafWires where [(); PAD_LEN(NODE_LEN)]:, + [(); MAX_COLUMNS - 2]:, { /// The event we are monitoring for pub event: EventWires, @@ -70,15 +60,11 @@ where /// The index of this receipt in the block pub index: Target, /// The offsets of the relevant logs inside the node - pub relevant_logs_offset: VectorWire, + pub relevant_log_offset: Target, /// The key in the MPT Trie pub mpt_key: MPTKeyWire, - /// The column ID for the transaction index - pub tx_index_column_id: Target, - /// The column ID for the log number in the receipt - pub log_number_column_id: Target, - /// The gas used column ID - pub gas_used_column_id: Target, + /// The table metadata + pub(crate) metadata: TableMetadataTarget, } /// Contains all the information for an [`Event`] in rlp form @@ -94,193 +80,14 @@ pub struct EventWires { event_signature: Array, /// Byte offset from the start of the log to event signature sig_rel_offset: Target, - /// The topics for this Log - topics: [LogColumn; MAX_TOPICS], - /// The extra data stored by this Log - data: [LogColumn; MAX_ADDITIONAL_DATA], -} - -/// Contains all the 
information for a [`Log`] in rlp form -#[derive(Debug, Clone, Serialize, Deserialize, Copy, PartialEq, Eq)] -pub struct LogColumn { - column_id: Target, - /// The byte offset from the beggining of the log to this target - rel_byte_offset: Target, - /// The length of this topic/data - len: Target, -} - -impl LogColumn { - /// Assigns a log colum from a [`LogDataInfo`] - pub fn assign(&self, pw: &mut PartialWitness, info: &LogDataInfo) { - pw.set_target(self.column_id, info.column_id); - pw.set_target( - self.rel_byte_offset, - F::from_canonical_usize(info.rel_byte_offset), - ); - pw.set_target(self.len, F::from_canonical_usize(info.len)); - } -} - -impl EventWires { - /// Convert to an array for metadata digest - pub fn to_vec(&self) -> Vec { - let mut out = Vec::new(); - out.push(self.size); - out.extend_from_slice(&self.address.arr); - out.push(self.add_rel_offset); - out.extend_from_slice(&self.event_signature.arr); - out.push(self.sig_rel_offset); - - out - } - - #[allow(clippy::too_many_arguments)] - pub fn verify_logs_and_extract_values( - &self, - b: &mut CBuilder, - value: &VectorWire, - relevant_logs_offsets: &VectorWire, - tx_index: Target, - tx_index_column_id: Target, - log_number_column_id: Target, - gas_used_column_id: Target, - ) -> (Target, CurveTarget) { - let t = b._true(); - let one = b.one(); - let two = b.two(); - let zero = b.zero(); - let curve_zero = b.curve_zero(); - let mut row_points = Vec::new(); - - // Extract the gas used in the transaction, since the position of this can vary because it is after the key - // we have to prove we extracted from the correct location. 
- let header_len_len = b.add_const( - value.arr[0], - F::from_canonical_u64(1) - F::from_canonical_u64(247), - ); - let key_header = value.arr.random_access_large_array(b, header_len_len); - let less_than_val = b.constant(F::from_canonical_u8(128)); - let single_value = less_than(b, key_header, less_than_val, 8); - let key_len_maybe = b.add_const(key_header, F::ONE - F::from_canonical_u64(128)); - let key_len = b.select(single_value, one, key_len_maybe); - - // This is the start of the string that is the rlp encoded receipt (a string since the first element is transaction type). - // From here we subtract 183 to get the length of the length, then the encoded gas used is at length of length + 1 (for tx type) + (1 + list length) - // + 1 (for status) + 1 to get the header for the gas used string. - let string_offset = b.add(key_len, header_len_len); - let string_header = value.arr.random_access_large_array(b, string_offset); - let string_len_len = b.add_const(string_header, -F::from_canonical_u64(183)); - - let list_offset = b.add_many([string_offset, string_len_len, two]); - let list_header = value.arr.random_access_large_array(b, list_offset); - - let gas_used_offset_lo = b.add_const( - list_header, - F::from_canonical_u64(2) - F::from_canonical_u64(247), - ); - let gas_used_offset = b.add(gas_used_offset_lo, list_offset); - - let gas_used_header = value.arr.random_access_large_array(b, gas_used_offset); - let gas_used_len = b.add_const(gas_used_header, -F::from_canonical_u64(128)); - - let initial_gas_index = b.add(gas_used_offset, one); - let final_gas_index = b.add(gas_used_offset, gas_used_len); - - let combiner = b.constant(F::from_canonical_u64(1 << 8)); - - let gas_used = (0..MAX_GAS_SIZE).fold(zero, |acc, i| { - let access_index = b.add_const(initial_gas_index, F::from_canonical_u64(i)); - let array_value = value.arr.random_access_large_array(b, access_index); - - // If we have extracted a value from an index in the desired range (so lte final_gas_index) we 
want to add it. - // If access_index was strictly less than final_gas_index we need to multiply by 1 << 8 after (since the encoding is big endian) - let valid = less_than_or_equal_to_unsafe(b, access_index, final_gas_index, 12); - let need_scalar = less_than(b, access_index, final_gas_index, 12); - - let to_add = b.select(valid, array_value, zero); - - let scalar = b.select(need_scalar, combiner, one); - let tmp = b.add(acc, to_add); - b.mul(tmp, scalar) - }); - - // Map the gas used to a curve point for the value digest, gas used is the first column so use one as its column id. - let gas_digest = b.map_to_curve_point(&[gas_used_column_id, gas_used]); - let tx_index_digest = b.map_to_curve_point(&[tx_index_column_id, tx_index]); - - let initial_row_digest = b.add_curve_point(&[gas_digest, tx_index_digest]); - // We also keep track of the number of real logs we process as each log forms a row in our table - let mut n = zero; - for (index, log_offset) in relevant_logs_offsets.arr.arr.into_iter().enumerate() { - let mut points = Vec::new(); - // Extract the address bytes - let address_start = b.add(log_offset, self.add_rel_offset); - - let address_bytes = value.arr.extract_array_large::<_, _, 20>(b, address_start); - - let address_check = address_bytes.equals(b, &self.address); - // Extract the signature bytes - let sig_start = b.add(log_offset, self.sig_rel_offset); - - let sig_bytes = value.arr.extract_array_large::<_, _, 32>(b, sig_start); - - let sig_check = sig_bytes.equals(b, &self.event_signature); - - // We check to see if the relevant log offset is zero (this indicates a dummy value) - let dummy = b.is_equal(log_offset, zero); - - let address_to_enforce = b.select(dummy, t.target, address_check.target); - let sig_to_enforce = b.select(dummy, t.target, sig_check.target); - - b.connect(t.target, address_to_enforce); - b.connect(t.target, sig_to_enforce); - - for &log_column in self.topics.iter().chain(self.data.iter()) { - let data_start = b.add(log_offset, 
log_column.rel_byte_offset); - // The data is always 32 bytes long - let data_bytes = value.arr.extract_array_large::<_, _, 32>(b, data_start); - - // Pack the data and get the digest - let packed_data = data_bytes.arr.pack(b, Endianness::Big); - - let data_digest = b.map_to_curve_point( - &std::iter::once(log_column.column_id) - .chain(packed_data) - .collect::>(), - ); - - // For each column we use the `column_id` field to tell if its a dummy or not, zero indicates a dummy. - let dummy_column = b.is_equal(log_column.column_id, zero); - - let selected_point = b.select_curve_point(dummy_column, curve_zero, data_digest); - - points.push(selected_point); - } - // If this is a real row we record the gas used in the transaction - points.push(initial_row_digest); - - // We also keep track of which log this is in the receipt to avoid having identical rows in the table in the case - // that the event we are tracking can be emitted multiple times in the same transaction but has no topics or data. - let log_number = b.constant(F::from_canonical_usize(index + 1)); - let log_no_digest = b.map_to_curve_point(&[log_number_column_id, log_number]); - points.push(log_no_digest); - - let increment = b.select(dummy, zero, one); - n = b.add(n, increment); - let row_point_sum = b.add_curve_point(&points); - let sum_digest = b.map_to_curve_point(&row_point_sum.to_targets()); - let point_to_add = b.select_curve_point(dummy, curve_zero, sum_digest); - row_points.push(point_to_add); - } - - (n, b.add_curve_point(&row_points)) - } } /// Circuit to prove a transaction receipt contains logs relating to a specific event. #[derive(Clone, Debug, Serialize, Deserialize)] -pub struct ReceiptLeafCircuit { +pub struct ReceiptLeafCircuit +where + [(); MAX_COLUMNS - 2]:, +{ /// This is the RLP encoded leaf node in the Receipt Trie. pub node: Vec, /// The transaction index, telling us where the receipt is in the block. 
The RLP encoding of the index @@ -294,14 +101,12 @@ pub struct ReceiptLeafCircuit { pub rel_add_offset: usize, /// The event signature hash pub event_signature: [u8; HASH_LEN], - /// The offset of the event signatur ein the rlp encoded log + /// The offset of the event signature in the rlp encoded log pub sig_rel_offset: usize, - /// The other topics information - pub topics: [LogDataInfo; MAX_TOPICS], - /// Any additional data that we will extract from the log - pub data: [LogDataInfo; MAX_ADDITIONAL_DATA], - /// This is the offsets in the node to the start of the logs that relate to `event_info` - pub relevant_logs_offset: Vec, + /// This is the offset in the node to the start of the log that relates to `event_info` + pub relevant_log_offset: usize, + /// The table metadata + pub metadata: TableMetadata, } /// Contains all the information for data contained in an [`Event`] @@ -315,15 +120,19 @@ pub struct LogDataInfo { pub len: usize, } -impl ReceiptLeafCircuit +impl ReceiptLeafCircuit where [(); PAD_LEN(NODE_LEN)]:, + [(); MAX_COLUMNS - 2]:, { /// Create a new [`ReceiptLeafCircuit`] from a [`ReceiptProofInfo`] and a [`ReceiptQuery`] pub fn new( proof_info: &ReceiptProofInfo, query: &ReceiptQuery, - ) -> Result { + ) -> Result + where + [(); MAX_COLUMNS - 2 - NO_TOPICS - MAX_DATA]:, + { // Since the compact encoding of the key is stored first plus an additional list header and // then the first element in the receipt body is the transaction type we calculate the offset to that point @@ -351,9 +160,9 @@ where let logs_offset = receipt_off + receipt_str_payload.header_len + 1 + logs_off; // Now we produce an iterator over the logs with each logs offset. 
- let relevant_logs_offset = iter::successors(Some(0usize), |i| Some(i + 1)) + let relevant_log_offset = iter::successors(Some(0usize), |i| Some(i + 1)) .map_while(|i| logs_rlp.at_with_offset(i).ok()) - .filter_map(|(log_rlp, log_off)| { + .find_map(|(log_rlp, log_off)| { let mut bytes = log_rlp.as_raw(); let log = Log::decode(&mut bytes).ok()?; @@ -368,8 +177,7 @@ where Some(0usize) } }) - .take(MAX_LOGS_PER_TX) - .collect::>(); + .ok_or(anyhow!("There were no relevant logs in this transaction"))?; let EventLogInfo:: { size, @@ -377,56 +185,11 @@ where add_rel_offset, event_signature, sig_rel_offset, - topics, - data, + .. } = query.event; - // We need a fixed number of topics for the circuit so we use dummies to pad to the correct length. - let mut final_topics = [LogDataInfo::default(); MAX_TOPICS]; - - final_topics.iter_mut().enumerate().for_each(|(j, topic)| { - if j < NO_TOPICS { - let input = [ - address.as_slice(), - event_signature.as_slice(), - TOPIC_PREFIX, - &[j as u8 + 1], - ] - .concat() - .into_iter() - .map(GFp::from_canonical_u8) - .collect::>(); - let column_id = H::hash_no_pad(&input).elements[0]; - *topic = LogDataInfo { - column_id, - rel_byte_offset: topics[j], - len: TOPICS_SIZE, - }; - } - }); - - // We need a fixed number of pieces of data for the circuit so we use dummies to pad to the correct length. 
- let mut final_data = [LogDataInfo::default(); MAX_ADDITIONAL_DATA]; - final_data.iter_mut().enumerate().for_each(|(j, d)| { - if j < MAX_DATA { - let input = [ - address.as_slice(), - event_signature.as_slice(), - DATA_PREFIX, - &[j as u8 + 1], - ] - .concat() - .into_iter() - .map(GFp::from_canonical_u8) - .collect::>(); - let column_id = H::hash_no_pad(&input).elements[0]; - *d = LogDataInfo { - column_id, - rel_byte_offset: data[j], - len: TOPICS_SIZE, - }; - }; - }); + // Construct the table metadata from the event + let metadata = TableMetadata::::from(query.event); Ok(Self { node: last_node.clone(), @@ -436,22 +199,25 @@ where rel_add_offset: add_rel_offset, event_signature, sig_rel_offset, - topics: final_topics, - data: final_data, - relevant_logs_offset, + relevant_log_offset, + metadata, }) } - pub fn build(b: &mut CBuilder) -> ReceiptLeafWires { + pub fn build(b: &mut CBuilder) -> ReceiptLeafWires { // Build the event wires let event_wires = Self::build_event_wires(b); - + // Build the metadata + let metadata = TableMetadataGadget::build(b); let zero = b.zero(); - let curve_zero = b.curve_zero(); + + let one = b.one(); + let two = b.two(); + let t = b._true(); // Add targets for the data specific to this receipt let index = b.add_virtual_target(); - let relevant_logs_offset = VectorWire::::new(b); + let relevant_log_offset = b.add_virtual_target(); let mpt_key = MPTKeyWire::new(b); @@ -460,46 +226,126 @@ where let node = wires.node; let root = wires.root; - // Add targets for the column ids for tx index, log number and gas used - let tx_index_column_id = b.add_virtual_target(); - let log_number_column_id = b.add_virtual_target(); - let gas_used_column_id = b.add_virtual_target(); - - // For each relevant log in the transaction we have to verify it lines up with the event we are monitoring for - let (n, dv) = event_wires.verify_logs_and_extract_values::( - b, - &node, - &relevant_logs_offset, - index, - tx_index_column_id, - log_number_column_id, - 
gas_used_column_id, + + // Extract the gas used in the transaction, since the position of this can vary because it is after the key + // we have to prove we extracted from the correct location. + let header_len_len = b.add_const( + node.arr.arr[0], + F::from_canonical_u64(1) - F::from_canonical_u64(247), ); + let key_header = node.arr.random_access_large_array(b, header_len_len); + let less_than_val = b.constant(F::from_canonical_u8(128)); + let single_value = less_than(b, key_header, less_than_val, 8); + let key_len_maybe = b.add_const(key_header, F::ONE - F::from_canonical_u64(128)); + let key_len = b.select(single_value, one, key_len_maybe); - let mut core_metadata = event_wires.to_vec(); - core_metadata.push(tx_index_column_id); - core_metadata.push(log_number_column_id); - core_metadata.push(gas_used_column_id); + // This is the start of the string that is the rlp encoded receipt (a string since the first element is transaction type). + // From here we subtract 183 to get the length of the length, then the encoded gas used is at length of length + 1 (for tx type) + (1 + list length) + // + 1 (for status) + 1 to get the header for the gas used string. 
+ let string_offset = b.add(key_len, header_len_len); + let string_header = node.arr.random_access_large_array(b, string_offset); + let string_len_len = b.add_const(string_header, -F::from_canonical_u64(183)); + + let list_offset = b.add_many([string_offset, string_len_len, two]); + let list_header = node.arr.random_access_large_array(b, list_offset); + + let gas_used_offset_lo = b.add_const( + list_header, + F::from_canonical_u64(2) - F::from_canonical_u64(247), + ); + let gas_used_offset = b.add(gas_used_offset_lo, list_offset); - let initial_dm = b.map_to_curve_point(&core_metadata); + let gas_used_header = node.arr.random_access_large_array(b, gas_used_offset); + let gas_used_len = b.add_const(gas_used_header, -F::from_canonical_u64(128)); - let mut meta_data_points = vec![initial_dm]; + let initial_gas_index = b.add(gas_used_offset, one); + let final_gas_index = b.add(gas_used_offset, gas_used_len); - for topic in event_wires.topics.iter() { - let is_id_zero = b.is_equal(topic.column_id, zero); - let column_id_digest = b.map_one_to_curve_point(topic.column_id); - let selected = b.select_curve_point(is_id_zero, curve_zero, column_id_digest); - meta_data_points.push(selected); - } + let combiner = b.constant(F::from_canonical_u64(1 << 8)); - for data in event_wires.data.iter() { - let is_id_zero = b.is_equal(data.column_id, zero); - let column_id_digest = b.map_one_to_curve_point(data.column_id); - let selected = b.select_curve_point(is_id_zero, curve_zero, column_id_digest); - meta_data_points.push(selected); - } + let gas_used = (0..MAX_GAS_SIZE).fold(zero, |acc, i| { + let access_index = b.add_const(initial_gas_index, F::from_canonical_u64(i)); + let array_value = node.arr.random_access_large_array(b, access_index); + + // If we have extracted a value from an index in the desired range (so lte final_gas_index) we want to add it. 
+ // If access_index was strictly less than final_gas_index we need to multiply by 1 << 8 after (since the encoding is big endian) + let valid = less_than_or_equal_to_unsafe(b, access_index, final_gas_index, 12); + let need_scalar = less_than(b, access_index, final_gas_index, 12); + + let to_add = b.select(valid, array_value, zero); - let dm = b.add_curve_point(&meta_data_points); + let scalar = b.select(need_scalar, combiner, one); + let tmp = b.add(acc, to_add); + b.mul(tmp, scalar) + }); + + let zero_u32 = b.zero_u32(); + let tx_index_input = Array::::from_array([ + zero_u32, + zero_u32, + zero_u32, + zero_u32, + zero_u32, + zero_u32, + zero_u32, + U32Target::from_target(index), + ]); + let gas_used_input = Array::::from_array([ + zero_u32, + zero_u32, + zero_u32, + zero_u32, + zero_u32, + zero_u32, + zero_u32, + U32Target::from_target(gas_used), + ]); + + // Extract input values + let (input_metadata_digest, input_value_digest) = + metadata.inputs_digests(b, &[tx_index_input.clone(), gas_used_input.clone()]); + // Now we verify extracted values + let (address_extract, signature_extract, extracted_metadata_digest, extracted_value_digest) = + metadata.extracted_receipt_digests( + b, + &node.arr, + relevant_log_offset, + event_wires.add_rel_offset, + event_wires.sig_rel_offset, + ); + + let address_check = address_extract.equals(b, &event_wires.address); + let sig_check = signature_extract.equals(b, &event_wires.event_signature); + + b.connect(t.target, address_check.target); + b.connect(t.target, sig_check.target); + + let dm = b.add_curve_point(&[input_metadata_digest, extracted_metadata_digest]); + + let value_digest = b.add_curve_point(&[input_value_digest, extracted_value_digest]); + + // Compute the unique data to identify a row is the mapping key. 
+ // row_unique_data = H(tx_index || gas_used) + let row_unique_data = b.hash_n_to_hash_no_pad::( + tx_index_input + .arr + .iter() + .map(|t| t.to_target()) + .chain(gas_used_input.arr.iter().map(|t| t.to_target())) + .collect::>(), + ); + // row_id = H2int(row_unique_data || num_actual_columns) + let inputs = row_unique_data + .to_targets() + .into_iter() + .chain(std::iter::once(metadata.num_actual_columns)) + .collect(); + let hash = b.hash_n_to_hash_no_pad::(inputs); + let row_id = hash_to_int_target(b, hash); + + // values_digest = values_digest * row_id + let row_id = b.biguint_to_nonnative(&row_id); + let dv = b.curve_scalar_mul(value_digest, &row_id); // Register the public inputs PublicInputsArgs { @@ -507,7 +353,7 @@ where k: &wires.key, dv, dm, - n, + n: one, } .register_args(b); @@ -516,11 +362,9 @@ where node, root, index, - relevant_logs_offset, + relevant_log_offset, mpt_key, - tx_index_column_id, - log_number_column_id, - gas_used_column_id, + metadata, } } @@ -539,36 +383,20 @@ where // Signature relative offset let sig_rel_offset = b.add_virtual_target(); - // topics - let topics: [LogColumn; 3] = from_fn(|_| Self::build_log_column(b)); - - // data - let data: [LogColumn; 2] = from_fn(|_| Self::build_log_column(b)); - EventWires { size, address, add_rel_offset, event_signature, sig_rel_offset, - topics, - data, - } - } - - fn build_log_column(b: &mut CBuilder) -> LogColumn { - let column_id = b.add_virtual_target(); - let rel_byte_offset = b.add_virtual_target(); - let len = b.add_virtual_target(); - - LogColumn { - column_id, - rel_byte_offset, - len, } } - pub fn assign(&self, pw: &mut PartialWitness, wires: &ReceiptLeafWires) { + pub fn assign( + &self, + pw: &mut PartialWitness, + wires: &ReceiptLeafWires, + ) { self.assign_event_wires(pw, &wires.event); let pad_node = @@ -581,11 +409,10 @@ where ); pw.set_target(wires.index, GFp::from_canonical_u64(self.tx_index)); - let relevant_logs_vector = - Vector::::from_vec(&self.relevant_logs_offset) 
- .expect("Could not assign relevant logs offsets"); - wires.relevant_logs_offset.assign(pw, &relevant_logs_vector); - + pw.set_target( + wires.relevant_log_offset, + GFp::from_canonical_usize(self.relevant_log_offset), + ); let key_encoded = self.tx_index.rlp_bytes(); let key_nibbles: [u8; MAX_KEY_NIBBLE_LEN] = key_encoded .iter() @@ -598,43 +425,7 @@ where wires.mpt_key.assign(pw, &key_nibbles, key_encoded.len()); - // Work out the column ids for tx_index, log_number and gas_used - let tx_index_input = [ - self.address.as_slice(), - self.event_signature.as_slice(), - TX_INDEX_PREFIX, - ] - .concat() - .into_iter() - .map(GFp::from_canonical_u8) - .collect::>(); - let tx_index_column_id = H::hash_no_pad(&tx_index_input).elements[0]; - - let log_number_input = [ - self.address.as_slice(), - self.event_signature.as_slice(), - LOG_NUMBER_PREFIX, - ] - .concat() - .into_iter() - .map(GFp::from_canonical_u8) - .collect::>(); - let log_number_column_id = H::hash_no_pad(&log_number_input).elements[0]; - - let gas_used_input = [ - self.address.as_slice(), - self.event_signature.as_slice(), - GAS_USED_PREFIX, - ] - .concat() - .into_iter() - .map(GFp::from_canonical_u8) - .collect::>(); - let gas_used_column_id = H::hash_no_pad(&gas_used_input).elements[0]; - - pw.set_target(wires.tx_index_column_id, tx_index_column_id); - pw.set_target(wires.log_number_column_id, log_number_column_id); - pw.set_target(wires.gas_used_column_id, gas_used_column_id); + TableMetadataGadget::::assign(pw, &self.metadata, &wires.metadata); } pub fn assign_event_wires(&self, pw: &mut PartialWitness, wires: &EventWires) { @@ -657,28 +448,19 @@ where wires.sig_rel_offset, F::from_canonical_usize(self.sig_rel_offset), ); - - wires - .topics - .iter() - .zip(self.topics.iter()) - .for_each(|(topic_wire, topic_info)| topic_wire.assign(pw, topic_info)); - wires - .data - .iter() - .zip(self.data.iter()) - .for_each(|(data_wire, data_info)| data_wire.assign(pw, data_info)); } } /// Num of children = 0 
-impl CircuitLogicWires for ReceiptLeafWires +impl CircuitLogicWires + for ReceiptLeafWires where [(); PAD_LEN(NODE_LEN)]:, + [(); MAX_COLUMNS - 2]:, { type CircuitBuilderParams = (); - type Inputs = ReceiptLeafCircuit; + type Inputs = ReceiptLeafCircuit; const NUM_PUBLIC_INPUTS: usize = PublicInputs::::TOTAL_LEN; @@ -702,27 +484,24 @@ where #[cfg(test)] mod tests { - use crate::values_extraction::{ - compute_receipt_leaf_metadata_digest, compute_receipt_leaf_value_digest, - }; - use super::{ - //super::{compute_receipt_leaf_metadata_digest, compute_receipt_leaf_value_digest}, - *, - }; + use super::*; use mp2_common::{ - utils::{keccak256, Packer}, + eth::left_pad32, + poseidon::hash_to_int_value, + utils::{keccak256, Packer, ToFields}, C, }; use mp2_test::{ circuit::{run_circuit, UserCircuit}, mpt_sequential::generate_receipt_test_info, }; - + use plonky2::hash::hash_types::HashOut; + use plonky2_ecgfp5::curve::scalar_field::Scalar; #[derive(Clone, Debug)] struct TestReceiptLeafCircuit { - c: ReceiptLeafCircuit, + c: ReceiptLeafCircuit, } impl UserCircuit for TestReceiptLeafCircuit @@ -730,10 +509,10 @@ mod tests { [(); PAD_LEN(NODE_LEN)]:, { // Leaf wires + expected extracted value - type Wires = ReceiptLeafWires; + type Wires = ReceiptLeafWires; fn build(b: &mut CircuitBuilder) -> Self::Wires { - ReceiptLeafCircuit::::build(b) + ReceiptLeafCircuit::::build(b) } fn prove(&self, pw: &mut PartialWitness, wires: &Self::Wires) { @@ -757,17 +536,34 @@ mod tests { >() where [(); PAD_LEN(NODE_LEN)]:, + [(); 7 - 2 - NO_TOPICS - MAX_DATA]:, { let receipt_proof_infos = generate_receipt_test_info::(); let proofs = receipt_proof_infos.proofs(); let info = proofs.first().unwrap(); let query = receipt_proof_infos.query(); - let c = ReceiptLeafCircuit::::new::(info, query).unwrap(); + let c = ReceiptLeafCircuit::::new::(info, query).unwrap(); + let metadata = c.metadata.clone(); let test_circuit = TestReceiptLeafCircuit { c }; let node = info.mpt_proof.last().unwrap().clone(); 
+ let mut tx_index_input = [0u8; 32]; + tx_index_input[31] = info.tx_index as u8; + + let node_rlp = rlp::Rlp::new(&node); + // The actual receipt data is item 1 in the list + let receipt_rlp = node_rlp.at(1).unwrap(); + + // We make a new `Rlp` struct that should be the encoding of the inner list representing the `ReceiptEnvelope` + let receipt_list = rlp::Rlp::new(&receipt_rlp.data().unwrap()[1..]); + + // The logs themselves start are the item at index 3 in this list + let gas_used_rlp = receipt_list.at(1).unwrap(); + + let gas_used_bytes = left_pad32(gas_used_rlp.data().unwrap()); + assert!(node.len() <= NODE_LEN); let proof = run_circuit::(test_circuit); let pi = PublicInputs::new(&proof.public_inputs); @@ -780,13 +576,33 @@ mod tests { // Check value digest { - let exp_digest = compute_receipt_leaf_value_digest(&proofs[0], &query.event); + let (input_d, row_unique_data) = + metadata.input_value_digest(&[&tx_index_input, &gas_used_bytes]); + let extracted_vd = metadata.extracted_receipt_value_digest(&node, &query.event); + + let total = input_d + extracted_vd; + + // row_id = H2int(row_unique_data || num_actual_columns) + let inputs = HashOut::from(row_unique_data) + .to_fields() + .into_iter() + .chain(std::iter::once(GFp::from_canonical_usize( + metadata.num_actual_columns, + ))) + .collect::>(); + let hash = H::hash_no_pad(&inputs); + let row_id = hash_to_int_value(hash); + + // values_digest = values_digest * row_id + let row_id = Scalar::from_noncanonical_biguint(row_id); + + let exp_digest = total * row_id; assert_eq!(pi.values_digest(), exp_digest.to_weierstrass()); } // Check metadata digest { - let exp_digest = compute_receipt_leaf_metadata_digest(&query.event); + let exp_digest = metadata.digest(); assert_eq!(pi.metadata_digest(), exp_digest.to_weierstrass()); } } diff --git a/mp2-v1/src/values_extraction/leaf_single.rs b/mp2-v1/src/values_extraction/leaf_single.rs index 9a7959f86..e8552bc28 100644 --- a/mp2-v1/src/values_extraction/leaf_single.rs 
+++ b/mp2-v1/src/values_extraction/leaf_single.rs @@ -1,10 +1,7 @@ //! Module handling the single variable inside a storage trie use crate::values_extraction::{ - gadgets::{ - column_gadget::ColumnGadget, - metadata_gadget::{ColumnsMetadata, MetadataTarget}, - }, + gadgets::metadata_gadget::{TableMetadata, TableMetadataGadget, TableMetadataTarget}, public_inputs::{PublicInputs, PublicInputsArgs}, }; use anyhow::Result; @@ -18,98 +15,104 @@ use mp2_common::{ public_inputs::PublicInputCommon, storage_key::{SimpleSlot, SimpleStructSlotWires}, types::{CBuilder, GFp, MAPPING_LEAF_VALUE_LEN}, - utils::ToTargets, + u256::UInt256Target, + utils::{Endianness, ToTargets}, CHasher, D, F, }; use plonky2::{ - iop::{target::Target, witness::PartialWitness}, + field::types::Field, + iop::{ + target::Target, + witness::{PartialWitness, WitnessWrite}, + }, plonk::proof::ProofWithPublicInputsTarget, }; + use plonky2_ecdsa::gadgets::nonnative::CircuitBuilderNonNative; use plonky2_ecgfp5::gadgets::curve::CircuitBuilderEcGFp5; use recursion_framework::circuit_builder::CircuitLogicWires; use serde::{Deserialize, Serialize}; use std::iter::once; -use super::gadgets::metadata_gadget::MetadataGadget; - #[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)] -pub struct LeafSingleWires< - const NODE_LEN: usize, - const MAX_COLUMNS: usize, - const MAX_FIELD_PER_EVM: usize, -> where - [(); PAD_LEN(NODE_LEN)]:, +pub struct LeafSingleWires +where + [(); MAX_COLUMNS - 0]:, { /// Full node from the MPT proof - node: VectorWire, + node: VectorWire, /// Leaf value - value: Array, + value: Array, /// MPT root - root: KeccakWires<{ PAD_LEN(NODE_LEN) }>, + root: KeccakWires<{ PAD_LEN(69) }>, /// Storage single variable slot slot: SimpleStructSlotWires, /// MPT metadata - metadata: MetadataTarget, + metadata: TableMetadataTarget, + /// Offset from the base slot, + offset: Target, } /// Circuit to prove the correct derivation of the MPT key from a simple slot #[derive(Clone, Debug, Serialize, 
Deserialize)] -pub struct LeafSingleCircuit< - const NODE_LEN: usize, - const MAX_COLUMNS: usize, - const MAX_FIELD_PER_EVM: usize, -> { +pub struct LeafSingleCircuit +where + [(); MAX_COLUMNS - 0]:, +{ pub(crate) node: Vec, pub(crate) slot: SimpleSlot, - pub(crate) metadata: ColumnsMetadata, + pub(crate) metadata: TableMetadata, + pub(crate) offset: u32, } -impl - LeafSingleCircuit +impl LeafSingleCircuit where - [(); PAD_LEN(NODE_LEN)]:, + [(); MAX_COLUMNS - 0]:, { - pub fn build(b: &mut CBuilder) -> LeafSingleWires { - let metadata = MetadataGadget::build(b); - let slot = SimpleSlot::build_struct(b, metadata.evm_word); - + pub fn build(b: &mut CBuilder) -> LeafSingleWires { + let metadata = TableMetadataGadget::build(b); + let offset = b.add_virtual_target(); + let slot = SimpleSlot::build_struct(b, offset); + let zero = b.zero(); // Build the node wires. - let wires = - MPTLeafOrExtensionNode::build_and_advance_key::<_, D, NODE_LEN, MAX_LEAF_VALUE_LEN>( - b, - &slot.base.mpt_key, - ); + let wires = MPTLeafOrExtensionNode::build_and_advance_key::<_, D, 69, MAX_LEAF_VALUE_LEN>( + b, + &slot.base.mpt_key, + ); let node = wires.node; let root = wires.root; - // Left pad the leaf value. - let value: Array = left_pad_leaf_value(b, &wires.value); + let key_input_with_offset = slot.location_bytes.pack(b, Endianness::Big); - // Compute the metadata digest and number of actual columns. - let (metadata_digest, num_actual_columns) = metadata.digest_info(b, slot.base.slot); + let u256_no_off = UInt256Target::new_from_target_unsafe(b, slot.base.slot); + let u256_loc = + UInt256Target::new_from_be_limbs(key_input_with_offset.arr.as_slice()).unwrap(); - // Compute the values digest. - let values_digest = ColumnGadget::::new( - &value.arr, - &metadata.table_info[..MAX_FIELD_PER_EVM], - &metadata.is_extracted_columns[..MAX_FIELD_PER_EVM], - ) - .build(b); + // Left pad the leaf value. 
+ let value: Array = left_pad_leaf_value(b, &wires.value); + + // Compute the metadata digest and the value digest + let (metadata_digest, value_digest) = metadata.extracted_digests( + b, + &value, + &u256_no_off, + &u256_loc, + &[zero, zero, zero, zero, zero, zero, zero, slot.base.slot], + ); // row_id = H2int(H("") || num_actual_columns) let empty_hash = b.constant_hash(*empty_poseidon_hash()); let inputs = empty_hash .to_targets() .into_iter() - .chain(once(num_actual_columns)) + .chain(once(metadata.num_actual_columns)) .collect(); let hash = b.hash_n_to_hash_no_pad::(inputs); let row_id = hash_to_int_target(b, hash); // value_digest = value_digest * row_id let row_id = b.biguint_to_nonnative(&row_id); - let values_digest = b.curve_scalar_mul(values_digest, &row_id); + let values_digest = b.curve_scalar_mul(value_digest, &row_id); // Only one leaf in this node. let n = b.one(); @@ -130,36 +133,32 @@ where root, slot, metadata, + offset, } } - pub fn assign( - &self, - pw: &mut PartialWitness, - wires: &LeafSingleWires, - ) { + pub fn assign(&self, pw: &mut PartialWitness, wires: &LeafSingleWires) { let padded_node = - Vector::::from_vec(&self.node).expect("Invalid node"); + Vector::::from_vec(&self.node).expect("Invalid node"); wires.node.assign(pw, &padded_node); - KeccakCircuit::<{ PAD_LEN(NODE_LEN) }>::assign( + KeccakCircuit::<{ PAD_LEN(69) }>::assign( pw, &wires.root, &InputData::Assigned(&padded_node), ); - self.slot - .assign_struct(pw, &wires.slot, self.metadata.evm_word); - MetadataGadget::assign(pw, &self.metadata, &wires.metadata); + self.slot.assign_struct(pw, &wires.slot, self.offset); + TableMetadataGadget::assign(pw, &self.metadata, &wires.metadata); + pw.set_target(wires.offset, GFp::from_canonical_u32(self.offset)); } } /// Num of children = 0 -impl - CircuitLogicWires for LeafSingleWires +impl CircuitLogicWires for LeafSingleWires where - [(); PAD_LEN(NODE_LEN)]:, + [(); MAX_COLUMNS - 0]:, { type CircuitBuilderParams = (); - type Inputs = 
LeafSingleCircuit; + type Inputs = LeafSingleCircuit; const NUM_PUBLIC_INPUTS: usize = PublicInputs::::TOTAL_LEN; @@ -180,18 +179,15 @@ where #[cfg(test)] mod tests { use super::*; - use crate::{ - tests::{TEST_MAX_COLUMNS, TEST_MAX_FIELD_PER_EVM}, - values_extraction::compute_leaf_single_values_digest, - MAX_LEAF_NODE_LEN, - }; + use crate::tests::TEST_MAX_COLUMNS; use eth_trie::{Nibbles, Trie}; use mp2_common::{ array::Array, eth::{StorageSlot, StorageSlotNode}, mpt_sequential::utils::bytes_to_nibbles, + poseidon::{hash_to_int_value, H}, rlp::MAX_KEY_NIBBLE_LEN, - utils::{keccak256, Endianness, Packer}, + utils::{keccak256, Endianness, Packer, ToFields}, C, D, F, }; use mp2_test::{ @@ -202,11 +198,12 @@ mod tests { use plonky2::{ field::types::Field, iop::{target::Target, witness::PartialWitness}, + plonk::config::Hasher, }; + use plonky2_ecgfp5::curve::scalar_field::Scalar; - type LeafCircuit = - LeafSingleCircuit; - type LeafWires = LeafSingleWires; + type LeafCircuit = LeafSingleCircuit; + type LeafWires = LeafSingleWires; #[derive(Clone, Debug)] struct TestLeafSingleCircuit { @@ -248,23 +245,38 @@ mod tests { let slot = storage_slot.slot(); let evm_word = storage_slot.evm_offset(); - let metadata = - ColumnsMetadata::::sample(slot, evm_word); // Compute the metadata digest. - let metadata_digest = metadata.digest(); - // Compute the values digest. 
- let table_info = metadata.actual_table_info().to_vec(); - let extracted_column_identifiers = metadata.extracted_column_identifiers(); - let values_digest = compute_leaf_single_values_digest::( - table_info, - &extracted_column_identifiers, - value.clone().try_into().unwrap(), + let table_metadata = TableMetadata::::sample( + true, + &[], + &[slot], + F::from_canonical_u32(evm_word), ); + + let metadata_digest = table_metadata.digest(); + let extracted_val_digest = + table_metadata.extracted_value_digest(&value, &[slot], F::from_canonical_u32(evm_word)); + + // row_id = H2int(row_unique_data || num_actual_columns) + let inputs = empty_poseidon_hash() + .to_fields() + .into_iter() + .chain(once(F::from_canonical_usize( + table_metadata.num_actual_columns, + ))) + .collect::>(); + let hash = H::hash_no_pad(&inputs); + let row_id = hash_to_int_value(hash); + + // values_digest = values_digest * row_id + let row_id = Scalar::from_noncanonical_biguint(row_id); + let values_digest = extracted_val_digest * row_id; let slot = SimpleSlot::new(slot); let c = LeafCircuit { node: node.clone(), slot, - metadata, + metadata: table_metadata, + offset: evm_word, }; let test_circuit = TestLeafSingleCircuit { c, diff --git a/mp2-v1/src/values_extraction/mod.rs b/mp2-v1/src/values_extraction/mod.rs index 8692924ce..e058d2f3b 100644 --- a/mp2-v1/src/values_extraction/mod.rs +++ b/mp2-v1/src/values_extraction/mod.rs @@ -1,32 +1,29 @@ use crate::api::SlotInput; +use anyhow::anyhow; use gadgets::{ - column_gadget::{filter_table_column_identifiers, ColumnGadgetData}, - column_info::ColumnInfo, - metadata_gadget::ColumnsMetadata, + column_info::{ExtractedColumnInfo, InputColumnInfo}, + metadata_gadget::TableMetadata, }; use itertools::Itertools; -use alloy::{ - consensus::TxReceipt, - primitives::{Address, IntoLogData}, -}; +use alloy::primitives::Address; use mp2_common::{ - eth::{left_pad32, EventLogInfo, ReceiptProofInfo, StorageSlot}, - group_hashing::map_to_curve_point, - 
poseidon::{empty_poseidon_hash, hash_to_int_value, H}, - types::{GFp, HashOutput, MAPPING_LEAF_VALUE_LEN}, + eth::{left_pad32, EventLogInfo, StorageSlot}, + poseidon::{empty_poseidon_hash, H}, + types::{GFp, HashOutput}, utils::{Endianness, Packer, ToFields}, F, }; use plonky2::{ field::types::{Field, PrimeField64}, - hash::hash_types::HashOut, plonk::config::Hasher, }; -use plonky2_ecgfp5::curve::{curve::Point as Digest, scalar_field::Scalar}; + +use plonky2_ecgfp5::curve::curve::Point; + use serde::{Deserialize, Serialize}; -use std::iter::{self, once}; +use std::iter::once; pub mod api; mod branch; @@ -55,11 +52,11 @@ pub(crate) const BLOCK_ID_DST: &[u8] = b"BLOCK_NUMBER"; #[derive(Clone, Debug, Eq, PartialEq, Hash, Serialize, Deserialize)] pub struct StorageSlotInfo { slot: StorageSlot, - table_info: Vec, + table_info: Vec, } impl StorageSlotInfo { - pub fn new(slot: StorageSlot, table_info: Vec) -> Self { + pub fn new(slot: StorageSlot, table_info: Vec) -> Self { Self { slot, table_info } } @@ -67,7 +64,7 @@ impl StorageSlotInfo { &self.slot } - pub fn table_info(&self) -> &[ColumnInfo] { + pub fn table_info(&self) -> &[ExtractedColumnInfo] { &self.table_info } @@ -75,20 +72,6 @@ impl StorageSlotInfo { self.slot.evm_offset() } - pub fn metadata( - &self, - ) -> ColumnsMetadata { - let evm_word = self.evm_word(); - let extracted_column_identifiers = - filter_table_column_identifiers(&self.table_info, self.slot.slot(), evm_word); - - ColumnsMetadata::new( - self.table_info.clone(), - &extracted_column_identifiers, - evm_word, - ) - } - pub fn outer_key_id( &self, contract_address: &Address, @@ -139,27 +122,212 @@ impl StorageSlotInfo { pub fn slot_inputs( &self, ) -> Vec { - self.metadata::() - .extracted_table_info() + self.table_info().iter().map(SlotInput::from).collect() + } + + pub fn table_columns( + &self, + contract_address: &Address, + chain_id: u64, + extra: Vec, + ) -> ColumnMetadata { + let slot = self.slot().slot(); + let num_mapping_keys = 
self.slot().mapping_keys().len(); + + let input_columns = match num_mapping_keys { + 0 => vec![], + 1 => { + let identifier = compute_id_with_prefix( + KEY_ID_PREFIX, + slot, + contract_address, + chain_id, + extra.clone(), + ); + let input_column = InputColumnInfo::new(&[slot], identifier, KEY_ID_PREFIX, 32); + vec![input_column] + } + 2 => { + let outer_identifier = compute_id_with_prefix( + OUTER_KEY_ID_PREFIX, + slot, + contract_address, + chain_id, + extra.clone(), + ); + let inner_identifier = compute_id_with_prefix( + INNER_KEY_ID_PREFIX, + slot, + contract_address, + chain_id, + extra.clone(), + ); + vec![ + InputColumnInfo::new(&[slot], outer_identifier, OUTER_KEY_ID_PREFIX, 32), + InputColumnInfo::new(&[slot], inner_identifier, INNER_KEY_ID_PREFIX, 32), + ] + } + _ => vec![], + }; + + ColumnMetadata::new(input_columns, self.table_info().to_vec()) + } +} + +/// Struct that mirrors [`TableMetadata`] but without having to specify generic constants. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ColumnMetadata { + pub input_columns: Vec, + pub extracted_columns: Vec, +} + +impl ColumnMetadata { + /// Create a new instance of [`ColumnMetadata`] + pub fn new( + input_columns: Vec, + extracted_columns: Vec, + ) -> ColumnMetadata { + ColumnMetadata { + input_columns, + extracted_columns, + } + } + + /// Getter for the [`InputColumnInfo`] + pub fn input_columns(&self) -> &[InputColumnInfo] { + &self.input_columns + } + + /// Getter for the [`ExtractedColumnInfo`] + pub fn extracted_columns(&self) -> &[ExtractedColumnInfo] { + &self.extracted_columns + } + + /// Computes the value digest for a provided value array and the unique row_id + pub fn input_value_digest(&self, input_vals: &[&[u8; 32]]) -> (Point, HashOutput) { + let point = self + .input_columns() + .iter() + .zip(input_vals.iter()) + .fold(Point::NEUTRAL, |acc, (column, value)| { + acc + column.value_digest(value.as_slice()) + }); + + let row_id_input = input_vals + .into_iter() + 
.map(|key| { + key.pack(Endianness::Big) + .into_iter() + .map(F::from_canonical_u32) + }) + .into_iter() + .flatten() + .collect::>(); + + (point, H::hash_no_pad(&row_id_input).into()) + } + + /// Compute the metadata digest. + pub fn digest(&self) -> Point { + let input_iter = self + .input_columns() + .iter() + .map(|column| column.digest()) + .collect::>(); + + let extracted_iter = self + .extracted_columns() + .iter() + .map(|column| column.digest()) + .collect::>(); + + input_iter + .into_iter() + .chain(extracted_iter) + .fold(Point::NEUTRAL, |acc, b| acc + b) + } + + pub fn extracted_value_digest( + &self, + value: &[u8], + extraction_id: &[u8], + location_offset: F, + ) -> Point { + let mut extraction_vec = extraction_id.pack(Endianness::Little); + extraction_vec.resize(8, 0u32); + extraction_vec.reverse(); + let extraction_id: [F; 8] = extraction_vec + .into_iter() + .map(F::from_canonical_u32) + .collect::>() + .try_into() + .expect("This should never fail"); + + self.extracted_columns() .iter() - .map(Into::into) - .collect_vec() + .fold(Point::NEUTRAL, |acc, column| { + let correct_id = extraction_id == column.extraction_id(); + let correct_offset = location_offset == column.location_offset(); + let correct_location = correct_id && correct_offset; + + if correct_location { + acc + column.value_digest(value) + } else { + acc + } + }) } } + +impl TryFrom + for TableMetadata +where + [(); MAX_COLUMNS - INPUT_COLUMNS]:, +{ + type Error = anyhow::Error; + + fn try_from(value: ColumnMetadata) -> Result { + let ColumnMetadata { + input_columns, + extracted_columns, + } = value; + let input_array: [InputColumnInfo; INPUT_COLUMNS] = + input_columns.try_into().map_err(|e| { + anyhow!( + "Could not convert input columns to fixed length array: {:?}", + e + ) + })?; + + Ok(TableMetadata::::new( + &input_array, + &extracted_columns, + )) + } +} + /// Prefix used for making a topic column id. 
const TOPIC_PREFIX: &[u8] = b"topic"; +/// [`TOPIC_PREFIX`] as a [`str`] +const TOPIC_NAME: &str = "topic"; /// Prefix used for making a data column id. const DATA_PREFIX: &[u8] = b"data"; +/// [`DATA_PREFIX`] as a [`str`] +const DATA_NAME: &str = "data"; /// Prefix for transaction index const TX_INDEX_PREFIX: &[u8] = b"tx index"; /// Prefix for log number const LOG_NUMBER_PREFIX: &[u8] = b"log number"; +/// [`LOG_NUMBER_PREFIX`] as a [`str`] +const LOG_NUMBER_NAME: &str = "log number"; /// Prefix for gas used -const GAS_USED_PREFIX: &[u8] = b" gas used"; +const GAS_USED_PREFIX: &[u8] = b"gas used"; +/// [`GAS_USED_PREFIX`] as a [`str`] +const GAS_USED_NAME: &str = "gas used"; pub fn identifier_block_column() -> ColumnId { let inputs: Vec = BLOCK_ID_DST.to_fields(); @@ -264,7 +432,7 @@ pub fn identifier_for_inner_mapping_key_column_raw(slot: u8, extra: Vec) -> } /// Calculate ID with prefix. -fn compute_id_with_prefix( +pub(crate) fn compute_id_with_prefix( prefix: &[u8], slot: u8, contract_address: &Address, @@ -335,254 +503,10 @@ pub fn row_unique_data_for_mapping_of_mappings_leaf( H::hash_no_pad(&inputs).into() } -/// Compute the metadata digest for single variable leaf. -pub fn compute_leaf_single_metadata_digest< - const MAX_COLUMNS: usize, - const MAX_FIELD_PER_EVM: usize, ->( - table_info: Vec, -) -> Digest { - // We don't need `extracted_column_identifiers` and `evm_word` to compute the metadata digest. - ColumnsMetadata::::new(table_info, &[], 0).digest() -} - -/// Compute the values digest for single variable leaf. 
-pub fn compute_leaf_single_values_digest( - table_info: Vec, - extracted_column_identifiers: &[ColumnId], - value: [u8; MAPPING_LEAF_VALUE_LEN], -) -> Digest { - let num_actual_columns = F::from_canonical_usize(table_info.len()); - let values_digest = - ColumnGadgetData::::new(table_info, extracted_column_identifiers, value) - .digest(); - - // row_id = H2int(H("") || num_actual_columns) - let inputs = HashOut::from(row_unique_data_for_single_leaf()) - .to_fields() - .into_iter() - .chain(once(num_actual_columns)) - .collect_vec(); - let hash = H::hash_no_pad(&inputs); - let row_id = hash_to_int_value(hash); - - // value_digest * row_id - let row_id = Scalar::from_noncanonical_biguint(row_id); - values_digest * row_id -} - -/// Compute the metadata digest for mapping variable leaf. -pub fn compute_leaf_mapping_metadata_digest< - const MAX_COLUMNS: usize, - const MAX_FIELD_PER_EVM: usize, ->( - table_info: Vec, - slot: u8, - key_id: ColumnId, -) -> Digest { - // We don't need `extracted_column_identifiers` and `evm_word` to compute the metadata digest. - let metadata_digest = - ColumnsMetadata::::new(table_info, &[], 0).digest(); - - // key_column_md = H( "\0KEY" || slot) - let key_id_prefix = u32::from_be_bytes(KEY_ID_PREFIX.try_into().unwrap()); - let inputs = vec![ - F::from_canonical_u32(key_id_prefix), - F::from_canonical_u8(slot), - ]; - let key_column_md = H::hash_no_pad(&inputs); - // metadata_digest += D(key_column_md || key_id) - let inputs = key_column_md - .to_fields() - .into_iter() - .chain(once(F::from_canonical_u64(key_id))) - .collect_vec(); - let metadata_key_digest = map_to_curve_point(&inputs); - - metadata_digest + metadata_key_digest -} - -/// Compute the values digest for mapping variable leaf. 
-pub fn compute_leaf_mapping_values_digest( - table_info: Vec, - extracted_column_identifiers: &[u64], - value: [u8; MAPPING_LEAF_VALUE_LEN], - mapping_key: MappingKey, - evm_word: u32, - key_id: ColumnId, -) -> Digest { - // We add key column to number of actual columns. - let num_actual_columns = F::from_canonical_usize(table_info.len() + 1); - let mut values_digest = - ColumnGadgetData::::new(table_info, extracted_column_identifiers, value) - .digest(); - - // values_digest += evm_word == 0 ? D(key_id || pack(left_pad32(key))) : CURVE_ZERO - let packed_mapping_key = left_pad32(&mapping_key) - .pack(Endianness::Big) - .into_iter() - .map(F::from_canonical_u32); - if evm_word == 0 { - let inputs = once(F::from_canonical_u64(key_id)) - .chain(packed_mapping_key.clone()) - .collect_vec(); - let values_key_digest = map_to_curve_point(&inputs); - values_digest += values_key_digest; - } - let row_unique_data = HashOut::from(row_unique_data_for_mapping_leaf(&mapping_key)); - // row_id = H2int(row_unique_data || num_actual_columns) - let inputs = row_unique_data - .to_fields() - .into_iter() - .chain(once(num_actual_columns)) - .collect_vec(); - let hash = H::hash_no_pad(&inputs); - let row_id = hash_to_int_value(hash); - - // value_digest * row_id - let row_id = Scalar::from_noncanonical_biguint(row_id); - values_digest * row_id -} - -/// Compute the metadata digest for mapping of mappings leaf. -pub fn compute_leaf_mapping_of_mappings_metadata_digest< - const MAX_COLUMNS: usize, - const MAX_FIELD_PER_EVM: usize, ->( - table_info: Vec, - slot: u8, - outer_key_id: ColumnId, - inner_key_id: ColumnId, -) -> Digest { - // We don't need `extracted_column_identifiers` and `evm_word` to compute the metadata digest. - let metadata_digest = - ColumnsMetadata::::new(table_info, &[], 0).digest(); - - // Compute the outer and inner key metadata digests. 
- let [outer_key_digest, inner_key_digest] = [ - (OUTER_KEY_ID_PREFIX, outer_key_id), - (INNER_KEY_ID_PREFIX, inner_key_id), - ] - .map(|(prefix, key_id)| { - // key_column_md = H(KEY_ID_PREFIX || slot) - let prefix = u64::from_be_bytes(prefix.try_into().unwrap()); - let inputs = vec![F::from_canonical_u64(prefix), F::from_canonical_u8(slot)]; - let key_column_md = H::hash_no_pad(&inputs); - - // key_digest = D(key_column_md || key_id) - let inputs = key_column_md - .to_fields() - .into_iter() - .chain(once(F::from_canonical_u64(key_id))) - .collect_vec(); - map_to_curve_point(&inputs) - }); - - // Add the outer and inner key digests into the metadata digest. - // metadata_digest + outer_key_digest + inner_key_digest - metadata_digest + inner_key_digest + outer_key_digest -} - -pub type MappingKey = Vec; -pub type ColumnId = u64; - -/// Compute the values digest for mapping of mappings leaf. -#[allow(clippy::too_many_arguments)] -pub fn compute_leaf_mapping_of_mappings_values_digest( - table_info: Vec, - extracted_column_identifiers: &[ColumnId], - value: [u8; MAPPING_LEAF_VALUE_LEN], - evm_word: u32, - outer_mapping_data: (MappingKey, ColumnId), - inner_mapping_data: (MappingKey, ColumnId), -) -> Digest { - // Add inner key and outer key columns to the number of actual columns. - let num_actual_columns = F::from_canonical_usize(table_info.len() + 2); - let mut values_digest = - ColumnGadgetData::::new(table_info, extracted_column_identifiers, value) - .digest(); - - // Compute the outer and inner key values digests. 
- let [packed_outer_key, packed_inner_key] = - [&outer_mapping_data.0, &inner_mapping_data.0].map(|key| { - left_pad32(key) - .pack(Endianness::Big) - .into_iter() - .map(F::from_canonical_u32) - }); - if evm_word == 0 { - let [outer_key_digest, inner_key_digest] = [ - (outer_mapping_data.1, packed_outer_key.clone()), - (inner_mapping_data.1, packed_inner_key.clone()), - ] - .map(|(key_id, packed_key)| { - // D(key_id || pack(key)) - let inputs = once(F::from_canonical_u64(key_id)) - .chain(packed_key) - .collect_vec(); - map_to_curve_point(&inputs) - }); - // values_digest += outer_key_digest + inner_key_digest - values_digest += inner_key_digest + outer_key_digest; - } - - let row_unique_data = HashOut::from(row_unique_data_for_mapping_of_mappings_leaf( - &outer_mapping_data.0, - &inner_mapping_data.0, - )); - // row_id = H2int(row_unique_data || num_actual_columns) - let inputs = row_unique_data - .to_fields() - .into_iter() - .chain(once(num_actual_columns)) - .collect_vec(); - let hash = H::hash_no_pad(&inputs); - let row_id = hash_to_int_value(hash); - - // values_digest = values_digest * row_id - let row_id = Scalar::from_noncanonical_biguint(row_id); - values_digest * row_id -} -/// Calculate `metadata_digest = D(address || signature || topics)` for receipt leaf. -/// Topics is an array of 5 values (some are dummies), each being `column_id`, `rel_byte_offset` (from the start of the log) -/// and `len`. -pub fn compute_receipt_leaf_metadata_digest( +/// Function that computes the column identifiers for the non-indexed columns together with their names as [`String`]s. 
+pub fn compute_non_indexed_receipt_column_ids( event: &EventLogInfo, -) -> Digest { - let mut out = Vec::new(); - out.push(event.size); - out.extend_from_slice(&event.address.0.map(|byte| byte as usize)); - out.push(event.add_rel_offset); - out.extend_from_slice(&event.event_signature.map(|byte| byte as usize)); - out.push(event.sig_rel_offset); - - let mut field_out = out - .into_iter() - .map(GFp::from_canonical_usize) - .collect::>(); - // Work out the column ids for tx_index, log_number and gas_used - let tx_index_input = [ - event.address.as_slice(), - event.event_signature.as_slice(), - TX_INDEX_PREFIX, - ] - .concat() - .into_iter() - .map(GFp::from_canonical_u8) - .collect::>(); - let tx_index_column_id = H::hash_no_pad(&tx_index_input).elements[0]; - - let log_number_input = [ - event.address.as_slice(), - event.event_signature.as_slice(), - LOG_NUMBER_PREFIX, - ] - .concat() - .into_iter() - .map(GFp::from_canonical_u8) - .collect::>(); - let log_number_column_id = H::hash_no_pad(&log_number_input).elements[0]; - +) -> Vec<(String, GFp)> { let gas_used_input = [ event.address.as_slice(), event.event_signature.as_slice(), @@ -593,13 +517,8 @@ pub fn compute_receipt_leaf_metadata_digest>(); let gas_used_column_id = H::hash_no_pad(&gas_used_input).elements[0]; - field_out.push(tx_index_column_id); - field_out.push(log_number_column_id); - field_out.push(gas_used_column_id); - let core_metadata = map_to_curve_point(&field_out); - - let topic_digests = event + let topic_ids = event .topics .iter() .enumerate() @@ -614,12 +533,14 @@ pub fn compute_receipt_leaf_metadata_digest>(); - let column_id = H::hash_no_pad(&input).elements[0]; - map_to_curve_point(&[column_id]) + ( + format!("{}_{}", TOPIC_NAME, j + 1), + H::hash_no_pad(&input).elements[0], + ) }) - .collect::>(); + .collect::>(); - let data_digests = event + let data_ids = event .data .iter() .enumerate() @@ -634,136 +555,17 @@ pub fn compute_receipt_leaf_metadata_digest>(); - let column_id = 
H::hash_no_pad(&input).elements[0]; - map_to_curve_point(&[column_id]) + ( + format!("{}_{}", DATA_NAME, j + 1), + H::hash_no_pad(&input).elements[0], + ) }) - .collect::>(); + .collect::>(); - iter::once(core_metadata) - .chain(topic_digests) - .chain(data_digests) - .fold(Digest::NEUTRAL, |acc, p| acc + p) -} - -/// Calculate `value_digest` for receipt leaf. -pub fn compute_receipt_leaf_value_digest( - receipt_proof_info: &ReceiptProofInfo, - event: &EventLogInfo, -) -> Digest { - let receipt = receipt_proof_info.to_receipt().unwrap(); - let gas_used = receipt.cumulative_gas_used(); - - // Only use events that we are indexing - let address = event.address; - let sig = event.event_signature; - - // Work out the column ids for tx_index, log_number and gas_used - let tx_index_input = [ - event.address.as_slice(), - event.event_signature.as_slice(), - TX_INDEX_PREFIX, - ] - .concat() - .into_iter() - .map(GFp::from_canonical_u8) - .collect::>(); - let tx_index_column_id = H::hash_no_pad(&tx_index_input).elements[0]; - - let log_number_input = [ - event.address.as_slice(), - event.event_signature.as_slice(), - LOG_NUMBER_PREFIX, - ] - .concat() - .into_iter() - .map(GFp::from_canonical_u8) - .collect::>(); - let log_number_column_id = H::hash_no_pad(&log_number_input).elements[0]; - - let gas_used_input = [ - event.address.as_slice(), - event.event_signature.as_slice(), - GAS_USED_PREFIX, + [ + vec![(GAS_USED_NAME.to_string(), gas_used_column_id)], + topic_ids, + data_ids, ] .concat() - .into_iter() - .map(GFp::from_canonical_u8) - .collect::>(); - let gas_used_column_id = H::hash_no_pad(&gas_used_input).elements[0]; - - let index_digest = map_to_curve_point(&[ - tx_index_column_id, - GFp::from_canonical_u64(receipt_proof_info.tx_index), - ]); - - let gas_digest = - map_to_curve_point(&[gas_used_column_id, GFp::from_noncanonical_u128(gas_used)]); - let mut n = 0; - receipt - .logs() - .iter() - .cloned() - .filter_map(|log| { - let log_address = log.address; - let 
log_data = log.to_log_data(); - let (topics, data) = log_data.split(); - - if log_address == address && topics[0].0 == sig { - n += 1; - let topics_value_digest = topics - .iter() - .enumerate() - .skip(1) - .map(|(j, fixed)| { - let packed = fixed.0.pack(mp2_common::utils::Endianness::Big).to_fields(); - let input = [ - event.address.as_slice(), - event.event_signature.as_slice(), - TOPIC_PREFIX, - &[j as u8], - ] - .concat() - .into_iter() - .map(GFp::from_canonical_u8) - .collect::>(); - let mut values = vec![H::hash_no_pad(&input).elements[0]]; - values.extend_from_slice(&packed); - map_to_curve_point(&values) - }) - .collect::>(); - let data_value_digest = data - .chunks(32) - .enumerate() - .map(|(j, fixed)| { - let packed = fixed.pack(mp2_common::utils::Endianness::Big).to_fields(); - let input = [ - event.address.as_slice(), - event.event_signature.as_slice(), - DATA_PREFIX, - &[j as u8 + 1], - ] - .concat() - .into_iter() - .map(GFp::from_canonical_u8) - .collect::>(); - let mut values = vec![H::hash_no_pad(&input).elements[0]]; - values.extend_from_slice(&packed); - map_to_curve_point(&values) - }) - .collect::>(); - let log_no_digest = - map_to_curve_point(&[log_number_column_id, GFp::from_canonical_usize(n)]); - let initial_digest = index_digest + gas_digest + log_no_digest; - - let row_value = std::iter::once(initial_digest) - .chain(topics_value_digest) - .chain(data_value_digest) - .fold(Digest::NEUTRAL, |acc, p| acc + p); - - Some(map_to_curve_point(&row_value.to_fields())) - } else { - None - } - }) - .fold(Digest::NEUTRAL, |acc, p| acc + p) } diff --git a/mp2-v1/store/test_proofs.store b/mp2-v1/store/test_proofs.store deleted file mode 100644 index 0324f3b4c1c0280b993b31ff7a7b5741d87516db..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 524288 zcmeI)KS~2Z6bInV9}^1;As|W$&mdS@q!YZvHb%5s*(`b}PhjT-L=bP_0lb1p-ixqg z3n2j^-!{LQeZvg%^>2z|`3)l1#n2oNAZfB*pk 
z1PBlyK!5-N0t5&UAV7cs0RjXF5FkK+009C72oNAZfB*pk1PBlyK!5-N0t5&UAV7cs z0RjXF5FkK+z^_2|=?~*q&T~b+3oyly}#|sAW8yMQPDg^}KB6izc5V$R&~h0RjXF5FkK+009C72oNAZfB*pk1PBly zK!5-N0t5&UAV7cs0RjXF5FkK+009C72oNAZfB*pk1PBlyK!5-N0t5)`gupm%nZ5b# z{{Ng0c0$7?2@oJafB*pk1PBlyK!5-N0t5&UAV7cs0RjXF5FkK+009C72oNAZfB*pk z1PBlyK!5-N0t5&UAV7cs0RjXF5FkK+009C72oNAZfB*pk1PBlyK!5-N0t5&UAV7cs z0RjXF5FkK+009C72oNAZfB*pk1PBlyK!5-N0t5&UAV7cs0RjXF5FkK+009C72oNAZ zfB*pk1PBlyK!5-N0t5&UAV7cs0RjXF5FkK+009C72oNAZfB*pk1PBlyK!5-N0t5&U zAV7cs0RjXF5FkK+009C72oNAZfB*pk1PBlyK!5-N0t5&UAV7cs0RjXF5FkK+009C7 z2oNAZfB*pk1PBlyK!5-N0t5&UAV7cs0RjXF5FkK+009C72oNAZfB*pk1PBlyK!5-N z0t5&UAV7cs0RjXF5FkK+009C72oNAZfB*pk1PBlyK!5-N0t5&UAV7cs0RjXF5FkK+ z009C72oNAZfB*pk1PBlyK!5-N0t5&UAV7cs0RjXF5FkK+009C72oNAZfB*pk1PBly zK!5-N0t5&UAV7cs0RjXF5FkK+009C72oNAZfB*pk1PBlyK!5-N0t5&UAV7cs0RjXF z5FkK+009C72oNAZfB*pk1PBlyK!5-N0t5&UAV7cs0RjXF5FkK+009C72oNAZfB*pk z1PBlyK!5-N0t5&UAV7cs0RjXF5FkK+009C72oNAZfB*pk1PBlyK!5-N0t5&UAV7cs z0RjXF5FkK+009C72oNAZfB*pk1PBlyK!5-N0t5&UAV7cs0RjXF5FkK+009C72oNAZ zfB*pk1PBlyK!5-N0t5&UAV7cs0RjXF5FkK+009C72oNAZfB*pk1PBlyK!5-N0t5&U zAV7cs0RjXF5FkK+009C72oNAZfB*pk1PBlyK!5-N0t5&UAV7cs0RjXF5FkK+009C7 z2oNAZfB*pk1PBlyK!5-N0t5&UAV7cs0RjXF5FkK+009C72oNAZfB*pk1PBlyK!5-N z0t5&UAV7cs0RjXF5FkK+009C72oNAZfB*pk1PBlyK!5-N0t5&UAV7cs0RjXF5FkK+ z009C72oNAZfB*pk1PBlyK!5-N0t5&UAV7cs0RjXF5FkK+009C72oNAZfB*pk1PBly zK!5-N0t5&UAV7cs0RjXF5FkK+009C72oNAZfB*pk1PBlyK!5-N0t5&UAV7cs0RjXF z5FkK+009C72oNAZfB*pk1PBlyK!5-N0t5&UAV7cs0RjXF5FkK+009C72oNAZfB*pk z1PBlyK!5-N0t5&UAV7cs0RjXF5FkK+009C72oNAZfB*pk1PBlyK!5-N0t5&UAV7cs z0RjXF5FkK+009C72oNAZfB*pk1PBlyK!5-N0t5&UAV7cs0RjXF5FkK+009C72oNAZ zfB*pk1PBlyK!5-N0t5&UAV7cs0RjXF5FkK+009C72oNAZfB*pk1PBlyK!5-N0t5&U zAV7cs0RjXF5FkK+009C72oNAZfB*pk1PBlyK!5-N0t5&UAV7cs0RjXF5FkK+009C7 z2oNAZfB*pk1PBlyK!5-N0t5&UAV7cs0RjXF5FkK+009C72oNAZfB*pk1PBlyK!5-N z0t5&UAV7cs0RjXF5FkK+009C72oNAZfB*pk1PBlyK!5-N0t5&UAV7cs0RjXF5FkK+ z009C72oNAZfB*pk1PBlyK!5-N0t5&UAV7cs0RjXF5FkK+009C72oNAZfB*pk1PBly 
zK!5-N0t5&UAV7cs0RjXF5FkK+009C72oNAZfB*pk1PBlyK!5-N0t5&UAV7cs0RjXF z5FkK+009C72oNAZfB*pk1PBlyK!5-N0t5&UAV7cs0RjXF5FkK+009C72oNAZfB*pk z1PBlyK!5-N0t5&UAV7cs0RjXF5FkK+009C72oNAZfB*pk1PBlyK!5-N0t5&UAV7cs z0RjXF5FkK+009C72oNAZfB*pk1PBlyK!5-N0t5&UAV7cs0RjXF5FkK+009C72oNAZ zfB*pk1PBlyK!5-N0t5&UAV7cs0RjXF5FkK+009C72oNAZfB*pk1PBlyK!5-N0t5&U zAV7cs0RjXF5FkK+009C72oNAZfB*pk1PBlyK!5-N0t5&UAV7cs0RjXF5FkK+009C7 z2oNAZfB*pk1PBlyK!5-N0t5&UAV7cs0RjXF5FkK+009C72oNAZfB*pk1PBlyK!5-N z0t5&UAV7cs0RjXF5FkK+009C72oNAZfB*pk1PBlyK!5-N0t5&UAV7cs0RjXF5FkK+ e009C72oNAZfB*pk1PBlyK!5-N0t5*BFM)4A2pR|g diff --git a/mp2-v1/tests/common/cases/contract.rs b/mp2-v1/tests/common/cases/contract.rs index 3103e05f4..9c73640e2 100644 --- a/mp2-v1/tests/common/cases/contract.rs +++ b/mp2-v1/tests/common/cases/contract.rs @@ -2,13 +2,17 @@ use std::future::Future; use super::slot_info::{LargeStruct, MappingInfo, StorageSlotMappingKey, StorageSlotValue}; use crate::common::{ - bindings::simple::{ - Simple, - Simple::{ - MappingChange, MappingOfSingleValueMappingsChange, MappingOfStructMappingsChange, - MappingOperation, MappingStructChange, + bindings::{ + eventemitter::EventEmitter::{self, EventEmitterInstance}, + simple::{ + Simple, + Simple::{ + MappingChange, MappingOfSingleValueMappingsChange, MappingOfStructMappingsChange, + MappingOperation, MappingStructChange, + }, }, }, + cases::indexing::ReceiptUpdate, TestContext, }; use alloy::{ @@ -22,8 +26,6 @@ use anyhow::Result; use itertools::Itertools; use log::info; -use crate::common::bindings::simple::Simple::SimpleInstance; - use super::indexing::ContractUpdate; pub struct Contract { @@ -218,3 +220,21 @@ impl ContractController for Vec> { T::call_contract(&contract, changes).await } } +pub struct EventContract { + pub instance: EventEmitterInstance, Ethereum>, +} + +impl TestContract for EventContract { + type Update = ReceiptUpdate; + type Contract = EventEmitterInstance>; + + fn new(address: Address, provider: &RootProvider) -> Self { + Self { + 
instance: EventEmitter::new(address, provider.clone()), + } + } + + fn get_instance(&self) -> &Self::Contract { + &self.instance + } +} diff --git a/mp2-v1/tests/common/cases/indexing.rs b/mp2-v1/tests/common/cases/indexing.rs index fa613712b..5e8dfbb00 100644 --- a/mp2-v1/tests/common/cases/indexing.rs +++ b/mp2-v1/tests/common/cases/indexing.rs @@ -16,7 +16,7 @@ use mp2_v1::{ ColumnID, }, values_extraction::{ - gadgets::column_info::ColumnInfo, identifier_block_column, + compute_non_indexed_receipt_column_ids, identifier_block_column, identifier_for_inner_mapping_key_column, identifier_for_outer_mapping_key_column, identifier_for_value_column, }, @@ -26,17 +26,21 @@ use rand::{thread_rng, Rng}; use ryhope::storage::RoEpochKvStorage; use crate::common::{ - bindings::simple::Simple::{ - m1Call, mappingOfSingleValueMappingsCall, mappingOfStructMappingsCall, structMappingCall, + bindings::{ + eventemitter::EventEmitter::{self, EventEmitterInstance}, + simple::Simple::{self, MappingChange, MappingOperation, SimpleInstance}, }, cases::{ contract::Contract, identifier_for_mapping_key_column, - slot_info::LargeStruct, + slot_info::{ + LargeStruct, SimpleMapping, SimpleNestedMapping, StructMapping, StructNestedMapping, + }, table_source::{ - LengthExtractionArgs, MappingExtractionArgs, MappingIndex, MergeSource, - SingleExtractionArgs, + ContractExtractionArgs, LengthExtractionArgs, MappingExtractionArgs, MappingIndex, + MergeSource, ReceiptExtractionArgs, SingleExtractionArgs, TableSource, }, + TableIndexing, }, proof_storage::{ProofKey, ProofStorage}, rowtree::SecondaryIndexCell, @@ -47,15 +51,12 @@ use crate::common::{ TableInfo, TestContext, }; -use super::{ - super::bindings::simple::Simple::SimpleInstance, - slot_info::{SimpleMapping, SimpleNestedMapping, StructMapping, StructNestedMapping}, - ContractExtractionArgs, TableIndexing, TableSource, -}; use alloy::{ - contract::private::Transport, - network::Ethereum, - providers::{ProviderBuilder, RootProvider}, + 
contract::private::{Network, Provider, Transport}, + network::{Ethereum, TransactionBuilder}, + primitives::{Address, U256}, + providers::{ext::AnvilApi, ProviderBuilder, RootProvider}, + sol_types::SolEvent, }; use mp2_common::{eth::StorageSlot, proof::ProofWithVK, types::HashOutput}; @@ -110,6 +111,8 @@ fn single_value_slot_inputs() -> Vec { slot_inputs } +pub(crate) const TX_INDEX_COLUMN: &str = "tx index"; + impl TableIndexing { pub(crate) async fn merge_table_test_case( ctx: &mut TestContext, @@ -135,14 +138,14 @@ impl TableIndexing { .map(|(i, slot_input)| { let identifier = identifier_for_value_column(slot_input, &contract_address, chain_id, vec![]); - let info = ColumnInfo::new_from_slot_input(identifier, slot_input); + TableColumn { name: format!("single_column_{i}"), index: IndexType::None, // ALL single columns are "multiplier" since we do tableA * D(tableB), i.e. all // entries of table A are repeated for each entry of table B. multiplier: true, - info, + identifier, } }) .collect_vec(); @@ -176,11 +179,7 @@ impl TableIndexing { name: MAPPING_KEY_COLUMN.to_string(), index: IndexType::Secondary, multiplier: false, - info: ColumnInfo::new_from_slot_input( - key_id, - // The slot input is useless for the key column. 
- &slot_inputs[0], - ), + identifier: key_id, }; let rest_columns = value_ids .into_iter() @@ -190,7 +189,7 @@ impl TableIndexing { name: format!("{MAPPING_VALUE_COLUMN}_{i}"), index: IndexType::None, multiplier: false, - info: ColumnInfo::new_from_slot_input(id, slot_input), + identifier: id, }) .collect_vec(); @@ -207,7 +206,7 @@ impl TableIndexing { name: MAPPING_VALUE_COLUMN.to_string(), index: IndexType::Secondary, multiplier: false, - info: ColumnInfo::new_from_slot_input(secondary_id, &secondary_slot_input), + identifier: secondary_id, }; let mut rest_columns = value_ids .into_iter() @@ -217,7 +216,7 @@ impl TableIndexing { name: format!("{MAPPING_VALUE_COLUMN}_{i}"), index: IndexType::None, multiplier: false, - info: ColumnInfo::new_from_slot_input(id, slot_input), + identifier: id, }) .collect_vec(); rest_columns.push(TableColumn { @@ -225,7 +224,7 @@ impl TableIndexing { index: IndexType::None, multiplier: false, // The slot input is useless for the key column. - info: ColumnInfo::new_from_slot_input(key_id, &slot_inputs[0]), + identifier: key_id, }); (secondary_column, rest_columns) @@ -248,7 +247,7 @@ impl TableIndexing { // really, it is a special column we add multiplier: true, // Only valid for the identifier of block column, others are dummy. - info: ColumnInfo::new(0, identifier_block_column(), 0, 0, 0, 0), + identifier: identifier_block_column(), }, secondary: mapping_secondary_column, rest: all_columns, @@ -313,21 +312,18 @@ impl TableIndexing { index: IndexType::Primary, multiplier: false, // Only valid for the identifier of block column, others are dummy. 
- info: ColumnInfo::new(0, identifier_block_column(), 0, 0, 0, 0), + identifier: identifier_block_column(), }, secondary: TableColumn { name: SINGLE_SECONDARY_COLUMN.to_string(), index: IndexType::Secondary, // here we put false always since these are not coming from a "merged" table multiplier: false, - info: ColumnInfo::new_from_slot_input( - identifier_for_value_column( - &secondary_index_slot_input, - &contract_address, - chain_id, - vec![], - ), + identifier: identifier_for_value_column( &secondary_index_slot_input, + &contract_address, + chain_id, + vec![], ), }, rest: rest_column_slot_inputs @@ -340,12 +336,12 @@ impl TableIndexing { chain_id, vec![], ); - let info = ColumnInfo::new_from_slot_input(identifier, slot_input); + TableColumn { name: format!("rest_column_{i}"), index: IndexType::None, multiplier: false, - info, + identifier, } }) .collect_vec(), @@ -619,6 +615,109 @@ impl TableIndexing { )) } + pub(crate) async fn receipt_test_case( + no_topics: usize, + no_data: usize, + ctx: &mut TestContext, + ) -> Result<(TableIndexing, Vec>)> + where + T: ReceiptExtractionArgs, + [(); ::NO_TOPICS]:, + [(); ::MAX_DATA]:, + { + // Create a provider with the wallet for contract deployment and interaction. 
+ let provider = ProviderBuilder::new() + .with_recommended_fillers() + .wallet(ctx.wallet()) + .on_http(ctx.rpc_url.parse().unwrap()); + + let contract = EventEmitter::deploy(&provider).await.unwrap(); + info!( + "Deployed EventEmitter contract at address: {}", + contract.address() + ); + let contract_address = contract.address(); + let chain_id = ctx.rpc.get_chain_id().await.unwrap(); + let contract = Contract { + address: *contract_address, + chain_id, + }; + + // Retrieve the event signature `str` based on `no_topics` and `no_data` + let event_signature = match (no_topics, no_data) { + (0, 0) => EventEmitter::noIndexed::SIGNATURE, + (0, 1) => EventEmitter::noIOneD::SIGNATURE, + (0, 2) => EventEmitter::noITwoD::SIGNATURE, + (1, 0) => EventEmitter::oneIndexed::SIGNATURE, + (1, 1) => EventEmitter::oneIOneD::SIGNATURE, + (1, 2) => EventEmitter::oneITwoD::SIGNATURE, + (2, 0) => EventEmitter::twoIndexed::SIGNATURE, + (2, 1) => EventEmitter::twoIOneD::SIGNATURE, + (2, 2) => EventEmitter::twoITwoD::SIGNATURE, + (3, 0) => EventEmitter::threeIndexed::SIGNATURE, + (3, 1) => EventEmitter::oneData::SIGNATURE, + (3, 2) => EventEmitter::twoData::SIGNATURE, + _ => panic!( + "Events with {} topics and {} additional pieces of data not supported", + no_topics, no_data + ), + }; + + let mut source = T::new(contract.address(), event_signature); + let genesis_updates = source.init_contract_data(ctx, &contract).await; + + let indexing_genesis_block = ctx.block_number().await; + // Defining the columns structure of the table from the source event + // This is depending on what is our data source, mappings and CSV both have their o + // own way of defining their table. 
+ let columns = TableColumns { + primary: TableColumn { + name: BLOCK_COLUMN_NAME.to_string(), + identifier: identifier_block_column(), + index: IndexType::Primary, + multiplier: false, + }, + secondary: TableColumn { + name: TX_INDEX_COLUMN.to_string(), + identifier: ::get_index(&source), + + index: IndexType::Secondary, + // here we put false always since these are not coming from a "merged" table + multiplier: false, + }, + rest: compute_non_indexed_receipt_column_ids(&source.get_event()) + .into_iter() + .map(|(name, identifier)| TableColumn { + name, + identifier: identifier.to_canonical_u64(), + index: IndexType::None, + multiplier: false, + }) + .collect::>(), + }; + + let tx_index_id = columns.secondary_column().identifier(); + let gas_used_id = columns.rest[0].identifier(); + let row_unique_id = TableRowUniqueID::Receipt(tx_index_id, gas_used_id); + let table = Table::new( + indexing_genesis_block, + "receipt_table".to_string(), + columns, + row_unique_id, + ) + .await; + Ok(( + TableIndexing:: { + value_column: "".to_string(), + source, + table, + contract, + contract_extraction: None, + }, + genesis_updates, + )) + } + pub async fn run( &mut self, ctx: &mut TestContext, @@ -922,21 +1021,16 @@ async fn build_mapping_table( name: MAPPING_KEY_COLUMN.to_string(), index: IndexType::Secondary, multiplier: false, - info: ColumnInfo::new_from_slot_input( - key_id, - // The slot input is useless for the key column. 
- &slot_inputs[0], - ), + identifier: key_id, }; let rest_columns = value_ids .into_iter() - .zip(slot_inputs.iter()) .enumerate() - .map(|(i, (id, slot_input))| TableColumn { + .map(|(i, id)| TableColumn { name: format!("{MAPPING_VALUE_COLUMN}_{i}"), index: IndexType::None, multiplier: false, - info: ColumnInfo::new_from_slot_input(id, slot_input), + identifier: id, }) .collect_vec(); @@ -953,7 +1047,7 @@ async fn build_mapping_table( name: MAPPING_VALUE_COLUMN.to_string(), index: IndexType::Secondary, multiplier: false, - info: ColumnInfo::new_from_slot_input(secondary_id, &secondary_slot_input), + identifier: secondary_id, }; let mut rest_columns = value_ids .into_iter() @@ -963,7 +1057,7 @@ async fn build_mapping_table( name: format!("{MAPPING_VALUE_COLUMN}_{i}"), index: IndexType::None, multiplier: false, - info: ColumnInfo::new_from_slot_input(id, slot_input), + identifier: id, }) .collect_vec(); rest_columns.push(TableColumn { @@ -971,7 +1065,7 @@ async fn build_mapping_table( index: IndexType::None, multiplier: false, // The slot input is useless for the key column. - info: ColumnInfo::new_from_slot_input(key_id, &Default::default()), + identifier: key_id, }); (secondary_column, rest_columns) @@ -987,7 +1081,7 @@ async fn build_mapping_table( index: IndexType::Primary, multiplier: false, // Only valid for the identifier of block column, others are dummy. 
- info: ColumnInfo::new(0, identifier_block_column(), 0, 0, 0, 0), + identifier: identifier_block_column(), }, secondary: secondary_column, rest: rest_columns, @@ -1021,7 +1115,7 @@ async fn build_mapping_of_mappings_table( name: format!("{MAPPING_OF_MAPPINGS_VALUE_COLUMN}_{i}"), index: IndexType::None, multiplier: false, - info: ColumnInfo::new_from_slot_input(id, slot_input), + identifier: id, }) .collect_vec(); @@ -1031,19 +1125,14 @@ async fn build_mapping_of_mappings_table( name: MAPPING_OF_MAPPINGS_INNER_KEY_COLUMN.to_string(), index: IndexType::None, multiplier: false, - // The slot input is useless for the inner key column. - info: ColumnInfo::new_from_slot_input(inner_key_id, &slot_inputs[0]), + identifier: inner_key_id, }); TableColumn { name: MAPPING_OF_MAPPINGS_OUTER_KEY_COLUMN.to_string(), index: IndexType::Secondary, multiplier: false, - info: ColumnInfo::new_from_slot_input( - outer_key_id, - // The slot input is useless for the key column. - &slot_inputs[0], - ), + identifier: outer_key_id, } } MappingIndex::InnerKey(_) => { @@ -1051,25 +1140,20 @@ async fn build_mapping_of_mappings_table( name: MAPPING_OF_MAPPINGS_OUTER_KEY_COLUMN.to_string(), index: IndexType::None, multiplier: false, - // The slot input is useless for the inner key column. - info: ColumnInfo::new_from_slot_input(outer_key_id, &slot_inputs[0]), + identifier: outer_key_id, }); TableColumn { name: MAPPING_OF_MAPPINGS_INNER_KEY_COLUMN.to_string(), index: IndexType::Secondary, multiplier: false, - info: ColumnInfo::new_from_slot_input( - inner_key_id, - // The slot input is useless for the key column. 
- &slot_inputs[0], - ), + identifier: inner_key_id, } } MappingIndex::Value(secondary_value_id) => { let pos = rest_columns .iter() - .position(|col| &col.info.identifier().to_canonical_u64() == secondary_value_id) + .position(|col| &col.identifier() == secondary_value_id) .unwrap(); let mut secondary_column = rest_columns.remove(pos); secondary_column.index = IndexType::Secondary; @@ -1077,14 +1161,11 @@ async fn build_mapping_of_mappings_table( (outer_key_id, MAPPING_OF_MAPPINGS_OUTER_KEY_COLUMN), (inner_key_id, MAPPING_OF_MAPPINGS_INNER_KEY_COLUMN), ] - .map(|(id, name)| { - TableColumn { - name: name.to_string(), - index: IndexType::None, - multiplier: false, - // The slot input is useless for the inner key column. - info: ColumnInfo::new_from_slot_input(id, &slot_inputs[0]), - } + .map(|(id, name)| TableColumn { + name: name.to_string(), + index: IndexType::None, + multiplier: false, + identifier: id, }); rest_columns.extend(key_columns); @@ -1098,8 +1179,7 @@ async fn build_mapping_of_mappings_table( name: BLOCK_COLUMN_NAME.to_string(), index: IndexType::Primary, multiplier: false, - // Only valid for the identifier of block column, others are dummy. - info: ColumnInfo::new(0, identifier_block_column(), 0, 0, 0, 0), + identifier: identifier_block_column(), }, secondary: secondary_column, rest: rest_columns, @@ -1116,6 +1196,128 @@ async fn build_mapping_of_mappings_table( .await } +#[derive(Debug, Clone, Copy)] +pub struct ReceiptUpdate { + pub event_type: (u8, u8), + pub no_relevant: usize, + pub no_others: usize, +} + +impl ReceiptUpdate { + /// Create a new [`ReceiptUpdate`] + pub fn new(event_type: (u8, u8), no_relevant: usize, no_others: usize) -> ReceiptUpdate { + ReceiptUpdate { + event_type, + no_relevant, + no_others, + } + } + + /// Apply an update to an [`EventEmitterInstance`]. 
+ pub async fn apply_update>( + &self, + ctx: &TestContext, + contract: &EventEmitterInstance, + ) { + let provider = ProviderBuilder::new() + .with_recommended_fillers() + .wallet(ctx.wallet()) + .on_http(ctx.rpc_url.parse().unwrap()); + + let addresses = ctx.local_node.as_ref().unwrap().addresses(); + + provider.anvil_set_auto_mine(false).await.unwrap(); + + provider.anvil_auto_impersonate_account(true).await.unwrap(); + // Send a bunch of transactions, some of which are related to the event we are testing for. + let mut pending_tx_builders = vec![]; + + for j in 0..(self.no_relevant + self.no_others) { + let (tx_req, address_index) = { + let first_random = rand::random::() % 5; + let second_random = rand::random::() % 5; + let tx_req = if j < self.no_relevant { + self.select_event(contract) + } else { + let random = match first_random { + 0 => contract.testNoIndexed().into_transaction_request(), + 1 => contract.testTwoIndexed().into_transaction_request(), + 2 => contract.testThreeIndexed().into_transaction_request(), + 3 => contract.testOneData().into_transaction_request(), + 4 => contract.testTwoData().into_transaction_request(), + _ => unreachable!(), + }; + + let random_two = match second_random { + 0 => contract.testOneIOneD().into_transaction_request(), + 1 => contract.testTwoIOneD().into_transaction_request(), + 2 => contract.testTwoITwoD().into_transaction_request(), + 3 => contract.testNoIOneD().into_transaction_request(), + 4 => contract.testNoITwoD().into_transaction_request(), + _ => unreachable!(), + }; + match j % 2 { + 0 => random, + 1 => random_two, + _ => unreachable!(), + } + }; + let address_index = rand::random::() % addresses.len(); + (tx_req, address_index) + }; + let sender_address = addresses[address_index]; + + let funding = U256::from(1e18 as u64); + + provider + .anvil_set_balance(sender_address, funding) + .await + .unwrap(); + + let new_req = tx_req.with_from(sender_address); + let tx_req_final = provider + .fill(new_req) + .await + 
.unwrap() + .as_envelope() + .cloned() + .unwrap(); + pending_tx_builders.push(provider.send_tx_envelope(tx_req_final).await.unwrap()); + } + + provider + .anvil_auto_impersonate_account(false) + .await + .unwrap(); + provider.anvil_set_auto_mine(true).await.unwrap(); + + for pending_tx in pending_tx_builders { + pending_tx.watch().await.unwrap(); + } + } + + fn select_event, N: Network>( + &self, + contract: &EventEmitterInstance, + ) -> N::TransactionRequest { + match self.event_type { + (0, 0) => contract.testNoIndexed().into_transaction_request(), + (1, 0) => contract.testOneIndexed().into_transaction_request(), + (2, 0) => contract.testTwoIndexed().into_transaction_request(), + (3, 0) => contract.testThreeIndexed().into_transaction_request(), + (0, 1) => contract.testNoIOneD().into_transaction_request(), + (0, 2) => contract.testNoITwoD().into_transaction_request(), + (1, 1) => contract.testOneIOneD().into_transaction_request(), + (1, 2) => contract.testOneITwoD().into_transaction_request(), + (2, 1) => contract.testTwoIOneD().into_transaction_request(), + (2, 2) => contract.testTwoITwoD().into_transaction_request(), + (3, 1) => contract.testOneData().into_transaction_request(), + (3, 2) => contract.testTwoData().into_transaction_request(), + _ => contract.testNoIndexed().into_transaction_request(), + } + } +} + pub trait ContractUpdate: std::fmt::Debug where T: Transport + Clone, @@ -1125,6 +1327,16 @@ where fn apply_to(&self, ctx: &TestContext, contract: &Self::Contract) -> impl Future; } +impl ContractUpdate for ReceiptUpdate +where + T: Transport + Clone, +{ + type Contract = EventEmitterInstance>; + + async fn apply_to(&self, ctx: &TestContext, contract: &Self::Contract) { + self.apply_update(ctx, contract).await + } +} #[derive(Clone, Debug)] pub enum ChangeType { Deletion, diff --git a/mp2-v1/tests/common/cases/table_source.rs b/mp2-v1/tests/common/cases/table_source.rs index 81edab14f..8eebbf489 100644 --- a/mp2-v1/tests/common/cases/table_source.rs 
+++ b/mp2-v1/tests/common/cases/table_source.rs @@ -2,7 +2,9 @@ use std::{ array, assert_matches::assert_matches, collections::{BTreeSet, HashMap}, + fmt::Debug, future::Future, + hash::Hash, str::FromStr, sync::atomic::{AtomicU64, AtomicUsize}, }; @@ -10,6 +12,7 @@ use std::{ use alloy::{ eips::BlockNumberOrTag, primitives::{Address, U256}, + providers::{Provider, ProviderBuilder}, }; use anyhow::{bail, Result}; use futures::{future::BoxFuture, FutureExt}; @@ -17,13 +20,14 @@ use itertools::Itertools; use log::{debug, info}; use mp2_common::{ eth::{EventLogInfo, ProofQuery, StorageSlot, StorageSlotNode}, + poseidon::H, proof::ProofWithVK, - types::HashOutput, + types::{GFp, HashOutput}, }; use mp2_v1::{ api::{ - compute_table_info, merge_metadata_hash, metadata_hash as metadata_hash_function, - SlotInput, SlotInputs, + combine_digest_and_block, compute_table_info, merge_metadata_hash, + metadata_hash as metadata_hash_function, SlotInput, SlotInputs, }, indexing::{ block::BlockPrimaryIndex, @@ -31,7 +35,7 @@ use mp2_v1::{ row::{RowTreeKey, ToNonce}, }, values_extraction::{ - gadgets::{column_gadget::extract_value, column_info::ColumnInfo}, + gadgets::{column_info::ExtractedColumnInfo, metadata_gadget::TableMetadata}, identifier_for_inner_mapping_key_column, identifier_for_mapping_key_column, identifier_for_outer_mapping_key_column, identifier_for_value_column, StorageSlotInfo, }, @@ -44,6 +48,10 @@ use rand::{ }; use crate::common::{ + cases::{ + contract::EventContract, + indexing::{ReceiptUpdate, TableRowValues, TX_INDEX_COLUMN}, + }, final_extraction::{ExtractionProofInput, ExtractionTableProof, MergeExtractionProof}, proof_storage::{ProofKey, ProofStorage}, rowtree::SecondaryIndexCell, @@ -53,9 +61,7 @@ use crate::common::{ use super::{ contract::{Contract, ContractController, MappingUpdate, SimpleSingleValues, TestContract}, - indexing::{ - ChangeType, TableRowUpdate, TableRowValues, UpdateType, SINGLE_SLOTS, SINGLE_STRUCT_SLOT, - }, + indexing::{ChangeType, 
TableRowUpdate, UpdateType, SINGLE_SLOTS, SINGLE_STRUCT_SLOT}, slot_info::{LargeStruct, MappingInfo, StorageSlotMappingKey, StorageSlotValue, StructMapping}, }; @@ -65,46 +71,44 @@ fn metadata_hash( chain_id: u64, extra: Vec, ) -> MetadataHash { - metadata_hash_function::( - slot_input, - contract_address, - chain_id, - extra, - ) + metadata_hash_function(slot_input, contract_address, chain_id, extra) } /// Save the columns information of same slot and EVM word. #[derive(Debug)] -struct SlotEvmWordColumns(Vec); +struct SlotEvmWordColumns(Vec); impl SlotEvmWordColumns { - fn new(column_info: Vec) -> Self { + fn new(column_info: Vec) -> Self { // Ensure the column information should have the same slot and EVM word. - let slot = column_info[0].slot(); - let evm_word = column_info[0].evm_word(); + + let slot = column_info[0].extraction_id()[0].0 as u8; + let evm_word = column_info[0].location_offset().0 as u32; column_info[1..].iter().for_each(|col| { - assert_eq!(col.slot(), slot); - assert_eq!(col.evm_word(), evm_word); + let col_slot = col.extraction_id()[0].0 as u8; + let col_word = col.location_offset().0 as u32; + assert_eq!(col_slot, slot); + assert_eq!(col_word, evm_word); }); Self(column_info) } fn slot(&self) -> u8 { // The columns should have the same slot. - u8::try_from(self.0[0].slot().to_canonical_u64()).unwrap() + u8::try_from(self.0[0].extraction_id()[0].to_canonical_u64()).unwrap() } fn evm_word(&self) -> u32 { // The columns should have the same EVM word. - u32::try_from(self.0[0].evm_word().to_canonical_u64()).unwrap() + u32::try_from(self.0[0].location_offset().to_canonical_u64()).unwrap() } - fn column_info(&self) -> &[ColumnInfo] { + fn column_info(&self) -> &[ExtractedColumnInfo] { &self.0 } } /// What is the secondary index chosen for the table in the mapping. 
/// Each entry contains the identifier of the column expected to store in our tree -#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Hash)] +#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Hash, Copy)] pub enum MappingIndex { OuterKey(u64), InnerKey(u64), @@ -398,6 +402,7 @@ impl TableSource for SingleExtractionArgs { impl TableSource for MergeSource { type Metadata = (SlotInputs, SlotInputs); + fn get_data(&self) -> Self::Metadata { (self.single.get_data(), self.mapping.get_data()) } @@ -430,13 +435,7 @@ impl TableSource for MergeSource { fn metadata_hash(&self, contract_address: Address, chain_id: u64) -> MetadataHash { let (single, mapping) = self.get_data(); - merge_metadata_hash::( - contract_address, - chain_id, - vec![], - single, - mapping, - ) + merge_metadata_hash(contract_address, chain_id, vec![], single, mapping) } fn can_query(&self) -> bool { @@ -641,13 +640,8 @@ impl MergeSource { // add the metadata hashes together - this is mostly for debugging let (simple, mapping) = self.get_data(); - let md = merge_metadata_hash::( - contract.address, - contract.chain_id, - vec![], - simple, - mapping, - ); + let md = + merge_metadata_hash(contract.address, contract.chain_id, vec![], simple, mapping); assert!(extract_a != extract_b); Ok(( ExtractionProofInput::Merge(MergeExtractionProof { @@ -670,13 +664,144 @@ pub(crate) struct LengthExtractionArgs { pub(crate) value: u8, } -/// Receipt extraction arguments -#[derive(Serialize, Deserialize, Debug, Hash, Eq, PartialEq, Clone, Copy)] -pub(crate) struct ReceiptExtractionArgs { - /// The event data - pub(crate) event: EventLogInfo, - /// column that will be the secondary index - pub(crate) index: u64, +pub trait ReceiptExtractionArgs: + Serialize + for<'de> Deserialize<'de> + Debug + Hash + Eq + PartialEq + Clone + Copy +{ + const NO_TOPICS: usize; + const MAX_DATA: usize; + + fn new(address: Address, event_signature: &str) -> Self + where + Self: Sized; + + fn get_event(&self) -> 
EventLogInfo<{ Self::NO_TOPICS }, { Self::MAX_DATA }>; + + fn get_index(&self) -> u64; +} + +impl ReceiptExtractionArgs + for EventLogInfo +{ + const MAX_DATA: usize = MAX_DATA; + const NO_TOPICS: usize = NO_TOPICS; + + fn new(address: Address, event_signature: &str) -> Self + where + Self: Sized, + { + EventLogInfo::::new(address, event_signature) + } + + fn get_event(&self) -> EventLogInfo<{ Self::NO_TOPICS }, { Self::MAX_DATA }> + where + [(); Self::NO_TOPICS]:, + [(); Self::MAX_DATA]:, + { + let topics: [usize; Self::NO_TOPICS] = self + .topics + .into_iter() + .collect::>() + .try_into() + .unwrap(); + let data: [usize; Self::MAX_DATA] = self + .data + .into_iter() + .collect::>() + .try_into() + .unwrap(); + EventLogInfo::<{ Self::NO_TOPICS }, { Self::MAX_DATA }> { + size: self.size, + address: self.address, + add_rel_offset: self.add_rel_offset, + event_signature: self.event_signature, + sig_rel_offset: self.sig_rel_offset, + topics, + data, + } + } + + fn get_index(&self) -> u64 { + use plonky2::{ + field::types::{Field, PrimeField64}, + plonk::config::Hasher, + }; + + let tx_index_input = [ + self.address.as_slice(), + self.event_signature.as_slice(), + TX_INDEX_COLUMN.as_bytes(), + ] + .concat() + .into_iter() + .map(GFp::from_canonical_u8) + .collect::>(); + H::hash_no_pad(&tx_index_input).elements[0].to_canonical_u64() + } +} + +impl TableSource for R +where + [(); ::NO_TOPICS]:, + [(); ::MAX_DATA]:, + [(); 7 - 2 - ::NO_TOPICS - ::MAX_DATA]:, +{ + type Metadata = EventLogInfo<{ R::NO_TOPICS }, { R::MAX_DATA }>; + + fn can_query(&self) -> bool { + false + } + + fn get_data(&self) -> Self::Metadata { + self.get_event() + } + + fn init_contract_data<'a>( + &'a mut self, + ctx: &'a mut TestContext, + contract: &'a Contract, + ) -> BoxFuture<'a, Vec>> { + async move { + let contract_update = + ReceiptUpdate::new((R::NO_TOPICS as u8, R::MAX_DATA as u8), 5, 15); + + let provider = ProviderBuilder::new() + .with_recommended_fillers() + .wallet(ctx.wallet()) + 
.on_http(ctx.rpc_url.parse().unwrap()); + + let event_emitter = EventContract::new(contract.address(), provider.root()); + event_emitter + .apply_update(ctx, &contract_update) + .await + .unwrap(); + vec![] + } + .boxed() + } + + async fn generate_extraction_proof_inputs( + &self, + _ctx: &mut TestContext, + _contract: &Contract, + _value_key: ProofKey, + ) -> Result<(ExtractionProofInput, HashOutput)> { + todo!("Implement as part of CRY-25") + } + + fn random_contract_update<'a>( + &'a mut self, + _ctx: &'a mut TestContext, + _contract: &'a Contract, + _c: ChangeType, + ) -> BoxFuture<'a, Vec>> { + todo!("Implement as part of CRY-25") + } + + fn metadata_hash(&self, _contract_address: Address, _chain_id: u64) -> MetadataHash { + let table_metadata = TableMetadata::<7, 2>::from(self.get_event()); + let digest = table_metadata.digest(); + combine_digest_and_block(digest) + } } /// Contract extraction arguments (C.3) @@ -811,9 +936,9 @@ impl SingleExtractionArgs { .await .storage_proof[0] .value; - let value_bytes = value.to_be_bytes(); + let value_bytes: [u8; 32] = value.to_be_bytes(); evm_word_col.column_info().iter().for_each(|col_info| { - let extracted_value = extract_value(&value_bytes, col_info); + let extracted_value = col_info.extract_value(value_bytes.as_slice()); let extracted_value = U256::from_be_bytes(extracted_value); let id = col_info.identifier().to_canonical_u64(); let cell = Cell::new(col_info.identifier().to_canonical_u64(), extracted_value); @@ -838,7 +963,7 @@ impl SingleExtractionArgs { }) } - fn table_info(&self, contract: &Contract) -> Vec { + fn table_info(&self, contract: &Contract) -> Vec { table_info(contract, self.slot_inputs.clone()) } @@ -1418,7 +1543,7 @@ impl MappingExtractionArgs { fn storage_slot_info( &self, evm_word: u32, - table_info: Vec, + table_info: Vec, mapping_key: &T, ) -> StorageSlotInfo { let storage_slot = mapping_key.storage_slot(self.slot, evm_word); @@ -1457,9 +1582,9 @@ impl MappingExtractionArgs { 
.storage_proof[0] .value; - let value_bytes = value.to_be_bytes(); + let value_bytes: [u8; 32] = value.to_be_bytes(); evm_word_col.column_info().iter().for_each(|col_info| { - let bytes = extract_value(&value_bytes, col_info); + let bytes = col_info.extract_value(&value_bytes); let value = U256::from_be_bytes(bytes); debug!( "Mapping extract value: column: {:?}, value = {}", @@ -1473,7 +1598,7 @@ impl MappingExtractionArgs { ::Value::from_u256_slice(&extracted_values) } - fn table_info(&self, contract: &Contract) -> Vec { + fn table_info(&self, contract: &Contract) -> Vec { table_info(contract, self.slot_inputs.clone()) } @@ -1484,17 +1609,20 @@ impl MappingExtractionArgs { } /// Contruct the table information by the contract and slot inputs. -fn table_info(contract: &Contract, slot_inputs: Vec) -> Vec { +fn table_info(contract: &Contract, slot_inputs: Vec) -> Vec { compute_table_info(slot_inputs, &contract.address, contract.chain_id, vec![]) } /// Construct the column information for each slot and EVM word. -fn evm_word_column_info(table_info: &[ColumnInfo]) -> Vec { +fn evm_word_column_info(table_info: &[ExtractedColumnInfo]) -> Vec { // Initialize a mapping of `(slot, evm_word) -> column_Identifier`. 
let mut column_info_map = HashMap::new(); table_info.iter().for_each(|col| { column_info_map - .entry((col.slot(), col.evm_word())) + .entry(( + col.extraction_id()[7].0 as u8, + col.location_offset().0 as u32, + )) .and_modify(|cols: &mut Vec<_>| cols.push(col.clone())) .or_insert(vec![col.clone()]); }); diff --git a/mp2-v1/tests/common/context.rs b/mp2-v1/tests/common/context.rs index 149ba80bb..383943759 100644 --- a/mp2-v1/tests/common/context.rs +++ b/mp2-v1/tests/common/context.rs @@ -200,8 +200,13 @@ const INDEX_INFO_FILE: &str = "index.info"; impl TestContext { pub(crate) fn wallet(&self) -> EthereumWallet { - let signer: PrivateKeySigner = self.local_node.as_ref().unwrap().keys()[0].clone().into(); - EthereumWallet::from(signer) + let keys = self.local_node.as_ref().unwrap().keys(); + let signer: PrivateKeySigner = keys[0].clone().into(); + let mut wallet = EthereumWallet::from(signer); + keys.iter().skip(1).for_each(|key| { + wallet.register_signer::(key.clone().into()); + }); + wallet } /// Build the parameters. 
/// diff --git a/mp2-v1/tests/common/mod.rs b/mp2-v1/tests/common/mod.rs index 16c16d533..477f6c935 100644 --- a/mp2-v1/tests/common/mod.rs +++ b/mp2-v1/tests/common/mod.rs @@ -36,7 +36,7 @@ pub(crate) const TEST_MAX_COLUMNS: usize = 32; pub(crate) const TEST_MAX_FIELD_PER_EVM: usize = 32; type ColumnIdentifier = u64; -type PublicParameters = mp2_v1::api::PublicParameters; +type PublicParameters = mp2_v1::api::PublicParameters; fn cell_tree_proof_to_hash(proof: &[u8]) -> HashOutput { let root_pi = ProofWithVK::deserialize(proof) diff --git a/mp2-v1/tests/common/rowtree.rs b/mp2-v1/tests/common/rowtree.rs index dfe894346..710c8d8fe 100644 --- a/mp2-v1/tests/common/rowtree.rs +++ b/mp2-v1/tests/common/rowtree.rs @@ -133,6 +133,23 @@ impl TestContext { &inner_mapping_key, ) } + TableRowUniqueID::Receipt(tx_index_id, gas_used_id) => { + let [tx_index, gas_used]: [[_; MAPPING_KEY_LEN]; 2] = [tx_index_id, gas_used_id].map(|column_id| { + row.column_value(column_id) + .unwrap_or_else(|| { + panic!("Cannot fetch the key of receipt column: column_id = {column_id}") + }) + .to_be_bytes() + }); + debug!( + "FETCHED receipt values to compute row_unique_data: tx_index = {:?}, gas_used = {:?}", + hex::encode(tx_index), + hex::encode(gas_used), + ); + + // The receipt row unique id is computed in the same way as mapping of mappings + row_unique_data_for_mapping_of_mappings_leaf(&tx_index, &gas_used) + } }; // NOTE remove that when playing more with sec. 
index assert!(!multiplier, "secondary index should be individual type"); diff --git a/mp2-v1/tests/common/storage_trie.rs b/mp2-v1/tests/common/storage_trie.rs index a8ec7bc20..760ed89e3 100644 --- a/mp2-v1/tests/common/storage_trie.rs +++ b/mp2-v1/tests/common/storage_trie.rs @@ -329,11 +329,19 @@ impl TrieNode { "[+] [+] MPT SLOT {} -> identifiers {:?} value {:?} value.digest() = {:?}", slot_info.slot().slot(), slot_info - .metadata::() - .extracted_table_info() + .table_columns(ctx.contract_address, ctx.chain_id, vec![]) + .extracted_columns() .iter() - .map(|info| info.identifier().to_canonical_u64()) - .collect_vec(), + .filter_map(|column| { + let check_one = column.extraction_id()[7].0 as u8 == slot_info.slot().slot(); + let check_two = column.location_offset().0 as u32 == slot_info.evm_word(); + if check_one && check_two { + Some(column.identifier().to_canonical_u64()) + } else { + None + } + }) + .collect::>(), U256::from_be_slice(&value), pi.values_digest(), ); diff --git a/mp2-v1/tests/common/table.rs b/mp2-v1/tests/common/table.rs index 5a5ce3b8f..3ee8f7af0 100644 --- a/mp2-v1/tests/common/table.rs +++ b/mp2-v1/tests/common/table.rs @@ -7,15 +7,12 @@ use futures::{ }; use itertools::Itertools; use log::debug; -use mp2_v1::{ - indexing::{ - block::{BlockPrimaryIndex, BlockTreeKey}, - cell::{self, Cell, CellTreeKey, MerkleCell, MerkleCellTree}, - index::IndexNode, - row::{CellCollection, Row, RowTreeKey}, - ColumnID, - }, - values_extraction::gadgets::column_info::ColumnInfo, +use mp2_v1::indexing::{ + block::{BlockPrimaryIndex, BlockTreeKey}, + cell::{self, Cell, CellTreeKey, MerkleCell, MerkleCellTree}, + index::IndexNode, + row::{CellCollection, Row, RowTreeKey}, + ColumnID, }; use parsil::symbols::{ColumnKind, ContextProvider, ZkColumn, ZkTable}; use plonky2::field::types::PrimeField64; @@ -60,7 +57,7 @@ impl IndexType { #[derive(Serialize, Deserialize, Clone, Debug)] pub struct TableColumn { pub name: String, - pub info: ColumnInfo, + pub identifier: 
u64, pub index: IndexType, /// multiplier means if this columns come from a "merged" table, then it either come from a /// table a or table b. One of these table is the "multiplier" table, the other is not. @@ -69,7 +66,7 @@ pub struct TableColumn { impl TableColumn { pub fn identifier(&self) -> ColumnID { - self.info.identifier().to_canonical_u64() + self.identifier } } @@ -84,6 +81,7 @@ pub enum TableRowUniqueID { Single, Mapping(ColumnID), MappingOfMappings(ColumnID, ColumnID), + Receipt(ColumnID, ColumnID), } #[derive(Clone, Debug, Serialize, Deserialize)] diff --git a/mp2-v1/tests/integrated_tests.rs b/mp2-v1/tests/integrated_tests.rs index 8cb2641f4..de3aeb37a 100644 --- a/mp2-v1/tests/integrated_tests.rs +++ b/mp2-v1/tests/integrated_tests.rs @@ -36,6 +36,7 @@ use common::{ use envconfig::Envconfig; use log::info; +use mp2_common::eth::EventLogInfo; use parsil::{ assembler::DynamicCircuitPis, parse_and_validate, @@ -90,6 +91,10 @@ async fn integrated_indexing() -> Result<()> { ctx.build_params(ParamsType::Indexing).unwrap(); info!("Params built"); + // For now we test that we can start a receipt case only. + let (_receipt, _genesis) = + TableIndexing::>::receipt_test_case(0, 0, &mut ctx).await?; + // NOTE: to comment to avoid very long tests... 
let (mut single, genesis) = TableIndexing::::single_value_test_case(&mut ctx).await?; From a94068c2034ab7ff114e2d727029c9d66946b211 Mon Sep 17 00:00:00 2001 From: Zack Youell Date: Mon, 23 Dec 2024 12:51:23 +0000 Subject: [PATCH 22/47] resolves CRY-25 --- mp2-common/src/eth.rs | 7 +- mp2-common/src/utils.rs | 35 + mp2-v1/src/api.rs | 8 +- mp2-v1/src/indexing/cell.rs | 4 +- mp2-v1/src/lib.rs | 2 - mp2-v1/src/values_extraction/api.rs | 1632 +++++++++--------- mp2-v1/src/values_extraction/branch.rs | 1 - mp2-v1/src/values_extraction/leaf_receipt.rs | 17 +- mp2-v1/src/values_extraction/mod.rs | 77 +- mp2-v1/src/values_extraction/planner.rs | 123 ++ mp2-v1/tests/common/cases/indexing.rs | 2 +- mp2-v1/tests/common/cases/table_source.rs | 104 +- mp2-v1/tests/common/rowtree.rs | 2 +- ryhope/src/storage/updatetree.rs | 1 + 14 files changed, 1179 insertions(+), 836 deletions(-) create mode 100644 mp2-v1/src/values_extraction/planner.rs diff --git a/mp2-common/src/eth.rs b/mp2-common/src/eth.rs index 1292de7de..aee5aa3c4 100644 --- a/mp2-common/src/eth.rs +++ b/mp2-common/src/eth.rs @@ -488,7 +488,7 @@ impl ReceiptProofInfo { .verify_proof(self.mpt_root, &mpt_key, self.mpt_proof.clone())? 
.ok_or(anyhow!("No proof found when verifying"))?; - let rlp_receipt = rlp::Rlp::new(&valid[1..]); + let rlp_receipt = rlp::Rlp::new(&valid[..]); ReceiptWithBloom::decode(&mut rlp_receipt.as_raw()) .map_err(|e| anyhow!("Could not decode receipt got: {}", e)) } @@ -1045,10 +1045,11 @@ mod test { #[tokio::test] async fn test_pidgy_pinguin_mapping_slot() -> Result<()> { // first pinguin holder https://dune.com/queries/2450476/4027653 - // holder: 0x29469395eaf6f95920e59f858042f0e28d98a20b + // This was outdated so the following is the updated address + // holder: 0xee5ac9c6db07c26e71207a41e64df42e1a2b05cf // NFT id https://opensea.io/assets/ethereum/0xbd3531da5cf5857e7cfaa92426877b022e612cf8/1116 let mapping_value = - Address::from_str("0x29469395eaf6f95920e59f858042f0e28d98a20b").unwrap(); + Address::from_str("0xee5ac9c6db07c26e71207a41e64df42e1a2b05cf").unwrap(); let nft_id: u32 = 1116; let mapping_key = left_pad32(&nft_id.to_be_bytes()); let url = get_mainnet_url(); diff --git a/mp2-common/src/utils.rs b/mp2-common/src/utils.rs index af0e59d63..3cb6a6bba 100644 --- a/mp2-common/src/utils.rs +++ b/mp2-common/src/utils.rs @@ -804,6 +804,41 @@ impl, const D: usize> SliceConnector for CircuitBui } } +/// Convert an Uint32 target to Uint8 targets. +pub fn unpack_u32_to_u8_targets, const D: usize>( + b: &mut CircuitBuilder, + u: Target, + endianness: Endianness, +) -> Vec { + let zero = b.zero(); + let mut bits = b.split_le(u, u32::BITS as usize); + match endianness { + Endianness::Big => bits.reverse(), + Endianness::Little => (), + }; + bits.chunks(8) + .map(|chunk| { + // let bits: Box> = match endianness { + let bits: Box> = match endianness { + Endianness::Big => Box::new(chunk.iter()), + Endianness::Little => Box::new(chunk.iter().rev()), + }; + bits.fold(zero, |acc, bit| b.mul_const_add(F::TWO, acc, bit.target)) + }) + .collect() +} + +/// Convert Uint32 targets to Uint8 targets. 
+pub fn unpack_u32s_to_u8_targets, const D: usize>( + b: &mut CircuitBuilder, + u32s: Vec, + endianness: Endianness, +) -> Vec { + u32s.into_iter() + .flat_map(|u| unpack_u32_to_u8_targets(b, u, endianness)) + .collect() +} + #[cfg(test)] mod test { use super::{bits_to_num, Packer, ToFields}; diff --git a/mp2-v1/src/api.rs b/mp2-v1/src/api.rs index d10af5705..53636dff4 100644 --- a/mp2-v1/src/api.rs +++ b/mp2-v1/src/api.rs @@ -12,19 +12,19 @@ use crate::{ values_extraction::{ self, compute_id_with_prefix, gadgets::column_info::{ExtractedColumnInfo, InputColumnInfo}, - identifier_block_column, identifier_for_inner_mapping_key_column, - identifier_for_mapping_key_column, identifier_for_outer_mapping_key_column, - identifier_for_value_column, ColumnMetadata, INNER_KEY_ID_PREFIX, KEY_ID_PREFIX, - OUTER_KEY_ID_PREFIX, + identifier_block_column, identifier_for_value_column, ColumnMetadata, INNER_KEY_ID_PREFIX, + KEY_ID_PREFIX, OUTER_KEY_ID_PREFIX, }, MAX_LEAF_VALUE_LEN, MAX_RECEIPT_LEAF_NODE_LEN, }; + use alloy::primitives::Address; use anyhow::Result; use itertools::Itertools; use log::debug; use mp2_common::{ digest::Digest, + group_hashing::map_to_curve_point, poseidon::H, types::HashOutput, utils::{Fieldable, ToFields}, diff --git a/mp2-v1/src/indexing/cell.rs b/mp2-v1/src/indexing/cell.rs index 7ad8461a2..29802ed34 100644 --- a/mp2-v1/src/indexing/cell.rs +++ b/mp2-v1/src/indexing/cell.rs @@ -57,7 +57,9 @@ pub async fn new_tree< /// Cell is the information stored in a specific cell of a specific row. /// A row node in the row tree contains a vector of such cells. -#[derive(Clone, Default, Debug, Serialize, Deserialize, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[derive( + Clone, Copy, Default, Debug, Serialize, Deserialize, Hash, PartialEq, Eq, PartialOrd, Ord, +)] pub struct Cell { /// The unique identifier of the cell, derived from the contract it comes /// from and its slot in its storage. 
diff --git a/mp2-v1/src/lib.rs b/mp2-v1/src/lib.rs index 5ca55964f..40efe183c 100644 --- a/mp2-v1/src/lib.rs +++ b/mp2-v1/src/lib.rs @@ -35,6 +35,4 @@ pub mod values_extraction; pub(crate) mod tests { /// Testing maximum columns pub(crate) const TEST_MAX_COLUMNS: usize = 32; - /// Testing maximum fields for each EVM word - pub(crate) const TEST_MAX_FIELD_PER_EVM: usize = 32; } diff --git a/mp2-v1/src/values_extraction/api.rs b/mp2-v1/src/values_extraction/api.rs index 1ffb27522..7db8d4994 100644 --- a/mp2-v1/src/values_extraction/api.rs +++ b/mp2-v1/src/values_extraction/api.rs @@ -19,7 +19,7 @@ use anyhow::{bail, ensure, Result}; use log::debug; use mp2_common::{ default_config, - eth::{ReceiptProofInfo, ReceiptQuery}, + eth::{EventLogInfo, ReceiptProofInfo}, mpt_sequential::PAD_LEN, poseidon::H, proof::{ProofInputSerialized, ProofWithVK}, @@ -144,13 +144,13 @@ where /// Create a circuit input for proving a leaf MPT node of a transaction receipt. pub fn new_receipt_leaf( info: &ReceiptProofInfo, - query: &ReceiptQuery, + event: &EventLogInfo, ) -> Self where [(); 7 - 2 - NO_TOPICS - MAX_DATA]:, { CircuitInput::LeafReceipt( - ReceiptLeafCircuit::::new::(info, query) + ReceiptLeafCircuit::::new::(info, event) .expect("Could not construct Receipt Leaf Circuit"), ) } @@ -502,806 +502,826 @@ where } } -// #[cfg(test)] -// mod tests { -// use super::{ -// super::{public_inputs, StorageSlotInfo}, -// *, -// }; -// use crate::{ -// tests::{TEST_MAX_COLUMNS, TEST_MAX_FIELD_PER_EVM}, -// values_extraction::{ -// compute_leaf_mapping_metadata_digest, compute_leaf_mapping_of_mappings_metadata_digest, -// compute_leaf_mapping_of_mappings_values_digest, compute_leaf_mapping_values_digest, -// compute_leaf_single_metadata_digest, compute_leaf_single_values_digest, -// identifier_raw_extra, -// }, -// MAX_LEAF_NODE_LEN, -// }; -// use alloy::primitives::Address; -// use eth_trie::{EthTrie, MemoryDB, Trie}; -// use itertools::Itertools; -// use log::info; -// use mp2_common::{ -// 
eth::{StorageSlot, StorageSlotNode}, -// group_hashing::weierstrass_to_point, -// mpt_sequential::utils::bytes_to_nibbles, -// types::MAPPING_LEAF_VALUE_LEN, -// }; -// use mp2_test::{ -// mpt_sequential::{generate_random_storage_mpt, generate_receipt_test_info}, -// utils::random_vector, -// }; -// use plonky2::field::types::Field; -// use plonky2_ecgfp5::curve::curve::Point; -// use rand::{thread_rng, Rng}; -// use std::{str::FromStr, sync::Arc}; - -// type CircuitInput = super::CircuitInput; -// type PublicParameters = super::PublicParameters; - -// #[derive(Debug)] -// struct TestEthTrie { -// trie: EthTrie, -// mpt_keys: Vec>, -// } - -// #[test] -// fn test_values_extraction_api_single_variable() { -// const TEST_SLOTS: [u8; 2] = [5, 10]; - -// let _ = env_logger::try_init(); - -// let storage_slot1 = StorageSlot::Simple(TEST_SLOTS[0] as usize); -// let storage_slot2 = StorageSlot::Simple(TEST_SLOTS[1] as usize); - -// let table_info = TEST_SLOTS -// .into_iter() -// .map(|slot| { -// let mut col_info = ColumnInfo::sample(); -// col_info.slot = F::from_canonical_u8(slot); -// col_info.evm_word = F::ZERO; - -// col_info -// }) -// .collect_vec(); - -// let test_slots = [ -// StorageSlotInfo::new(storage_slot1, table_info.clone()), -// StorageSlotInfo::new(storage_slot2, table_info), -// ]; - -// test_api(test_slots); -// } - -// #[test] -// fn test_values_extraction_api_single_struct() { -// const TEST_SLOT: u8 = 2; -// const TEST_EVM_WORDS: [u32; 2] = [10, 20]; - -// let _ = env_logger::try_init(); - -// let parent_slot = StorageSlot::Simple(TEST_SLOT as usize); -// let storage_slot1 = StorageSlot::Node(StorageSlotNode::new_struct( -// parent_slot.clone(), -// TEST_EVM_WORDS[0], -// )); -// let storage_slot2 = -// StorageSlot::Node(StorageSlotNode::new_struct(parent_slot, TEST_EVM_WORDS[1])); - -// let table_info = TEST_EVM_WORDS -// .into_iter() -// .map(|evm_word| { -// let mut col_info = ColumnInfo::sample(); -// col_info.slot = 
F::from_canonical_u8(TEST_SLOT); -// col_info.evm_word = F::from_canonical_u32(evm_word); - -// col_info -// }) -// .collect_vec(); - -// let test_slots = [ -// StorageSlotInfo::new(storage_slot1, table_info.clone()), -// StorageSlotInfo::new(storage_slot2, table_info), -// ]; - -// test_api(test_slots); -// } - -// #[test] -// fn test_values_extraction_api_mapping_variable() { -// const TEST_SLOT: u8 = 2; - -// let _ = env_logger::try_init(); - -// let mapping_key1 = vec![10]; -// let mapping_key2 = vec![20]; -// let storage_slot1 = StorageSlot::Mapping(mapping_key1, TEST_SLOT as usize); -// let storage_slot2 = StorageSlot::Mapping(mapping_key2, TEST_SLOT as usize); - -// // The first and second column infos are same (only for testing). -// let table_info = [0; 2] -// .into_iter() -// .map(|_| { -// let mut col_info = ColumnInfo::sample(); -// col_info.slot = F::from_canonical_u8(TEST_SLOT); -// col_info.evm_word = F::ZERO; - -// col_info -// }) -// .collect_vec(); - -// let test_slots = [ -// StorageSlotInfo::new(storage_slot1, table_info.clone()), -// StorageSlotInfo::new(storage_slot2, table_info), -// ]; - -// test_api(test_slots); -// } - -// #[test] -// fn test_values_extraction_api_mapping_struct() { -// const TEST_SLOT: u8 = 2; -// const TEST_EVM_WORDS: [u32; 2] = [10, 20]; - -// let _ = env_logger::try_init(); - -// let parent_slot = StorageSlot::Mapping(vec![10, 20], TEST_SLOT as usize); -// let storage_slot1 = StorageSlot::Node(StorageSlotNode::new_struct( -// parent_slot.clone(), -// TEST_EVM_WORDS[0], -// )); -// let storage_slot2 = -// StorageSlot::Node(StorageSlotNode::new_struct(parent_slot, TEST_EVM_WORDS[1])); - -// let table_info = TEST_EVM_WORDS -// .into_iter() -// .map(|evm_word| { -// let mut col_info = ColumnInfo::sample(); -// col_info.slot = F::from_canonical_u8(TEST_SLOT); -// col_info.evm_word = F::from_canonical_u32(evm_word); - -// col_info -// }) -// .collect_vec(); - -// let test_slots = [ -// StorageSlotInfo::new(storage_slot1, 
table_info.clone()), -// StorageSlotInfo::new(storage_slot2, table_info), -// ]; - -// test_api(test_slots); -// } - -// #[test] -// fn test_values_extraction_api_mapping_of_mappings() { -// const TEST_SLOT: u8 = 2; -// const TEST_EVM_WORDS: [u32; 2] = [10, 20]; - -// let _ = env_logger::try_init(); - -// let grand_slot = StorageSlot::Mapping(vec![10, 20], TEST_SLOT as usize); -// let parent_slot = -// StorageSlot::Node(StorageSlotNode::new_mapping(grand_slot, vec![30, 40]).unwrap()); -// let storage_slot1 = StorageSlot::Node(StorageSlotNode::new_struct( -// parent_slot.clone(), -// TEST_EVM_WORDS[0], -// )); -// let storage_slot2 = -// StorageSlot::Node(StorageSlotNode::new_struct(parent_slot, TEST_EVM_WORDS[1])); - -// let table_info = TEST_EVM_WORDS -// .into_iter() -// .map(|evm_word| { -// let mut col_info = ColumnInfo::sample(); -// col_info.slot = F::from_canonical_u8(TEST_SLOT); -// col_info.evm_word = F::from_canonical_u32(evm_word); - -// col_info -// }) -// .collect_vec(); - -// let test_slots = [ -// StorageSlotInfo::new(storage_slot1, table_info.clone()), -// StorageSlotInfo::new(storage_slot2, table_info), -// ]; - -// test_api(test_slots); -// } - -// #[test] -// fn test_values_extraction_api_branch_with_multiple_children() { -// const TEST_SLOT: u8 = 2; -// const NUM_CHILDREN: usize = 6; - -// let _ = env_logger::try_init(); - -// let storage_slot = StorageSlot::Simple(TEST_SLOT as usize); -// let table_info = { -// let mut col_info = ColumnInfo::sample(); -// col_info.slot = F::from_canonical_u8(TEST_SLOT); -// col_info.evm_word = F::ZERO; - -// vec![col_info] -// }; -// let test_slot = StorageSlotInfo::new(storage_slot, table_info); - -// test_branch_with_multiple_children(NUM_CHILDREN, test_slot); -// } - -// #[test] -// fn test_values_extraction_api_serialization() { -// const TEST_SLOT: u8 = 10; -// const TEST_EVM_WORD: u32 = 5; -// const TEST_OUTER_KEY: [u8; 2] = [10, 20]; -// const TEST_INNER_KEY: [u8; 3] = [30, 40, 50]; - -// let _ = 
env_logger::try_init(); - -// let rng = &mut thread_rng(); - -// // Test serialization for public parameters. -// let params = PublicParameters::build(); -// let encoded = bincode::serialize(¶ms).unwrap(); -// let decoded_params: PublicParameters = bincode::deserialize(&encoded).unwrap(); -// assert!(decoded_params == params); - -// let test_circuit_input = |input: CircuitInput| { -// // Test circuit input serialization. -// let encoded_input = bincode::serialize(&input).unwrap(); -// let decoded_input: CircuitInput = bincode::deserialize(&encoded_input).unwrap(); - -// // Test proof serialization. -// let proof = params.generate_proof(decoded_input).unwrap(); -// let encoded_proof = bincode::serialize(&proof).unwrap(); -// let decoded_proof: ProofWithVK = bincode::deserialize(&encoded_proof).unwrap(); -// assert_eq!(proof, decoded_proof); - -// encoded_proof -// }; - -// // Construct the table info for testing. -// let table_info = { -// vec![ExtractedColumnInfo::sample( -// true, -// &[ -// F::ZERO, -// F::ZERO, -// F::ZERO, -// F::ZERO, -// F::ZERO, -// F::ZERO, -// F::ZERO, -// F::from_canonical_u8(TEST_SLOT), -// ], -// F::from_canonical_u32(TEST_EVM_WORD), -// )] -// }; - -// // Test for single variable leaf. -// let parent_slot = StorageSlot::Simple(TEST_SLOT as usize); -// let storage_slot = StorageSlot::Node(StorageSlotNode::new_struct( -// parent_slot.clone(), -// TEST_EVM_WORD, -// )); -// let test_slot = StorageSlotInfo::new(storage_slot, table_info.clone()); -// let mut test_trie = generate_test_trie(1, &test_slot); -// let proof = test_trie.trie.get_proof(&test_trie.mpt_keys[0]).unwrap(); -// test_circuit_input(CircuitInput::new_single_variable_leaf( -// proof.last().unwrap().to_vec(), -// TEST_SLOT, -// TEST_EVM_WORD, -// table_info.clone(), -// )); - -// // Test for mapping variable leaf. 
-// let parent_slot = StorageSlot::Mapping(TEST_OUTER_KEY.to_vec(), TEST_SLOT as usize); -// let storage_slot = StorageSlot::Node(StorageSlotNode::new_struct( -// parent_slot.clone(), -// TEST_EVM_WORD, -// )); -// let test_slot = StorageSlotInfo::new(storage_slot, table_info.clone()); -// let mut test_trie = generate_test_trie(1, &test_slot); -// let proof = test_trie.trie.get_proof(&test_trie.mpt_keys[0]).unwrap(); -// let key_id = rng.gen(); -// test_circuit_input(CircuitInput::new_mapping_variable_leaf( -// proof.last().unwrap().to_vec(), -// TEST_SLOT, -// TEST_OUTER_KEY.to_vec(), -// key_id, -// TEST_EVM_WORD, -// table_info.clone(), -// )); - -// // Test for mapping of mappings leaf. -// let grand_slot = StorageSlot::Mapping(TEST_OUTER_KEY.to_vec(), TEST_SLOT as usize); -// let parent_slot = StorageSlot::Node( -// StorageSlotNode::new_mapping(grand_slot, TEST_INNER_KEY.to_vec()).unwrap(), -// ); -// let storage_slot = -// StorageSlot::Node(StorageSlotNode::new_struct(parent_slot, TEST_EVM_WORD)); -// let test_slot = StorageSlotInfo::new(storage_slot, table_info.clone()); -// let mut test_trie = generate_test_trie(2, &test_slot); -// let proof = test_trie.trie.get_proof(&test_trie.mpt_keys[0]).unwrap(); -// let outer_key_id = rng.gen(); -// let inner_key_id = rng.gen(); -// let encoded = test_circuit_input(CircuitInput::new_mapping_of_mappings_leaf( -// proof.last().unwrap().to_vec(), -// TEST_SLOT, -// TEST_OUTER_KEY.to_vec(), -// TEST_INNER_KEY.to_vec(), -// outer_key_id, -// inner_key_id, -// TEST_EVM_WORD, -// table_info, -// )); - -// // Test for branch. 
-// let branch_node = proof[proof.len() - 2].to_vec(); -// test_circuit_input(CircuitInput::Branch(BranchInput { -// input: InputNode { -// node: branch_node.clone(), -// }, -// serialized_child_proofs: vec![encoded], -// })); -// } - -// fn test_api(test_slots: [StorageSlotInfo; 2]) { -// info!("Generating MPT proofs"); -// let memdb = Arc::new(MemoryDB::new(true)); -// let mut trie = EthTrie::new(memdb.clone()); -// let mpt_keys = test_slots -// .iter() -// .map(|test_slot| { -// let mpt_key = test_slot.slot.mpt_key(); -// let value = random_vector(MAPPING_LEAF_VALUE_LEN); -// trie.insert(&mpt_key, &rlp::encode(&value)).unwrap(); -// mpt_key -// }) -// .collect_vec(); -// trie.root_hash().unwrap(); -// let mpt_proofs = mpt_keys -// .into_iter() -// .map(|key| trie.get_proof(&key).unwrap()) -// .collect_vec(); -// // Get the branch node. -// let node_len = mpt_proofs[0].len(); -// // Ensure both are located in the same branch. -// assert_eq!(node_len, mpt_proofs[1].len()); -// let branch_node = mpt_proofs[0][node_len - 2].clone(); -// assert_eq!(branch_node, mpt_proofs[1][node_len - 2]); - -// info!("Generating parameters"); -// let params = build_circuits_params(); - -// let leaf_proofs = test_slots -// .into_iter() -// .zip_eq(mpt_proofs) -// .enumerate() -// .map(|(i, (test_slot, mut leaf_proof))| { -// info!("Proving leaf {i}"); -// prove_leaf(¶ms, leaf_proof.pop().unwrap(), test_slot) -// }) -// .collect(); - -// info!("Proving branch"); -// let _branch_proof = prove_branch(¶ms, branch_node, leaf_proofs); -// } - -// /// Generate a branch proof. 
-// fn prove_branch( -// params: &PublicParameters, -// node: Vec, -// leaf_proofs: Vec>, -// ) -> Vec { -// let input = CircuitInput::new_branch(node, leaf_proofs); -// generate_proof(params, input).unwrap() -// } -// #[test] -// fn test_receipt_api() { -// let receipt_proof_infos = generate_receipt_test_info::<1, 0>(); -// let receipt_proofs = receipt_proof_infos.proofs(); -// let query = receipt_proof_infos.query(); -// // We need two nodes that are children of the same branch so we compare the last but two nodes for each of them until we find a case that works -// let (info_one, info_two) = if let Some((one, two)) = receipt_proofs -// .iter() -// .enumerate() -// .find_map(|(i, current_proof)| { -// let current_node_second_to_last = -// current_proof.mpt_proof[current_proof.mpt_proof.len() - 2].clone(); -// receipt_proofs -// .iter() -// .skip(i + 1) -// .find(|find_info| { -// find_info.mpt_proof[find_info.mpt_proof.len() - 2].clone() -// == current_node_second_to_last -// }) -// .map(|matching| (current_proof, matching)) -// }) { -// (one, two) -// } else { -// panic!("No relevant events with same branch node parent") -// }; - -// let proof_length_1 = info_one.mpt_proof.len(); -// let proof_length_2 = info_two.mpt_proof.len(); - -// let list_one = rlp::decode_list::>(&info_one.mpt_proof[proof_length_1 - 2]); -// let list_two = rlp::decode_list::>(&info_two.mpt_proof[proof_length_2 - 2]); - -// assert_eq!(list_one, list_two); -// assert!(list_one.len() == 17); - -// println!("Generating params..."); -// let params = build_circuits_params(); - -// println!("Proving leaf 1..."); -// let leaf_input_1 = CircuitInput::new_receipt_leaf(info_one, query); -// let now = std::time::Instant::now(); -// let leaf_proof1 = generate_proof(¶ms, leaf_input_1).unwrap(); -// { -// let lp = ProofWithVK::deserialize(&leaf_proof1).unwrap(); -// let pub1 = PublicInputs::new(&lp.proof.public_inputs); -// let (_, ptr) = pub1.mpt_key_info(); -// println!("pointer: {}", ptr); -// } -// 
println!( -// "Proof for leaf 1 generated in {} ms", -// now.elapsed().as_millis() -// ); - -// println!("Proving leaf 2..."); -// let leaf_input_2 = CircuitInput::new_receipt_leaf(info_two, query); -// let now = std::time::Instant::now(); -// let leaf_proof2 = generate_proof(¶ms, leaf_input_2).unwrap(); -// println!( -// "Proof for leaf 2 generated in {} ms", -// now.elapsed().as_millis() -// ); - -// // The branch case for receipts is identical to that of a mapping so we use the same api. -// println!("Proving branch..."); -// let branch_input = CircuitInput::new_branch( -// info_two.mpt_proof[proof_length_1 - 2].clone(), -// vec![leaf_proof1, leaf_proof2], -// ); - -// let now = std::time::Instant::now(); -// generate_proof(¶ms, branch_input).unwrap(); -// println!( -// "Proof for branch node generated in {} ms", -// now.elapsed().as_millis() -// ); -// } - -// /// Generate a leaf proof. -// fn prove_leaf(params: &PublicParameters, node: Vec, test_slot: StorageSlotInfo) -> Vec { -// // RLP(RLP(compact(partial_key_in_nibble)), RLP(value)) -// let leaf_tuple: Vec> = rlp::decode_list(&node); -// assert_eq!(leaf_tuple.len(), 2); -// let value = leaf_tuple[1][1..].to_vec().try_into().unwrap(); - -// let evm_word = test_slot.evm_word(); -// let table_info = test_slot.table_info(); -// let metadata = test_slot.metadata::(); -// let extracted_column_identifiers = metadata.extracted_column_identifiers(); - -// // Build the identifier extra data, it's used to compute the key IDs. 
-// const TEST_CONTRACT_ADDRESS: &str = "0x105dD0eF26b92a3698FD5AaaF688577B9Cafd970"; -// const TEST_CHAIN_ID: u64 = 1000; -// let id_extra = identifier_raw_extra( -// &Address::from_str(TEST_CONTRACT_ADDRESS).unwrap(), -// TEST_CHAIN_ID, -// vec![], -// ); - -// let (expected_metadata_digest, expected_values_digest, circuit_input) = match &test_slot -// .slot -// { -// // Simple variable slot -// StorageSlot::Simple(slot) => { -// let metadata_digest = compute_leaf_single_metadata_digest::< -// TEST_MAX_COLUMNS, -// TEST_MAX_FIELD_PER_EVM, -// >(table_info.to_vec()); - -// let values_digest = compute_leaf_single_values_digest::( -// table_info.to_vec(), -// &extracted_column_identifiers, -// value, -// ); - -// let circuit_input = CircuitInput::new_single_variable_leaf( -// node, -// *slot as u8, -// evm_word, -// table_info.to_vec(), -// ); - -// (metadata_digest, values_digest, circuit_input) -// } -// // Mapping variable -// StorageSlot::Mapping(mapping_key, slot) => { -// let outer_key_id = test_slot.outer_key_id_raw(id_extra).unwrap(); -// let metadata_digest = compute_leaf_mapping_metadata_digest::< -// TEST_MAX_COLUMNS, -// TEST_MAX_FIELD_PER_EVM, -// >( -// table_info.to_vec(), *slot as u8, outer_key_id -// ); - -// let values_digest = compute_leaf_mapping_values_digest::( -// table_info.to_vec(), -// &extracted_column_identifiers, -// value, -// mapping_key.clone(), -// evm_word, -// outer_key_id, -// ); - -// let circuit_input = CircuitInput::new_mapping_variable_leaf( -// node, -// *slot as u8, -// mapping_key.clone(), -// outer_key_id, -// evm_word, -// table_info.to_vec(), -// ); - -// (metadata_digest, values_digest, circuit_input) -// } -// StorageSlot::Node(StorageSlotNode::Struct(parent, _)) => match *parent.clone() { -// // Simple Struct -// StorageSlot::Simple(slot) => { -// let metadata_digest = compute_leaf_single_metadata_digest::< -// TEST_MAX_COLUMNS, -// TEST_MAX_FIELD_PER_EVM, -// >(table_info.to_vec()); - -// let values_digest = 
compute_leaf_single_values_digest::( -// table_info.to_vec(), -// &extracted_column_identifiers, -// value, -// ); - -// let circuit_input = CircuitInput::new_single_variable_leaf( -// node, -// slot as u8, -// evm_word, -// table_info.to_vec(), -// ); - -// (metadata_digest, values_digest, circuit_input) -// } -// // Mapping Struct -// StorageSlot::Mapping(mapping_key, slot) => { -// let outer_key_id = test_slot.outer_key_id_raw(id_extra).unwrap(); -// let metadata_digest = -// compute_leaf_mapping_metadata_digest::< -// TEST_MAX_COLUMNS, -// TEST_MAX_FIELD_PER_EVM, -// >(table_info.to_vec(), slot as u8, outer_key_id); - -// let values_digest = compute_leaf_mapping_values_digest::( -// table_info.to_vec(), -// &extracted_column_identifiers, -// value, -// mapping_key.clone(), -// evm_word, -// outer_key_id, -// ); - -// let circuit_input = CircuitInput::new_mapping_variable_leaf( -// node, -// slot as u8, -// mapping_key, -// outer_key_id, -// evm_word, -// table_info.to_vec(), -// ); - -// (metadata_digest, values_digest, circuit_input) -// } -// // Mapping of mappings Struct -// StorageSlot::Node(StorageSlotNode::Mapping(grand, inner_mapping_key)) => { -// match *grand { -// StorageSlot::Mapping(outer_mapping_key, slot) => { -// let outer_key_id = -// test_slot.outer_key_id_raw(id_extra.clone()).unwrap(); -// let inner_key_id = test_slot.inner_key_id_raw(id_extra).unwrap(); -// let metadata_digest = -// compute_leaf_mapping_of_mappings_metadata_digest::< -// TEST_MAX_COLUMNS, -// TEST_MAX_FIELD_PER_EVM, -// >( -// table_info.to_vec(), slot as u8, outer_key_id, inner_key_id -// ); - -// let values_digest = compute_leaf_mapping_of_mappings_values_digest::< -// TEST_MAX_FIELD_PER_EVM, -// >( -// table_info.to_vec(), -// &extracted_column_identifiers, -// value, -// evm_word, -// outer_mapping_key.clone(), -// inner_mapping_key.clone(), -// outer_key_id, -// inner_key_id, -// ); - -// let circuit_input = CircuitInput::new_mapping_of_mappings_leaf( -// node, -// slot 
as u8, -// outer_mapping_key, -// inner_mapping_key, -// outer_key_id, -// inner_key_id, -// evm_word, -// table_info.to_vec(), -// ); - -// (metadata_digest, values_digest, circuit_input) -// } -// _ => unreachable!(), -// } -// } -// _ => unreachable!(), -// }, -// _ => unreachable!(), -// }; - -// let proof = generate_proof(params, circuit_input).unwrap(); - -// // Check the metadata digest of public inputs. -// let decoded_proof = ProofWithVK::deserialize(&proof).unwrap(); -// let pi = PublicInputs::new(&decoded_proof.proof.public_inputs); -// assert_eq!( -// pi.metadata_digest(), -// expected_metadata_digest.to_weierstrass() -// ); -// assert_eq!(pi.values_digest(), expected_values_digest.to_weierstrass()); - -// proof -// } - -// /// Generate a MPT trie with sepcified number of children. -// fn generate_test_trie(num_children: usize, storage_slot: &StorageSlotInfo) -> TestEthTrie { -// let (mut trie, _) = generate_random_storage_mpt::<3, 32>(); - -// let mut mpt_key = storage_slot.slot.mpt_key_vec(); -// let mpt_len = mpt_key.len(); -// let last_byte = mpt_key[mpt_len - 1]; -// let first_nibble = last_byte & 0xF0; -// let second_nibble = last_byte & 0x0F; - -// // Generate the test MPT keys. -// let mut mpt_keys = Vec::new(); -// for i in 0..num_children { -// // Only change the last nibble. -// mpt_key[mpt_len - 1] = first_nibble + ((second_nibble + i as u8) & 0x0F); -// mpt_keys.push(mpt_key.clone()); -// } - -// // Add the MPT keys to the trie. -// let value = rlp::encode(&random_vector(32)).to_vec(); -// mpt_keys -// .iter() -// .for_each(|key| trie.insert(key, &value).unwrap()); -// trie.root_hash().unwrap(); - -// TestEthTrie { trie, mpt_keys } -// } - -// /// Test the proof generation of one branch with the specified number of children. 
-// fn test_branch_with_multiple_children(num_children: usize, test_slot: StorageSlotInfo) { -// info!("Generating test trie"); -// let mut test_trie = generate_test_trie(num_children, &test_slot); - -// let mpt_key1 = test_trie.mpt_keys[0].as_slice(); -// let mpt_key2 = test_trie.mpt_keys[1].as_slice(); -// let proof1 = test_trie.trie.get_proof(mpt_key1).unwrap(); -// let proof2 = test_trie.trie.get_proof(mpt_key2).unwrap(); -// let node_len = proof1.len(); -// // Get the branch node. -// let branch_node = proof1[node_len - 2].clone(); -// // Ensure both are located in the same branch. -// assert_eq!(node_len, proof2.len()); -// assert_eq!(branch_node, proof2[node_len - 2]); - -// info!("Generating parameters"); -// let params = build_circuits_params(); - -// // Generate the branch proof with one leaf. -// println!("Generating leaf proof"); -// let leaf_proof_buf1 = prove_leaf(¶ms, proof1[node_len - 1].clone(), test_slot); -// let leaf_proof1 = ProofWithVK::deserialize(&leaf_proof_buf1).unwrap(); -// let pub1 = leaf_proof1.proof.public_inputs[..NUM_IO].to_vec(); -// let pi1 = PublicInputs::new(&pub1); -// assert_eq!(pi1.proof_inputs.len(), NUM_IO); -// let (_, comp_ptr) = pi1.mpt_key_info(); -// assert_eq!(comp_ptr, F::from_canonical_usize(63)); -// println!("Generating branch proof with one leaf"); -// let branch_proof = -// prove_branch(¶ms, branch_node.clone(), vec![leaf_proof_buf1.clone()]); -// let branch_proof = ProofWithVK::deserialize(&branch_proof).unwrap(); -// let exp_vk = params.branches.b1.get_verifier_data(); -// assert_eq!(branch_proof.verifier_data(), exp_vk); - -// // Generate a fake proof for testing branch circuit. 
-// let gen_fake_proof = |mpt_key| { -// let mut pub2 = pub1.clone(); -// assert_eq!(pub2.len(), NUM_IO); -// pub2[public_inputs::K_RANGE].copy_from_slice( -// &bytes_to_nibbles(mpt_key) -// .into_iter() -// .map(F::from_canonical_u8) -// .collect_vec(), -// ); -// assert_eq!(pub2.len(), pub1.len()); - -// let pi2 = PublicInputs::new(&pub2); -// { -// let (k1, p1) = pi1.mpt_key_info(); -// let (k2, p2) = pi2.mpt_key_info(); -// let (pt1, pt2) = ( -// p1.to_canonical_u64() as usize, -// p2.to_canonical_u64() as usize, -// ); -// assert!(pt1 < k1.len() && pt2 < k2.len()); -// assert!(p1 == p2); -// assert!(k1[..pt1] == k2[..pt2]); -// } -// let fake_proof = params -// .set -// .generate_input_proofs([pub2.clone().try_into().unwrap()]) -// .unwrap(); -// let vk = params.set.verifier_data_for_input_proofs::<1>()[0].clone(); -// ProofWithVK::from((fake_proof[0].clone(), vk)) -// .serialize() -// .unwrap() -// }; - -// // Check the public input of branch proof. -// let check_branch_public_inputs = |num_children, branch_proof: &ProofWithVK| { -// let [leaf_pi, branch_pi] = [&leaf_proof1, branch_proof] -// .map(|proof| PublicInputs::new(&proof.proof().public_inputs[..NUM_IO])); - -// let leaf_metadata_digest = leaf_pi.metadata_digest(); -// let leaf_values_digest = weierstrass_to_point(&leaf_pi.values_digest()); -// let branch_values_digest = -// (0..num_children).fold(Point::NEUTRAL, |acc, _| acc + leaf_values_digest); -// assert_eq!(branch_pi.metadata_digest(), leaf_metadata_digest); -// assert_eq!( -// branch_pi.values_digest(), -// branch_values_digest.to_weierstrass() -// ); -// assert_eq!(branch_pi.n(), F::from_canonical_usize(num_children)); -// }; - -// info!("Generating branch with two leaves"); -// let leaf_proof_buf2 = gen_fake_proof(mpt_key2); -// let branch_proof = prove_branch( -// ¶ms, -// branch_node.clone(), -// vec![leaf_proof_buf1.clone(), leaf_proof_buf2.clone()], -// ); -// let branch_proof = ProofWithVK::deserialize(&branch_proof).unwrap(); -// let 
exp_vk = params.branches.b4.get_verifier_data().clone(); -// assert_eq!(branch_proof.verifier_data(), &exp_vk); -// check_branch_public_inputs(2, &branch_proof); - -// // Generate `num_children - 2`` fake proofs. -// let mut leaf_proofs = vec![leaf_proof_buf1, leaf_proof_buf2]; -// for i in 2..num_children { -// let leaf_proof = gen_fake_proof(test_trie.mpt_keys[i].as_slice()); -// leaf_proofs.push(leaf_proof); -// } -// info!("Generating branch proof with {num_children} leaves"); -// let branch_proof = prove_branch(¶ms, branch_node, leaf_proofs); -// let branch_proof = ProofWithVK::deserialize(&branch_proof).unwrap(); -// let exp_vk = params.branches.b9.get_verifier_data().clone(); -// assert_eq!(branch_proof.verifier_data(), &exp_vk); -// check_branch_public_inputs(num_children, &branch_proof); -// } -// } +#[cfg(test)] +mod tests { + use super::{ + super::{public_inputs, StorageSlotInfo}, + *, + }; + use crate::{tests::TEST_MAX_COLUMNS, MAX_RECEIPT_LEAF_NODE_LEN}; + use alloy::primitives::Address; + use eth_trie::{EthTrie, MemoryDB, Trie}; + use itertools::Itertools; + use log::info; + use mp2_common::{ + eth::{left_pad32, StorageSlot, StorageSlotNode}, + group_hashing::weierstrass_to_point, + mpt_sequential::utils::bytes_to_nibbles, + types::MAPPING_LEAF_VALUE_LEN, + }; + use mp2_test::{ + mpt_sequential::{generate_random_storage_mpt, generate_receipt_test_info}, + utils::random_vector, + }; + use plonky2::field::types::Field; + use plonky2_ecgfp5::curve::curve::Point; + use rand::{thread_rng, Rng}; + use std::{str::FromStr, sync::Arc}; + + type CircuitInput = super::CircuitInput; + type PublicParameters = super::PublicParameters; + + #[derive(Debug)] + struct TestEthTrie { + trie: EthTrie, + mpt_keys: Vec>, + } + + #[test] + fn test_values_extraction_api_single_variable() { + const TEST_SLOTS: [u8; 2] = [5, 10]; + + let _ = env_logger::try_init(); + + let storage_slot1 = StorageSlot::Simple(TEST_SLOTS[0] as usize); + let storage_slot2 = 
StorageSlot::Simple(TEST_SLOTS[1] as usize); + + let table_info = TEST_SLOTS + .into_iter() + .map(|slot| { + ExtractedColumnInfo::sample( + true, + &[ + F::ZERO, + F::ZERO, + F::ZERO, + F::ZERO, + F::ZERO, + F::ZERO, + F::ZERO, + F::from_canonical_u8(slot), + ], + F::ZERO, + ) + }) + .collect_vec(); + + let test_slots = [ + StorageSlotInfo::new(storage_slot1, table_info.clone()), + StorageSlotInfo::new(storage_slot2, table_info), + ]; + + test_api(test_slots); + } + + #[test] + fn test_values_extraction_api_single_struct() { + const TEST_SLOT: u8 = 2; + const TEST_EVM_WORDS: [u32; 2] = [10, 20]; + + let _ = env_logger::try_init(); + + let parent_slot = StorageSlot::Simple(TEST_SLOT as usize); + let storage_slot1 = StorageSlot::Node(StorageSlotNode::new_struct( + parent_slot.clone(), + TEST_EVM_WORDS[0], + )); + let storage_slot2 = + StorageSlot::Node(StorageSlotNode::new_struct(parent_slot, TEST_EVM_WORDS[1])); + + let table_info = TEST_EVM_WORDS + .into_iter() + .map(|evm_word| { + ExtractedColumnInfo::sample( + true, + &[ + F::ZERO, + F::ZERO, + F::ZERO, + F::ZERO, + F::ZERO, + F::ZERO, + F::ZERO, + F::from_canonical_u8(TEST_SLOT), + ], + F::from_canonical_u32(evm_word), + ) + }) + .collect_vec(); + + let test_slots = [ + StorageSlotInfo::new(storage_slot1, table_info.clone()), + StorageSlotInfo::new(storage_slot2, table_info), + ]; + + test_api(test_slots); + } + + #[test] + fn test_values_extraction_api_mapping_variable() { + const TEST_SLOT: u8 = 2; + + let _ = env_logger::try_init(); + + let mapping_key1 = vec![10]; + let mapping_key2 = vec![20]; + let storage_slot1 = StorageSlot::Mapping(mapping_key1, TEST_SLOT as usize); + let storage_slot2 = StorageSlot::Mapping(mapping_key2, TEST_SLOT as usize); + + // The first and second column infos are same (only for testing). 
+ let table_info = [0u32; 2] + .into_iter() + .map(|evm_word| { + ExtractedColumnInfo::sample( + true, + &[ + F::ZERO, + F::ZERO, + F::ZERO, + F::ZERO, + F::ZERO, + F::ZERO, + F::ZERO, + F::from_canonical_u8(TEST_SLOT), + ], + F::from_canonical_u32(evm_word), + ) + }) + .collect_vec(); + + let test_slots = [ + StorageSlotInfo::new(storage_slot1, table_info.clone()), + StorageSlotInfo::new(storage_slot2, table_info), + ]; + + test_api(test_slots); + } + + #[test] + fn test_values_extraction_api_mapping_struct() { + const TEST_SLOT: u8 = 2; + const TEST_EVM_WORDS: [u32; 2] = [10, 20]; + + let _ = env_logger::try_init(); + + let parent_slot = StorageSlot::Mapping(vec![10, 20], TEST_SLOT as usize); + let storage_slot1 = StorageSlot::Node(StorageSlotNode::new_struct( + parent_slot.clone(), + TEST_EVM_WORDS[0], + )); + let storage_slot2 = + StorageSlot::Node(StorageSlotNode::new_struct(parent_slot, TEST_EVM_WORDS[1])); + + let table_info = TEST_EVM_WORDS + .into_iter() + .map(|evm_word| { + ExtractedColumnInfo::sample( + true, + &[ + F::ZERO, + F::ZERO, + F::ZERO, + F::ZERO, + F::ZERO, + F::ZERO, + F::ZERO, + F::from_canonical_u8(TEST_SLOT), + ], + F::from_canonical_u32(evm_word), + ) + }) + .collect_vec(); + + let test_slots = [ + StorageSlotInfo::new(storage_slot1, table_info.clone()), + StorageSlotInfo::new(storage_slot2, table_info), + ]; + + test_api(test_slots); + } + + #[test] + fn test_values_extraction_api_mapping_of_mappings() { + const TEST_SLOT: u8 = 2; + const TEST_EVM_WORDS: [u32; 2] = [10, 20]; + + let _ = env_logger::try_init(); + + let grand_slot = StorageSlot::Mapping(vec![10, 20], TEST_SLOT as usize); + let parent_slot = + StorageSlot::Node(StorageSlotNode::new_mapping(grand_slot, vec![30, 40]).unwrap()); + let storage_slot1 = StorageSlot::Node(StorageSlotNode::new_struct( + parent_slot.clone(), + TEST_EVM_WORDS[0], + )); + let storage_slot2 = + StorageSlot::Node(StorageSlotNode::new_struct(parent_slot, TEST_EVM_WORDS[1])); + + let table_info = 
TEST_EVM_WORDS + .into_iter() + .map(|evm_word| { + ExtractedColumnInfo::sample( + true, + &[ + F::ZERO, + F::ZERO, + F::ZERO, + F::ZERO, + F::ZERO, + F::ZERO, + F::ZERO, + F::from_canonical_u8(TEST_SLOT), + ], + F::from_canonical_u32(evm_word), + ) + }) + .collect_vec(); + + let test_slots = [ + StorageSlotInfo::new(storage_slot1, table_info.clone()), + StorageSlotInfo::new(storage_slot2, table_info), + ]; + + test_api(test_slots); + } + + #[test] + fn test_values_extraction_api_branch_with_multiple_children() { + const TEST_SLOT: u8 = 2; + const NUM_CHILDREN: usize = 6; + + let _ = env_logger::try_init(); + + let storage_slot = StorageSlot::Simple(TEST_SLOT as usize); + let table_info = { + vec![ExtractedColumnInfo::sample( + true, + &[ + F::ZERO, + F::ZERO, + F::ZERO, + F::ZERO, + F::ZERO, + F::ZERO, + F::ZERO, + F::from_canonical_u8(TEST_SLOT), + ], + F::ZERO, + )] + }; + let test_slot = StorageSlotInfo::new(storage_slot, table_info); + + test_branch_with_multiple_children(NUM_CHILDREN, test_slot); + } + + #[test] + fn test_values_extraction_api_serialization() { + const TEST_SLOT: u8 = 10; + const TEST_EVM_WORD: u32 = 5; + const TEST_OUTER_KEY: [u8; 2] = [10, 20]; + const TEST_INNER_KEY: [u8; 3] = [30, 40, 50]; + + let _ = env_logger::try_init(); + + let rng = &mut thread_rng(); + + // Test serialization for public parameters. + let params = PublicParameters::build(); + let encoded = bincode::serialize(¶ms).unwrap(); + let decoded_params: PublicParameters = bincode::deserialize(&encoded).unwrap(); + assert!(decoded_params == params); + + let test_circuit_input = |input: CircuitInput| { + // Test circuit input serialization. + let encoded_input = bincode::serialize(&input).unwrap(); + let decoded_input: CircuitInput = bincode::deserialize(&encoded_input).unwrap(); + + // Test proof serialization. 
+ let proof = params.generate_proof(decoded_input).unwrap(); + let encoded_proof = bincode::serialize(&proof).unwrap(); + let decoded_proof: ProofWithVK = bincode::deserialize(&encoded_proof).unwrap(); + assert_eq!(proof, decoded_proof); + + encoded_proof + }; + + // Construct the table info for testing. + let table_info = { + vec![ExtractedColumnInfo::sample( + true, + &[ + F::ZERO, + F::ZERO, + F::ZERO, + F::ZERO, + F::ZERO, + F::ZERO, + F::ZERO, + F::from_canonical_u8(TEST_SLOT), + ], + F::from_canonical_u32(TEST_EVM_WORD), + )] + }; + + // Test for single variable leaf. + let parent_slot = StorageSlot::Simple(TEST_SLOT as usize); + let storage_slot = StorageSlot::Node(StorageSlotNode::new_struct( + parent_slot.clone(), + TEST_EVM_WORD, + )); + let test_slot = StorageSlotInfo::new(storage_slot, table_info.clone()); + let mut test_trie = generate_test_trie(1, &test_slot); + let proof = test_trie.trie.get_proof(&test_trie.mpt_keys[0]).unwrap(); + test_circuit_input(CircuitInput::new_single_variable_leaf( + proof.last().unwrap().to_vec(), + TEST_SLOT, + TEST_EVM_WORD, + table_info.clone(), + )); + + // Test for mapping variable leaf. + let parent_slot = StorageSlot::Mapping(TEST_OUTER_KEY.to_vec(), TEST_SLOT as usize); + let storage_slot = StorageSlot::Node(StorageSlotNode::new_struct( + parent_slot.clone(), + TEST_EVM_WORD, + )); + let test_slot = StorageSlotInfo::new(storage_slot, table_info.clone()); + let mut test_trie = generate_test_trie(1, &test_slot); + let proof = test_trie.trie.get_proof(&test_trie.mpt_keys[0]).unwrap(); + let key_id = rng.gen(); + test_circuit_input(CircuitInput::new_mapping_variable_leaf( + proof.last().unwrap().to_vec(), + TEST_SLOT, + TEST_OUTER_KEY.to_vec(), + key_id, + TEST_EVM_WORD, + table_info.clone(), + )); + + // Test for mapping of mappings leaf. 
+ let grand_slot = StorageSlot::Mapping(TEST_OUTER_KEY.to_vec(), TEST_SLOT as usize); + let parent_slot = StorageSlot::Node( + StorageSlotNode::new_mapping(grand_slot, TEST_INNER_KEY.to_vec()).unwrap(), + ); + let storage_slot = + StorageSlot::Node(StorageSlotNode::new_struct(parent_slot, TEST_EVM_WORD)); + let test_slot = StorageSlotInfo::new(storage_slot, table_info.clone()); + let mut test_trie = generate_test_trie(2, &test_slot); + let proof = test_trie.trie.get_proof(&test_trie.mpt_keys[0]).unwrap(); + let outer_key_id = rng.gen(); + let inner_key_id = rng.gen(); + let encoded = test_circuit_input(CircuitInput::new_mapping_of_mappings_leaf( + proof.last().unwrap().to_vec(), + TEST_SLOT, + TEST_OUTER_KEY.to_vec(), + TEST_INNER_KEY.to_vec(), + outer_key_id, + inner_key_id, + TEST_EVM_WORD, + table_info, + )); + + // Test for branch. + let branch_node = proof[proof.len() - 2].to_vec(); + test_circuit_input(CircuitInput::Branch(BranchInput { + input: InputNode { + node: branch_node.clone(), + }, + serialized_child_proofs: vec![encoded], + })); + } + + fn test_api(test_slots: [StorageSlotInfo; 2]) { + info!("Generating MPT proofs"); + let memdb = Arc::new(MemoryDB::new(true)); + let mut trie = EthTrie::new(memdb.clone()); + let mpt_keys = test_slots + .iter() + .map(|test_slot| { + let mpt_key = test_slot.slot.mpt_key(); + let value = random_vector(MAPPING_LEAF_VALUE_LEN); + trie.insert(&mpt_key, &rlp::encode(&value)).unwrap(); + mpt_key + }) + .collect_vec(); + trie.root_hash().unwrap(); + let mpt_proofs = mpt_keys + .into_iter() + .map(|key| trie.get_proof(&key).unwrap()) + .collect_vec(); + // Get the branch node. + let node_len = mpt_proofs[0].len(); + // Ensure both are located in the same branch. 
+ assert_eq!(node_len, mpt_proofs[1].len()); + let branch_node = mpt_proofs[0][node_len - 2].clone(); + assert_eq!(branch_node, mpt_proofs[1][node_len - 2]); + + info!("Generating parameters"); + let params = build_circuits_params(); + + let leaf_proofs = test_slots + .into_iter() + .zip_eq(mpt_proofs) + .enumerate() + .map(|(i, (test_slot, mut leaf_proof))| { + info!("Proving leaf {i}"); + prove_leaf(¶ms, leaf_proof.pop().unwrap(), test_slot) + }) + .collect(); + + info!("Proving branch"); + let _branch_proof = prove_branch(¶ms, branch_node, leaf_proofs); + } + + /// Generate a branch proof. + fn prove_branch( + params: &PublicParameters, + node: Vec, + leaf_proofs: Vec>, + ) -> Vec { + let input = CircuitInput::new_branch(node, leaf_proofs); + generate_proof(params, input).unwrap() + } + #[test] + fn test_receipt_api() { + let receipt_proof_infos = generate_receipt_test_info::<1, 0>(); + let receipt_proofs = receipt_proof_infos.proofs(); + let query = receipt_proof_infos.query(); + // We need two nodes that are children of the same branch so we compare the last but two nodes for each of them until we find a case that works + let (info_one, info_two) = if let Some((one, two)) = receipt_proofs + .iter() + .enumerate() + .find_map(|(i, current_proof)| { + let current_node_second_to_last = + current_proof.mpt_proof[current_proof.mpt_proof.len() - 2].clone(); + receipt_proofs + .iter() + .skip(i + 1) + .find(|find_info| { + find_info.mpt_proof[find_info.mpt_proof.len() - 2].clone() + == current_node_second_to_last + }) + .map(|matching| (current_proof, matching)) + }) { + (one, two) + } else { + panic!("No relevant events with same branch node parent") + }; + + let proof_length_1 = info_one.mpt_proof.len(); + let proof_length_2 = info_two.mpt_proof.len(); + + let list_one = rlp::decode_list::>(&info_one.mpt_proof[proof_length_1 - 2]); + let list_two = rlp::decode_list::>(&info_two.mpt_proof[proof_length_2 - 2]); + + assert_eq!(list_one, list_two); + 
assert!(list_one.len() == 17); + + println!("Generating params..."); + let params = build_circuits_params(); + + println!("Proving leaf 1..."); + let leaf_input_1 = CircuitInput::new_receipt_leaf(info_one, &query.event); + let now = std::time::Instant::now(); + let leaf_proof1 = generate_proof(¶ms, leaf_input_1).unwrap(); + { + let lp = ProofWithVK::deserialize(&leaf_proof1).unwrap(); + let pub1 = PublicInputs::new(&lp.proof.public_inputs); + let (_, ptr) = pub1.mpt_key_info(); + println!("pointer: {}", ptr); + } + println!( + "Proof for leaf 1 generated in {} ms", + now.elapsed().as_millis() + ); + + println!("Proving leaf 2..."); + let leaf_input_2 = CircuitInput::new_receipt_leaf(info_two, &query.event); + let now = std::time::Instant::now(); + let leaf_proof2 = generate_proof(¶ms, leaf_input_2).unwrap(); + println!( + "Proof for leaf 2 generated in {} ms", + now.elapsed().as_millis() + ); + + // The branch case for receipts is identical to that of a mapping so we use the same api. + println!("Proving branch..."); + let branch_input = CircuitInput::new_branch( + info_two.mpt_proof[proof_length_1 - 2].clone(), + vec![leaf_proof1, leaf_proof2], + ); + + let now = std::time::Instant::now(); + generate_proof(¶ms, branch_input).unwrap(); + println!( + "Proof for branch node generated in {} ms", + now.elapsed().as_millis() + ); + } + + /// Generate a leaf proof. + fn prove_leaf(params: &PublicParameters, node: Vec, test_slot: StorageSlotInfo) -> Vec { + // RLP(RLP(compact(partial_key_in_nibble)), RLP(value)) + let leaf_tuple: Vec> = rlp::decode_list(&node); + assert_eq!(leaf_tuple.len(), 2); + let value: [u8; 32] = leaf_tuple[1][1..].to_vec().try_into().unwrap(); + + let evm_word = test_slot.evm_word(); + let location_offset = F::from_canonical_u32(evm_word); + let table_info = test_slot.table_info(); + + // Build the identifier extra data, it's used to compute the key IDs. 
+ const TEST_CONTRACT_ADDRESS: &str = "0x105dD0eF26b92a3698FD5AaaF688577B9Cafd970"; + const TEST_CHAIN_ID: u64 = 1000; + + let contract_address = Address::from_str(TEST_CONTRACT_ADDRESS).unwrap(); + + let metadata = test_slot.table_columns(&contract_address, TEST_CHAIN_ID, vec![]); + + let (expected_metadata_digest, expected_values_digest, circuit_input) = + match &test_slot.slot { + // Simple variable slot + StorageSlot::Simple(slot) => { + let metadata_digest = metadata.digest(); + let values_digest = metadata.storage_values_digest( + &[], + value.as_slice(), + &[*slot as u8], + location_offset, + ); + + let circuit_input = CircuitInput::new_single_variable_leaf( + node, + *slot as u8, + evm_word, + table_info.to_vec(), + ); + + (metadata_digest, values_digest, circuit_input) + } + // Mapping variable + StorageSlot::Mapping(mapping_key, slot) => { + let padded_key = left_pad32(mapping_key); + let metadata_digest = metadata.digest(); + let values_digest = metadata.storage_values_digest( + &[&padded_key], + value.as_slice(), + &[*slot as u8], + location_offset, + ); + + let outer_key_id = metadata.input_columns()[0].identifier().0; + + let circuit_input = CircuitInput::new_mapping_variable_leaf( + node, + *slot as u8, + mapping_key.clone(), + outer_key_id, + evm_word, + table_info.to_vec(), + ); + + (metadata_digest, values_digest, circuit_input) + } + StorageSlot::Node(StorageSlotNode::Struct(parent, _)) => match *parent.clone() { + // Simple Struct + StorageSlot::Simple(slot) => { + let metadata_digest = metadata.digest(); + let values_digest = metadata.storage_values_digest( + &[], + value.as_slice(), + &[slot as u8], + location_offset, + ); + + let circuit_input = CircuitInput::new_single_variable_leaf( + node, + slot as u8, + evm_word, + table_info.to_vec(), + ); + + (metadata_digest, values_digest, circuit_input) + } + // Mapping Struct + StorageSlot::Mapping(mapping_key, slot) => { + let padded_key = left_pad32(&mapping_key); + let metadata_digest = 
metadata.digest(); + let values_digest = metadata.storage_values_digest( + &[&padded_key], + value.as_slice(), + &[slot as u8], + location_offset, + ); + + let outer_key_id = metadata.input_columns()[0].identifier().0; + + let circuit_input = CircuitInput::new_mapping_variable_leaf( + node, + slot as u8, + mapping_key, + outer_key_id, + evm_word, + table_info.to_vec(), + ); + + (metadata_digest, values_digest, circuit_input) + } + // Mapping of mappings Struct + StorageSlot::Node(StorageSlotNode::Mapping(grand, inner_mapping_key)) => { + match *grand { + StorageSlot::Mapping(outer_mapping_key, slot) => { + let padded_outer_key = left_pad32(&outer_mapping_key); + let padded_inner_key = left_pad32(&inner_mapping_key); + let metadata_digest = metadata.digest(); + let values_digest = metadata.storage_values_digest( + &[&padded_outer_key, &padded_inner_key], + value.as_slice(), + &[slot as u8], + location_offset, + ); + + let key_ids = metadata + .input_columns() + .iter() + .map(|col| col.identifier().0) + .collect::>(); + + let circuit_input = CircuitInput::new_mapping_of_mappings_leaf( + node, + slot as u8, + outer_mapping_key, + inner_mapping_key, + key_ids[0], + key_ids[1], + evm_word, + table_info.to_vec(), + ); + + (metadata_digest, values_digest, circuit_input) + } + _ => unreachable!(), + } + } + _ => unreachable!(), + }, + _ => unreachable!(), + }; + + let proof = generate_proof(params, circuit_input).unwrap(); + + // Check the metadata digest of public inputs. + let decoded_proof = ProofWithVK::deserialize(&proof).unwrap(); + let pi = PublicInputs::new(&decoded_proof.proof.public_inputs); + assert_eq!( + pi.metadata_digest(), + expected_metadata_digest.to_weierstrass() + ); + assert_eq!(pi.values_digest(), expected_values_digest.to_weierstrass()); + + proof + } + + /// Generate a MPT trie with sepcified number of children. 
+ fn generate_test_trie(num_children: usize, storage_slot: &StorageSlotInfo) -> TestEthTrie { + let (mut trie, _) = generate_random_storage_mpt::<3, 32>(); + + let mut mpt_key = storage_slot.slot.mpt_key_vec(); + let mpt_len = mpt_key.len(); + let last_byte = mpt_key[mpt_len - 1]; + let first_nibble = last_byte & 0xF0; + let second_nibble = last_byte & 0x0F; + + // Generate the test MPT keys. + let mut mpt_keys = Vec::new(); + for i in 0..num_children { + // Only change the last nibble. + mpt_key[mpt_len - 1] = first_nibble + ((second_nibble + i as u8) & 0x0F); + mpt_keys.push(mpt_key.clone()); + } + + // Add the MPT keys to the trie. + let value = rlp::encode(&random_vector(32)).to_vec(); + mpt_keys + .iter() + .for_each(|key| trie.insert(key, &value).unwrap()); + trie.root_hash().unwrap(); + + TestEthTrie { trie, mpt_keys } + } + + /// Test the proof generation of one branch with the specified number of children. + fn test_branch_with_multiple_children(num_children: usize, test_slot: StorageSlotInfo) { + info!("Generating test trie"); + let mut test_trie = generate_test_trie(num_children, &test_slot); + + let mpt_key1 = test_trie.mpt_keys[0].as_slice(); + let mpt_key2 = test_trie.mpt_keys[1].as_slice(); + let proof1 = test_trie.trie.get_proof(mpt_key1).unwrap(); + let proof2 = test_trie.trie.get_proof(mpt_key2).unwrap(); + let node_len = proof1.len(); + // Get the branch node. + let branch_node = proof1[node_len - 2].clone(); + // Ensure both are located in the same branch. + assert_eq!(node_len, proof2.len()); + assert_eq!(branch_node, proof2[node_len - 2]); + + info!("Generating parameters"); + let params = build_circuits_params(); + + // Generate the branch proof with one leaf. 
+ println!("Generating leaf proof"); + let leaf_proof_buf1 = prove_leaf(¶ms, proof1[node_len - 1].clone(), test_slot); + let leaf_proof1 = ProofWithVK::deserialize(&leaf_proof_buf1).unwrap(); + let pub1 = leaf_proof1.proof.public_inputs[..NUM_IO].to_vec(); + let pi1 = PublicInputs::new(&pub1); + assert_eq!(pi1.proof_inputs.len(), NUM_IO); + let (_, comp_ptr) = pi1.mpt_key_info(); + assert_eq!(comp_ptr, F::from_canonical_usize(63)); + println!("Generating branch proof with one leaf"); + let branch_proof = + prove_branch(¶ms, branch_node.clone(), vec![leaf_proof_buf1.clone()]); + let branch_proof = ProofWithVK::deserialize(&branch_proof).unwrap(); + let exp_vk = params.branches.b1.get_verifier_data(); + assert_eq!(branch_proof.verifier_data(), exp_vk); + + // Generate a fake proof for testing branch circuit. + let gen_fake_proof = |mpt_key| { + let mut pub2 = pub1.clone(); + assert_eq!(pub2.len(), NUM_IO); + pub2[public_inputs::K_RANGE].copy_from_slice( + &bytes_to_nibbles(mpt_key) + .into_iter() + .map(F::from_canonical_u8) + .collect_vec(), + ); + assert_eq!(pub2.len(), pub1.len()); + + let pi2 = PublicInputs::new(&pub2); + { + let (k1, p1) = pi1.mpt_key_info(); + let (k2, p2) = pi2.mpt_key_info(); + let (pt1, pt2) = ( + p1.to_canonical_u64() as usize, + p2.to_canonical_u64() as usize, + ); + assert!(pt1 < k1.len() && pt2 < k2.len()); + assert!(p1 == p2); + assert!(k1[..pt1] == k2[..pt2]); + } + let fake_proof = params + .set + .generate_input_proofs([pub2.clone().try_into().unwrap()]) + .unwrap(); + let vk = params.set.verifier_data_for_input_proofs::<1>()[0].clone(); + ProofWithVK::from((fake_proof[0].clone(), vk)) + .serialize() + .unwrap() + }; + + // Check the public input of branch proof. 
+ let check_branch_public_inputs = |num_children, branch_proof: &ProofWithVK| { + let [leaf_pi, branch_pi] = [&leaf_proof1, branch_proof] + .map(|proof| PublicInputs::new(&proof.proof().public_inputs[..NUM_IO])); + + let leaf_metadata_digest = leaf_pi.metadata_digest(); + let leaf_values_digest = weierstrass_to_point(&leaf_pi.values_digest()); + let branch_values_digest = + (0..num_children).fold(Point::NEUTRAL, |acc, _| acc + leaf_values_digest); + assert_eq!(branch_pi.metadata_digest(), leaf_metadata_digest); + assert_eq!( + branch_pi.values_digest(), + branch_values_digest.to_weierstrass(), + "Value digests did not agree pi: {:?}, calculated: {:?}", + branch_pi.values_digest(), + branch_values_digest.to_weierstrass(), + ); + assert_eq!(branch_pi.n(), F::from_canonical_usize(num_children)); + }; + + info!("Generating branch with two leaves"); + let leaf_proof_buf2 = gen_fake_proof(mpt_key2); + let branch_proof = prove_branch( + ¶ms, + branch_node.clone(), + vec![leaf_proof_buf1.clone(), leaf_proof_buf2.clone()], + ); + let branch_proof = ProofWithVK::deserialize(&branch_proof).unwrap(); + let exp_vk = params.branches.b4.get_verifier_data().clone(); + assert_eq!(branch_proof.verifier_data(), &exp_vk); + check_branch_public_inputs(2, &branch_proof); + + // Generate `num_children - 2`` fake proofs. 
+ let mut leaf_proofs = vec![leaf_proof_buf1, leaf_proof_buf2]; + for i in 2..num_children { + let leaf_proof = gen_fake_proof(test_trie.mpt_keys[i].as_slice()); + leaf_proofs.push(leaf_proof); + } + info!("Generating branch proof with {num_children} leaves"); + let branch_proof = prove_branch(¶ms, branch_node, leaf_proofs); + let branch_proof = ProofWithVK::deserialize(&branch_proof).unwrap(); + let exp_vk = params.branches.b9.get_verifier_data().clone(); + assert_eq!(branch_proof.verifier_data(), &exp_vk); + check_branch_public_inputs(num_children, &branch_proof); + } +} diff --git a/mp2-v1/src/values_extraction/branch.rs b/mp2-v1/src/values_extraction/branch.rs index ec85c487c..2b2aedbb5 100644 --- a/mp2-v1/src/values_extraction/branch.rs +++ b/mp2-v1/src/values_extraction/branch.rs @@ -9,7 +9,6 @@ use mp2_common::{ mpt_sequential::{advance_key_branch, MPTKeyWire, NIBBLES_TO_BYTES, PAD_LEN}, public_inputs::PublicInputCommon, rlp::{decode_fixed_list, MAX_ITEMS_IN_LIST, MAX_KEY_NIBBLE_LEN}, - serialization::{deserialize, serialize}, types::{CBuilder, GFp}, utils::{less_than, Endianness, PackerTarget}, D, diff --git a/mp2-v1/src/values_extraction/leaf_receipt.rs b/mp2-v1/src/values_extraction/leaf_receipt.rs index b3eb60309..a8b6038ef 100644 --- a/mp2-v1/src/values_extraction/leaf_receipt.rs +++ b/mp2-v1/src/values_extraction/leaf_receipt.rs @@ -12,7 +12,7 @@ use alloy::{ use anyhow::{anyhow, Result}; use mp2_common::{ array::{Array, Targetable, Vector, VectorWire}, - eth::{EventLogInfo, ReceiptProofInfo, ReceiptQuery}, + eth::{EventLogInfo, ReceiptProofInfo}, group_hashing::CircuitBuilderGroupHashing, keccak::{InputData, KeccakCircuit, KeccakWires, HASH_LEN}, mpt_sequential::{MPTKeyWire, MPTReceiptLeafNode, PAD_LEN}, @@ -125,10 +125,10 @@ where [(); PAD_LEN(NODE_LEN)]:, [(); MAX_COLUMNS - 2]:, { - /// Create a new [`ReceiptLeafCircuit`] from a [`ReceiptProofInfo`] and a [`ReceiptQuery`] + /// Create a new [`ReceiptLeafCircuit`] from a [`ReceiptProofInfo`] and a 
[`EventLogInfo`] pub fn new( proof_info: &ReceiptProofInfo, - query: &ReceiptQuery, + event: &EventLogInfo, ) -> Result where [(); MAX_COLUMNS - 2 - NO_TOPICS - MAX_DATA]:, @@ -166,11 +166,11 @@ where let mut bytes = log_rlp.as_raw(); let log = Log::decode(&mut bytes).ok()?; - if log.address == query.contract + if log.address == event.address && log .data .topics() - .contains(&B256::from(query.event.event_signature)) + .contains(&B256::from(event.event_signature)) { Some(logs_offset + log_off) } else { @@ -186,10 +186,10 @@ where event_signature, sig_rel_offset, .. - } = query.event; + } = *event; // Construct the table metadata from the event - let metadata = TableMetadata::::from(query.event); + let metadata = TableMetadata::::from(*event); Ok(Self { node: last_node.clone(), @@ -543,7 +543,8 @@ mod tests { let info = proofs.first().unwrap(); let query = receipt_proof_infos.query(); - let c = ReceiptLeafCircuit::::new::(info, query).unwrap(); + let c = ReceiptLeafCircuit::::new::(info, &query.event) + .unwrap(); let metadata = c.metadata.clone(); let test_circuit = TestReceiptLeafCircuit { c }; diff --git a/mp2-v1/src/values_extraction/mod.rs b/mp2-v1/src/values_extraction/mod.rs index e058d2f3b..f1ba63f17 100644 --- a/mp2-v1/src/values_extraction/mod.rs +++ b/mp2-v1/src/values_extraction/mod.rs @@ -10,17 +10,18 @@ use itertools::Itertools; use alloy::primitives::Address; use mp2_common::{ eth::{left_pad32, EventLogInfo, StorageSlot}, - poseidon::{empty_poseidon_hash, H}, + poseidon::{empty_poseidon_hash, hash_to_int_value, H}, types::{GFp, HashOutput}, utils::{Endianness, Packer, ToFields}, F, }; use plonky2::{ field::types::{Field, PrimeField64}, + hash::hash_types::HashOut, plonk::config::Hasher, }; -use plonky2_ecgfp5::curve::curve::Point; +use plonky2_ecgfp5::curve::{curve::Point, scalar_field::Scalar}; use serde::{Deserialize, Serialize}; use std::iter::once; @@ -33,6 +34,7 @@ mod leaf_mapping; mod leaf_mapping_of_mappings; mod leaf_receipt; mod 
leaf_single; +pub mod planner; pub mod public_inputs; pub use api::{build_circuits_params, generate_proof, CircuitInput, PublicParameters}; @@ -203,6 +205,47 @@ impl ColumnMetadata { &self.extracted_columns } + /// Computes storage values digest + pub fn storage_values_digest( + &self, + input_vals: &[&[u8; 32]], + value: &[u8], + extraction_id: &[u8], + location_offset: F, + ) -> Point { + let (input_vd, row_unique) = self.input_value_digest(input_vals); + + let extract_vd = self.extracted_value_digest(value, extraction_id, location_offset); + + let inputs = if self.input_columns().is_empty() { + empty_poseidon_hash() + .to_fields() + .into_iter() + .chain(once(F::from_canonical_usize( + self.input_columns().len() + self.extracted_columns().len(), + ))) + .collect_vec() + } else { + HashOut::from(row_unique) + .to_fields() + .into_iter() + .chain(once(F::from_canonical_usize( + self.input_columns().len() + self.extracted_columns().len(), + ))) + .collect_vec() + }; + let hash = H::hash_no_pad(&inputs); + let row_id = hash_to_int_value(hash); + + // values_digest = values_digest * row_id + let row_id = Scalar::from_noncanonical_biguint(row_id); + if location_offset.0 == 0 { + (extract_vd + input_vd) * row_id + } else { + extract_vd * row_id + } + } + /// Computes the value digest for a provided value array and the unique row_id pub fn input_value_digest(&self, input_vals: &[&[u8; 32]]) -> (Point, HashOutput) { let point = self @@ -318,11 +361,8 @@ const DATA_NAME: &str = "data"; /// Prefix for transaction index const TX_INDEX_PREFIX: &[u8] = b"tx index"; - -/// Prefix for log number -const LOG_NUMBER_PREFIX: &[u8] = b"log number"; -/// [`LOG_NUMBER_PREFIX`] as a [`str`] -const LOG_NUMBER_NAME: &str = "log number"; +/// [`TX_INDEX_PREFIX`] as a [`str`] +const TX_INDEX_NAME: &str = "tx index"; /// Prefix for gas used const GAS_USED_PREFIX: &[u8] = b"gas used"; @@ -569,3 +609,26 @@ pub fn compute_non_indexed_receipt_column_ids( + event: &EventLogInfo, +) -> 
Vec<(String, GFp)> { + let tx_index_input = [ + event.address.as_slice(), + event.event_signature.as_slice(), + TX_INDEX_PREFIX, + ] + .concat() + .into_iter() + .map(GFp::from_canonical_u8) + .collect::>(); + let tx_index_column_id = ( + TX_INDEX_NAME.to_string(), + H::hash_no_pad(&tx_index_input).elements[0], + ); + + let mut other_ids = compute_non_indexed_receipt_column_ids(event); + other_ids.insert(0, tx_index_column_id); + + other_ids +} diff --git a/mp2-v1/src/values_extraction/planner.rs b/mp2-v1/src/values_extraction/planner.rs new file mode 100644 index 000000000..ca013cb26 --- /dev/null +++ b/mp2-v1/src/values_extraction/planner.rs @@ -0,0 +1,123 @@ +//! This code returns an [`UpdateTree`] used to plan how we prove a series of values was extracted from a Merkle Patricia Trie. +use alloy::{ + network::Ethereum, + primitives::{keccak256, Address, B256}, + providers::RootProvider, + transports::Transport, +}; +use anyhow::Result; +use mp2_common::eth::{EventLogInfo, ReceiptQuery}; +use ryhope::storage::updatetree::UpdateTree; +use std::future::Future; + +/// Trait that is implemented for all data that we can provably extract. 
+pub trait Extractable { + fn create_update_tree( + &self, + contract: Address, + epoch: u64, + provider: &RootProvider, + ) -> impl Future>>; +} + +impl Extractable + for EventLogInfo +{ + async fn create_update_tree( + &self, + contract: Address, + epoch: u64, + provider: &RootProvider, + ) -> Result> { + let query = ReceiptQuery:: { + contract, + event: *self, + }; + + let proofs = query.query_receipt_proofs(provider, epoch.into()).await?; + + // Convert the paths into their keys using keccak + let key_paths = proofs + .iter() + .map(|input| input.mpt_proof.iter().map(keccak256).collect::>()) + .collect::>>(); + + // Now we make the UpdateTree + Ok(UpdateTree::::from_paths(key_paths, epoch as i64)) + } +} + +#[cfg(test)] +pub mod tests { + + use alloy::{eips::BlockNumberOrTag, primitives::Address, providers::ProviderBuilder, sol}; + use anyhow::anyhow; + use mp2_common::eth::BlockUtil; + use mp2_test::eth::get_mainnet_url; + use std::str::FromStr; + + use super::*; + + #[tokio::test] + async fn test_receipt_update_tree() -> Result<()> { + // First get the info we will feed in to our function + let event_info = test_receipt_trie_helper().await?; + + let contract = Address::from_str("0xbd3531da5cf5857e7cfaa92426877b022e612cf8")?; + let epoch: u64 = 21362445; + + let url = get_mainnet_url(); + // get some tx and receipt + let provider = ProviderBuilder::new().on_http(url.parse().unwrap()); + + let update_tree = event_info + .create_update_tree(contract, epoch, &provider) + .await?; + + let block_util = build_test_data().await; + + assert_eq!(*update_tree.root(), block_util.block.header.receipts_root); + Ok(()) + } + + /// Function that fetches a block together with its transaction trie and receipt trie for testing purposes. 
+ async fn build_test_data() -> BlockUtil { + let url = get_mainnet_url(); + // get some tx and receipt + let provider = ProviderBuilder::new().on_http(url.parse().unwrap()); + + // We fetch a specific block which we know includes transactions relating to the PudgyPenguins contract. + BlockUtil::fetch(&provider, BlockNumberOrTag::Number(21362445)) + .await + .unwrap() + } + + /// Function to build a list of [`ReceiptProofInfo`] for a set block. + async fn test_receipt_trie_helper() -> Result> { + // First we choose the contract and event we are going to monitor. + // We use the mainnet PudgyPenguins contract at address 0xbd3531da5cf5857e7cfaa92426877b022e612cf8 + // and monitor for the `Approval` event. + let address = Address::from_str("0xbd3531da5cf5857e7cfaa92426877b022e612cf8")?; + + // We have to create what the event abi looks like + sol! { + #[allow(missing_docs)] + #[sol(rpc, abi)] + contract EventTest { + #[derive(Debug)] + event ApprovalForAll(address indexed owner, address indexed operator, bool approved); + + } + }; + + let approval_event = EventTest::abi::events() + .get("ApprovalForAll") + .ok_or(anyhow!("No ApprovalForAll event exists"))?[0] + .clone(); + + Ok(EventLogInfo::<2, 1>::new( + address, + &approval_event.signature(), + )) + } +} diff --git a/mp2-v1/tests/common/cases/indexing.rs b/mp2-v1/tests/common/cases/indexing.rs index 5e8dfbb00..eed083ab1 100644 --- a/mp2-v1/tests/common/cases/indexing.rs +++ b/mp2-v1/tests/common/cases/indexing.rs @@ -1397,7 +1397,7 @@ impl TableRowValues EventLogInfo<{ Self::NO_TOPICS }, { Self::MAX_DATA }>; fn get_index(&self) -> u64; + + fn to_table_rows( + proof_infos: &[ReceiptProofInfo], + event: &EventLogInfo<{ Self::NO_TOPICS }, { Self::MAX_DATA }>, + block: PrimaryIndex, + ) -> Vec> + where + [(); 7 - 2 - Self::NO_TOPICS - Self::MAX_DATA]:, + { + let metadata = TableMetadata::<7, 2>::from(*event); + + let (_, row_id) = metadata.input_value_digest(&[&[0u8; 32]; 2]); + let input_columns_ids = metadata + 
.input_columns() + .iter() + .map(|col| col.identifier().0) + .collect::>(); + let extracted_column_ids = metadata + .extracted_columns() + .iter() + .map(|col| col.identifier().0) + .collect::>(); + + proof_infos + .iter() + .flat_map(|info| { + let receipt_with_bloom = info.to_receipt().unwrap(); + + let tx_index_cell = Cell::new(input_columns_ids[0], U256::from(info.tx_index)); + + let gas_used_cell = Cell::new( + input_columns_ids[1], + U256::from(receipt_with_bloom.receipt.cumulative_gas_used), + ); + + receipt_with_bloom + .logs() + .iter() + .filter_map(|log| { + if log.address == event.address + && log.topics()[0].0 == event.event_signature + { + Some(log.clone()) + } else { + None + } + }) + .map(|log| { + let (topics, data) = log.data.split(); + let topics_cells = topics + .into_iter() + .skip(1) + .enumerate() + .map(|(j, topic)| Cell::new(extracted_column_ids[j], topic.into())) + .collect::>(); + + let data_start = topics_cells.len(); + let data_cells = data + .chunks(32) + .enumerate() + .map(|(j, data_slice)| { + Cell::new( + extracted_column_ids[data_start + j], + U256::from_be_slice(data_slice), + ) + }) + .collect::>(); + + let secondary = + SecondaryIndexCell::new_from(tx_index_cell, row_id.0.to_vec()); + + let collection = CellsUpdate:: { + previous_row_key: RowTreeKey::default(), + new_row_key: RowTreeKey::from(&secondary), + updated_cells: [vec![gas_used_cell], topics_cells, data_cells].concat(), + primary: block, + }; + + TableRowUpdate::::Insertion(collection, secondary) + }) + .collect::>>() + }) + .collect::>>() + } } impl ReceiptExtractionArgs @@ -760,6 +845,7 @@ where ctx: &'a mut TestContext, contract: &'a Contract, ) -> BoxFuture<'a, Vec>> { + let event = self.get_event(); async move { let contract_update = ReceiptUpdate::new((R::NO_TOPICS as u8, R::MAX_DATA as u8), 5, 15); @@ -774,7 +860,21 @@ where .apply_update(ctx, &contract_update) .await .unwrap(); - vec![] + + let block_number = ctx.block_number().await; + let new_block_number = 
block_number as BlockPrimaryIndex; + + let query = ReceiptQuery::<{ R::NO_TOPICS }, { R::MAX_DATA }> { + contract: contract.address(), + event, + }; + + let proof_infos = query + .query_receipt_proofs(provider.root(), block_number.into()) + .await + .unwrap(); + + R::to_table_rows(&proof_infos, &event, new_block_number) } .boxed() } diff --git a/mp2-v1/tests/common/rowtree.rs b/mp2-v1/tests/common/rowtree.rs index 710c8d8fe..4f26afada 100644 --- a/mp2-v1/tests/common/rowtree.rs +++ b/mp2-v1/tests/common/rowtree.rs @@ -48,7 +48,7 @@ impl SecondaryIndexCell { } pub fn cell(&self) -> Cell { - self.0.clone() + self.0 } pub fn rest(&self) -> RowTreeKeyNonce { self.1.clone() diff --git a/ryhope/src/storage/updatetree.rs b/ryhope/src/storage/updatetree.rs index 72051e148..12e5c5ff0 100644 --- a/ryhope/src/storage/updatetree.rs +++ b/ryhope/src/storage/updatetree.rs @@ -36,6 +36,7 @@ pub struct UpdateTreeNode { /// Whether this node is a leaf of an update path is_path_end: bool, } + impl UpdateTreeNode { fn is_leaf(&self) -> bool { self.children.is_empty() From d7b6ff7789cb5ec1bf07346fd11724160de9506c Mon Sep 17 00:00:00 2001 From: Zack Youell Date: Mon, 30 Dec 2024 14:26:27 +0000 Subject: [PATCH 23/47] Fixed to_receipt method --- mp2-common/src/eth.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mp2-common/src/eth.rs b/mp2-common/src/eth.rs index aee5aa3c4..95ccde307 100644 --- a/mp2-common/src/eth.rs +++ b/mp2-common/src/eth.rs @@ -488,7 +488,7 @@ impl ReceiptProofInfo { .verify_proof(self.mpt_root, &mpt_key, self.mpt_proof.clone())? 
.ok_or(anyhow!("No proof found when verifying"))?; - let rlp_receipt = rlp::Rlp::new(&valid[..]); + let rlp_receipt = rlp::Rlp::new(&valid[1..]); ReceiptWithBloom::decode(&mut rlp_receipt.as_raw()) .map_err(|e| anyhow!("Could not decode receipt got: {}", e)) } From f2a0cd8ba8635ce6d7600f28490e161488a774e0 Mon Sep 17 00:00:00 2001 From: Zack Youell Date: Tue, 31 Dec 2024 10:28:46 +0000 Subject: [PATCH 24/47] Updated ethers receipt compatibility test --- mp2-common/src/eth.rs | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/mp2-common/src/eth.rs b/mp2-common/src/eth.rs index 95ccde307..e0e6b72e4 100644 --- a/mp2-common/src/eth.rs +++ b/mp2-common/src/eth.rs @@ -1308,7 +1308,16 @@ mod test { .get_block_with_txs(blockid) .await? .expect("should have been a block"); - let receipts = provider.get_block_receipts(block.number.unwrap()).await?; + let receipts = provider + .get_block_receipts( + block + .number + .ok_or(anyhow::anyhow!("Couldn't unwrap block number"))?, + ) + .await + .map_err(|e| { + anyhow::anyhow!("Couldn't get ethers block receipts with error: {:?}", e) + })?; let tx_with_receipt = block .transactions From d6939708a95f2d59a8f0b560ec71d686d16a3ddf Mon Sep 17 00:00:00 2001 From: Zack Youell Date: Tue, 31 Dec 2024 12:27:25 +0000 Subject: [PATCH 25/47] Receipt value extraction prover --- mp2-common/src/eth.rs | 38 +++ mp2-v1/src/values_extraction/api.rs | 23 +- mp2-v1/src/values_extraction/leaf_receipt.rs | 28 +- mp2-v1/src/values_extraction/planner.rs | 260 ++++++++++++++++++- mp2-v1/tests/common/cases/indexing.rs | 2 +- ryhope/src/storage/updatetree.rs | 16 +- 6 files changed, 341 insertions(+), 26 deletions(-) diff --git a/mp2-common/src/eth.rs b/mp2-common/src/eth.rs index e0e6b72e4..354b8d358 100644 --- a/mp2-common/src/eth.rs +++ b/mp2-common/src/eth.rs @@ -74,6 +74,44 @@ pub fn extract_child_hashes(rlp_data: &[u8]) -> Vec> { hashes } +/// Enum used to distinguish between different types of node in an MPT. 
+#[derive(Clone, Debug, Copy, PartialEq, Eq, PartialOrd, Ord)] +pub enum NodeType { + Branch, + Extension, + Leaf, +} + +/// Function that returns the [`NodeType`] of an RLP encoded MPT node +pub fn node_type(rlp_data: &[u8]) -> Result { + let rlp = Rlp::new(rlp_data); + + let item_count = rlp.item_count()?; + + if item_count == 17 { + Ok(NodeType::Branch) + } else if item_count == 2 { + // The first item is the encoded path, if it begins with a 2 or 3 it is a leaf, else it is an extension node + let first_item = rlp.at(0)?; + + // We want the first byte + let first_byte = first_item.as_raw()[0]; + + // The we divide by 16 to get the first nibble + match first_byte / 16 { + 0 | 1 => Ok(NodeType::Extension), + 2 | 3 => Ok(NodeType::Leaf), + _ => Err(anyhow!( + "Expected compact encoding beginning with 0,1,2 or 3" + )), + } + } else { + Err(anyhow!( + "RLP encoded Node item count was {item_count}, expected either 17 or 2" + )) + } +} + pub fn left_pad32(slice: &[u8]) -> [u8; 32] { left_pad::<32>(slice) } diff --git a/mp2-v1/src/values_extraction/api.rs b/mp2-v1/src/values_extraction/api.rs index 7db8d4994..a24bc06df 100644 --- a/mp2-v1/src/values_extraction/api.rs +++ b/mp2-v1/src/values_extraction/api.rs @@ -19,7 +19,7 @@ use anyhow::{bail, ensure, Result}; use log::debug; use mp2_common::{ default_config, - eth::{EventLogInfo, ReceiptProofInfo}, + eth::EventLogInfo, mpt_sequential::PAD_LEN, poseidon::H, proof::{ProofInputSerialized, ProofWithVK}, @@ -143,15 +143,18 @@ where /// Create a circuit input for proving a leaf MPT node of a transaction receipt. 
pub fn new_receipt_leaf( - info: &ReceiptProofInfo, + last_node: &[u8], + tx_index: u64, event: &EventLogInfo, ) -> Self where [(); 7 - 2 - NO_TOPICS - MAX_DATA]:, { CircuitInput::LeafReceipt( - ReceiptLeafCircuit::::new::(info, event) - .expect("Could not construct Receipt Leaf Circuit"), + ReceiptLeafCircuit::::new::( + last_node, tx_index, event, + ) + .expect("Could not construct Receipt Leaf Circuit"), ) } @@ -981,7 +984,11 @@ mod tests { let params = build_circuits_params(); println!("Proving leaf 1..."); - let leaf_input_1 = CircuitInput::new_receipt_leaf(info_one, &query.event); + let leaf_input_1 = CircuitInput::new_receipt_leaf( + info_one.mpt_proof.last().unwrap(), + info_one.tx_index, + &query.event, + ); let now = std::time::Instant::now(); let leaf_proof1 = generate_proof(¶ms, leaf_input_1).unwrap(); { @@ -996,7 +1003,11 @@ mod tests { ); println!("Proving leaf 2..."); - let leaf_input_2 = CircuitInput::new_receipt_leaf(info_two, &query.event); + let leaf_input_2 = CircuitInput::new_receipt_leaf( + info_two.mpt_proof.last().unwrap(), + info_two.tx_index, + &query.event, + ); let now = std::time::Instant::now(); let leaf_proof2 = generate_proof(¶ms, leaf_input_2).unwrap(); println!( diff --git a/mp2-v1/src/values_extraction/leaf_receipt.rs b/mp2-v1/src/values_extraction/leaf_receipt.rs index a8b6038ef..67e6036ac 100644 --- a/mp2-v1/src/values_extraction/leaf_receipt.rs +++ b/mp2-v1/src/values_extraction/leaf_receipt.rs @@ -9,10 +9,10 @@ use alloy::{ primitives::{Address, Log, B256}, rlp::Decodable, }; -use anyhow::{anyhow, Result}; +use anyhow::Result; use mp2_common::{ array::{Array, Targetable, Vector, VectorWire}, - eth::{EventLogInfo, ReceiptProofInfo}, + eth::EventLogInfo, group_hashing::CircuitBuilderGroupHashing, keccak::{InputData, KeccakCircuit, KeccakWires, HASH_LEN}, mpt_sequential::{MPTKeyWire, MPTReceiptLeafNode, PAD_LEN}, @@ -127,20 +127,13 @@ where { /// Create a new [`ReceiptLeafCircuit`] from a [`ReceiptProofInfo`] and a 
[`EventLogInfo`] pub fn new( - proof_info: &ReceiptProofInfo, + last_node: &[u8], + tx_index: u64, event: &EventLogInfo, ) -> Result where [(); MAX_COLUMNS - 2 - NO_TOPICS - MAX_DATA]:, { - // Since the compact encoding of the key is stored first plus an additional list header and - // then the first element in the receipt body is the transaction type we calculate the offset to that point - - let last_node = proof_info - .mpt_proof - .last() - .ok_or(anyhow!("Could not get last node in receipt trie proof"))?; - // Convert to Rlp form so we can use provided methods. let node_rlp = rlp::Rlp::new(last_node); @@ -192,8 +185,8 @@ where let metadata = TableMetadata::::from(*event); Ok(Self { - node: last_node.clone(), - tx_index: proof_info.tx_index, + node: last_node.to_vec(), + tx_index, size, address, rel_add_offset: add_rel_offset, @@ -543,9 +536,14 @@ mod tests { let info = proofs.first().unwrap(); let query = receipt_proof_infos.query(); - let c = ReceiptLeafCircuit::::new::(info, &query.event) - .unwrap(); + let c = ReceiptLeafCircuit::::new::( + info.mpt_proof.last().unwrap(), + info.tx_index, + &query.event, + ) + .unwrap(); let metadata = c.metadata.clone(); + let test_circuit = TestReceiptLeafCircuit { c }; let node = info.mpt_proof.last().unwrap().clone(); diff --git a/mp2-v1/src/values_extraction/planner.rs b/mp2-v1/src/values_extraction/planner.rs index ca013cb26..135f31c56 100644 --- a/mp2-v1/src/values_extraction/planner.rs +++ b/mp2-v1/src/values_extraction/planner.rs @@ -6,10 +6,13 @@ use alloy::{ transports::Transport, }; use anyhow::Result; -use mp2_common::eth::{EventLogInfo, ReceiptQuery}; -use ryhope::storage::updatetree::UpdateTree; +use mp2_common::eth::{node_type, EventLogInfo, NodeType, ReceiptQuery}; +use ryhope::storage::updatetree::{Next, UpdateTree}; use std::future::Future; +use std::collections::HashMap; + +use super::{generate_proof, CircuitInput, PublicParameters}; /// Trait that is implemented for all data that we can provably extract. 
pub trait Extractable { fn create_update_tree( @@ -18,6 +21,33 @@ pub trait Extractable { epoch: u64, provider: &RootProvider, ) -> impl Future>>; + + fn prove_value_extraction( + &self, + contract: Address, + epoch: u64, + pp: &PublicParameters, + provider: &RootProvider, + ) -> impl Future>>; +} + +#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord)] +struct ProofData { + node: Vec, + node_type: NodeType, + tx_index: Option, + proof: Option>, +} + +impl ProofData { + pub fn new(node: Vec, node_type: NodeType, tx_index: Option) -> ProofData { + ProofData { + node, + node_type, + tx_index, + proof: None, + } + } } impl Extractable @@ -45,6 +75,158 @@ impl Extractable // Now we make the UpdateTree Ok(UpdateTree::::from_paths(key_paths, epoch as i64)) } + + async fn prove_value_extraction( + &self, + contract: Address, + epoch: u64, + pp: &PublicParameters, + provider: &RootProvider, + ) -> Result> { + let query = ReceiptQuery:: { + contract, + event: *self, + }; + + let proofs = query.query_receipt_proofs(provider, epoch.into()).await?; + + let mut data_store = HashMap::::new(); + + // Convert the paths into their keys using keccak + let key_paths = proofs + .iter() + .map(|input| { + let tx_index = input.tx_index; + input + .mpt_proof + .iter() + .map(|node| { + let node_key = keccak256(node); + let node_type = node_type(node)?; + let tx = if let NodeType::Leaf = node_type { + Some(tx_index) + } else { + None + }; + data_store.insert(node_key, ProofData::new(node.clone(), node_type, tx)); + + Ok(node_key) + }) + .collect::>>() + }) + .collect::>>>()?; + + let update_tree = UpdateTree::::from_paths(key_paths, epoch as i64); + + let mut update_plan = update_tree.clone().into_workplan(); + + while let Some(Next::Ready(work_plan_item)) = update_plan.next() { + let node_type = data_store + .get(work_plan_item.k()) + .ok_or(anyhow::anyhow!( + "No ProofData found for key: {:?}", + work_plan_item.k() + ))? 
+ .node_type; + + let update_tree_node = + update_tree + .get_node(work_plan_item.k()) + .ok_or(anyhow::anyhow!( + "No UpdateTreeNode found for key: {:?}", + work_plan_item.k() + ))?; + + match node_type { + NodeType::Leaf => { + let proof_data = + data_store + .get_mut(work_plan_item.k()) + .ok_or(anyhow::anyhow!( + "No ProofData found for key: {:?}", + work_plan_item.k() + ))?; + let input = CircuitInput::new_receipt_leaf( + &proof_data.node, + proof_data.tx_index.unwrap(), + self, + ); + let proof = generate_proof(pp, input)?; + proof_data.proof = Some(proof); + update_plan.done(&work_plan_item)?; + } + NodeType::Extension => { + let child_key = update_tree.get_child_keys(update_tree_node); + if child_key.len() != 1 { + return Err(anyhow::anyhow!("When proving extension node had {} many child keys when we should only have 1", child_key.len())); + } + let child_proof = data_store + .get(&child_key[0]) + .ok_or(anyhow::anyhow!( + "Extension node child had no proof data for key: {:?}", + child_key[0] + ))? + .clone(); + let proof_data = + data_store + .get_mut(work_plan_item.k()) + .ok_or(anyhow::anyhow!( + "No ProofData found for key: {:?}", + work_plan_item.k() + ))?; + let input = CircuitInput::new_extension( + proof_data.node.clone(), + child_proof.proof.ok_or(anyhow::anyhow!( + "Extension node child proof was a None value" + ))?, + ); + let proof = generate_proof(pp, input)?; + proof_data.proof = Some(proof); + update_plan.done(&work_plan_item)?; + } + NodeType::Branch => { + let child_keys = update_tree.get_child_keys(update_tree_node); + let child_proofs = child_keys + .iter() + .map(|key| { + data_store + .get(key) + .ok_or(anyhow::anyhow!( + "Branch child data could not be found for key: {:?}", + key + ))? 
+ .clone() + .proof + .ok_or(anyhow::anyhow!("No proof found in brnach node child")) + }) + .collect::>>>()?; + let proof_data = + data_store + .get_mut(work_plan_item.k()) + .ok_or(anyhow::anyhow!( + "No ProofData found for key: {:?}", + work_plan_item.k() + ))?; + let input = CircuitInput::new_mapping_variable_branch( + proof_data.node.clone(), + child_proofs, + ); + let proof = generate_proof(pp, input)?; + proof_data.proof = Some(proof); + update_plan.done(&work_plan_item)?; + } + } + } + + let final_data = data_store + .get(update_tree.root()) + .ok_or(anyhow::anyhow!("No data for root of update tree found"))? + .clone(); + + final_data + .proof + .ok_or(anyhow::anyhow!("No proof stored for final data")) + } } #[cfg(test)] @@ -52,10 +234,21 @@ pub mod tests { use alloy::{eips::BlockNumberOrTag, primitives::Address, providers::ProviderBuilder, sol}; use anyhow::anyhow; - use mp2_common::eth::BlockUtil; + use eth_trie::Trie; + use mp2_common::{ + digest::Digest, + eth::BlockUtil, + proof::ProofWithVK, + utils::{Endianness, Packer}, + }; use mp2_test::eth::get_mainnet_url; use std::str::FromStr; + use crate::values_extraction::{ + api::build_circuits_params, compute_receipt_leaf_metadata_digest, + compute_receipt_leaf_value_digest, PublicInputs, + }; + use super::*; #[tokio::test] @@ -80,6 +273,67 @@ pub mod tests { Ok(()) } + #[tokio::test] + async fn test_receipt_proving() -> Result<()> { + // First get the info we will feed in to our function + let event_info = test_receipt_trie_helper().await?; + + let contract = Address::from_str("0xbd3531da5cf5857e7cfaa92426877b022e612cf8")?; + let epoch: u64 = 21362445; + + let url = get_mainnet_url(); + // get some tx and receipt + let provider = ProviderBuilder::new().on_http(url.parse().unwrap()); + + let pp = build_circuits_params(); + let final_proof_bytes = event_info + .prove_value_extraction(contract, epoch, &pp, &provider) + .await?; + + let final_proof = ProofWithVK::deserialize(&final_proof_bytes)?; + let query 
= ReceiptQuery::<2, 1> { + contract, + event: event_info, + }; + + let metadata_digest = compute_receipt_leaf_metadata_digest(&event_info); + + let value_digest = query + .query_receipt_proofs(&provider, epoch.into()) + .await? + .iter() + .fold(Digest::NEUTRAL, |acc, info| { + acc + compute_receipt_leaf_value_digest(info, &event_info) + }); + + let pi = PublicInputs::new(&final_proof.proof.public_inputs); + + let mut block_util = build_test_data().await; + // Check the output hash + { + assert_eq!( + pi.root_hash(), + block_util + .receipts_trie + .root_hash()? + .0 + .to_vec() + .pack(Endianness::Little) + ); + } + + // Check value digest + { + assert_eq!(pi.values_digest(), value_digest.to_weierstrass()); + } + + // Check metadata digest + { + assert_eq!(pi.metadata_digest(), metadata_digest.to_weierstrass()); + } + Ok(()) + } + /// Function that fetches a block together with its transaction trie and receipt trie for testing purposes. async fn build_test_data() -> BlockUtil { let url = get_mainnet_url(); diff --git a/mp2-v1/tests/common/cases/indexing.rs b/mp2-v1/tests/common/cases/indexing.rs index eed083ab1..e97c824f2 100644 --- a/mp2-v1/tests/common/cases/indexing.rs +++ b/mp2-v1/tests/common/cases/indexing.rs @@ -979,7 +979,7 @@ impl TableIndexing { } }; - let table_id = &self.table.public_name.clone(); + let table_id = &self.table.public_name; // we construct the proof key for both mappings and single variable in the same way since // it is derived from the table id which should be different for any tables we create. 
let value_key = ProofKey::ValueExtraction((table_id.clone(), bn as BlockPrimaryIndex)); diff --git a/ryhope/src/storage/updatetree.rs b/ryhope/src/storage/updatetree.rs index 12e5c5ff0..b736572fc 100644 --- a/ryhope/src/storage/updatetree.rs +++ b/ryhope/src/storage/updatetree.rs @@ -41,9 +41,13 @@ impl UpdateTreeNode { fn is_leaf(&self) -> bool { self.children.is_empty() } + + pub fn k(&self) -> K { + self.k.clone() + } } -impl UpdateTree { +impl UpdateTree { pub fn root(&self) -> &K { &self.nodes[0].k } @@ -65,6 +69,16 @@ impl UpdateTree { pub fn nodes(&self) -> impl Iterator { self.nodes.iter().map(|n| &n.k) } + pub fn get_node(&self, key: &K) -> Option<&UpdateTreeNode> { + self.idx.get(key).map(|idx| self.node(*idx)) + } + + pub fn get_child_keys(&self, node: &UpdateTreeNode) -> Vec { + node.children + .iter() + .map(|idx| self.node(*idx).k()) + .collect() + } } impl UpdateTree { From 025d8db8dfdb52b3a0b86549adfa98509b91c83f Mon Sep 17 00:00:00 2001 From: Zack Youell Date: Tue, 31 Dec 2024 14:11:55 +0000 Subject: [PATCH 26/47] Resolves CRY-26 --- mp2-v1/src/api.rs | 3 + mp2-v1/tests/common/cases/indexing.rs | 5 +- mp2-v1/tests/common/cases/table_source.rs | 82 ++++++++++++++++++++--- mp2-v1/tests/common/final_extraction.rs | 6 ++ mp2-v1/tests/integrated_tests.rs | 10 ++- 5 files changed, 93 insertions(+), 13 deletions(-) diff --git a/mp2-v1/src/api.rs b/mp2-v1/src/api.rs index 53636dff4..8a27e9fb7 100644 --- a/mp2-v1/src/api.rs +++ b/mp2-v1/src/api.rs @@ -109,6 +109,9 @@ where pub fn empty_cell_tree_proof(&self) -> Result> { self.tree_creation.empty_cell_tree_proof() } + pub fn get_value_extraction_params(&self) -> &ValuesExtractionParameters { + &self.values_extraction + } } /// Instantiate the circuits employed for the pre-processing stage of LPN, diff --git a/mp2-v1/tests/common/cases/indexing.rs b/mp2-v1/tests/common/cases/indexing.rs index e97c824f2..b30e64b80 100644 --- a/mp2-v1/tests/common/cases/indexing.rs +++ b/mp2-v1/tests/common/cases/indexing.rs @@ 
-1337,15 +1337,16 @@ where self.apply_update(ctx, contract).await } } -#[derive(Clone, Debug)] +#[derive(Clone, Debug, Copy)] pub enum ChangeType { Deletion, Insertion, Update(UpdateType), Silent, + Receipt(usize, usize), } -#[derive(Clone, Debug)] +#[derive(Clone, Debug, Copy)] pub enum UpdateType { SecondaryIndex, Rest, diff --git a/mp2-v1/tests/common/cases/table_source.rs b/mp2-v1/tests/common/cases/table_source.rs index 345d0b461..da3c5a5ad 100644 --- a/mp2-v1/tests/common/cases/table_source.rs +++ b/mp2-v1/tests/common/cases/table_source.rs @@ -38,7 +38,9 @@ use mp2_v1::{ values_extraction::{ gadgets::{column_info::ExtractedColumnInfo, metadata_gadget::TableMetadata}, identifier_for_inner_mapping_key_column, identifier_for_mapping_key_column, - identifier_for_outer_mapping_key_column, identifier_for_value_column, StorageSlotInfo, + identifier_for_outer_mapping_key_column, identifier_for_value_column, + planner::Extractable, + StorageSlotInfo, }, }; use plonky2::field::types::PrimeField64; @@ -881,20 +883,76 @@ where async fn generate_extraction_proof_inputs( &self, - _ctx: &mut TestContext, - _contract: &Contract, - _value_key: ProofKey, + ctx: &mut TestContext, + contract: &Contract, + value_key: ProofKey, ) -> Result<(ExtractionProofInput, HashOutput)> { - todo!("Implement as part of CRY-25") + let event = self.get_event(); + + let ProofKey::ValueExtraction((_, bn)) = value_key else { + bail!("key wrong"); + }; + + let provider = ProviderBuilder::new() + .with_recommended_fillers() + .wallet(ctx.wallet()) + .on_http(ctx.rpc_url.parse().unwrap()); + + let value_proof = event + .prove_value_extraction( + contract.address(), + bn as u64, + ctx.params().get_value_extraction_params(), + provider.root(), + ) + .await?; + Ok(( + ExtractionProofInput::Receipt(value_proof), + self.metadata_hash(contract.address(), contract.chain_id()), + )) } fn random_contract_update<'a>( &'a mut self, - _ctx: &'a mut TestContext, - _contract: &'a Contract, - _c: ChangeType, + ctx: 
&'a mut TestContext, + contract: &'a Contract, + c: ChangeType, ) -> BoxFuture<'a, Vec>> { - todo!("Implement as part of CRY-25") + let event = self.get_event(); + async move { + let ChangeType::Receipt(relevant, others) = c else { + panic!("Need ChangeType::Receipt, got: {:?}", c); + }; + let contract_update = + ReceiptUpdate::new((R::NO_TOPICS as u8, R::MAX_DATA as u8), relevant, others); + + let provider = ProviderBuilder::new() + .with_recommended_fillers() + .wallet(ctx.wallet()) + .on_http(ctx.rpc_url.parse().unwrap()); + + let event_emitter = EventContract::new(contract.address(), provider.root()); + event_emitter + .apply_update(ctx, &contract_update) + .await + .unwrap(); + + let block_number = ctx.block_number().await; + let new_block_number = block_number as BlockPrimaryIndex; + + let query = ReceiptQuery::<{ R::NO_TOPICS }, { R::MAX_DATA }> { + contract: contract.address(), + event, + }; + + let proof_infos = query + .query_receipt_proofs(provider.root(), block_number.into()) + .await + .unwrap(); + + R::to_table_rows(&proof_infos, &event, new_block_number) + } + .boxed() } fn metadata_hash(&self, _contract_address: Address, _chain_id: u64) -> MetadataHash { @@ -1156,6 +1214,9 @@ impl SingleExtractionArgs { // We can take the first one since we're asking for single value and there is only one row. let old_table_values = &old_table_values[0]; match change_type { + ChangeType::Receipt(..) => { + panic!("Can't add a new receipt change for storage variable") + } ChangeType::Silent => {} ChangeType::Insertion => { panic!("Can't add a new row for blockchain data over single values") @@ -1392,6 +1453,9 @@ where let current_value = self.query_value(ctx, contract, current_key).await; let new_key = T::sample_key(); let updates = match c { + ChangeType::Receipt(..) 
=> { + panic!("Can't add a new receipt change for storage variable") + } ChangeType::Silent => vec![], ChangeType::Insertion => { vec![MappingUpdate::Insertion( diff --git a/mp2-v1/tests/common/final_extraction.rs b/mp2-v1/tests/common/final_extraction.rs index af97b2793..4616810bf 100644 --- a/mp2-v1/tests/common/final_extraction.rs +++ b/mp2-v1/tests/common/final_extraction.rs @@ -24,10 +24,13 @@ pub struct MergeExtractionProof { pub mapping: ExtractionTableProof, } +type ReceiptExtractionProof = Vec; + #[derive(Clone, Debug, PartialEq, Eq)] pub enum ExtractionProofInput { Single(ExtractionTableProof), Merge(MergeExtractionProof), + Receipt(ReceiptExtractionProof), } impl TestContext { @@ -72,6 +75,9 @@ impl TestContext { inputs.single.value_proof, inputs.mapping.value_proof, ), + ExtractionProofInput::Receipt(input) => { + CircuitInput::new_receipt_input(block_proof, input) + } }?; let params = self.params(); let proof = self diff --git a/mp2-v1/tests/integrated_tests.rs b/mp2-v1/tests/integrated_tests.rs index de3aeb37a..022b2edf1 100644 --- a/mp2-v1/tests/integrated_tests.rs +++ b/mp2-v1/tests/integrated_tests.rs @@ -91,9 +91,15 @@ async fn integrated_indexing() -> Result<()> { ctx.build_params(ParamsType::Indexing).unwrap(); info!("Params built"); - // For now we test that we can start a receipt case only. - let (_receipt, _genesis) = + + let (mut receipt, genesis) = TableIndexing::>::receipt_test_case(0, 0, &mut ctx).await?; + let changes = vec![ + ChangeType::Receipt(1, 10), + ChangeType::Receipt(10, 1), + ChangeType::Receipt(0, 10), + ]; + receipt.run(&mut ctx, genesis, changes.clone()).await?; // NOTE: to comment to avoid very long tests... 
let (mut single, genesis) = From 268f1169d247902034bd5992a4441ce593c7e2e6 Mon Sep 17 00:00:00 2001 From: Zack Youell Date: Thu, 2 Jan 2025 09:39:13 +0000 Subject: [PATCH 27/47] Fixed topic cell id discrepancy --- mp2-v1/src/values_extraction/planner.rs | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/mp2-v1/src/values_extraction/planner.rs b/mp2-v1/src/values_extraction/planner.rs index 135f31c56..279ba09bb 100644 --- a/mp2-v1/src/values_extraction/planner.rs +++ b/mp2-v1/src/values_extraction/planner.rs @@ -96,6 +96,10 @@ impl Extractable let key_paths = proofs .iter() .map(|input| { + let digest = + crate::values_extraction::compute_receipt_leaf_value_digest(input, self) + .to_weierstrass(); + println!("extraction proof values digest: {:?}", digest); let tx_index = input.tx_index; input .mpt_proof From 017a15aa6caffd879e1e85e078a392f5251f6afd Mon Sep 17 00:00:00 2001 From: Zack Youell Date: Thu, 2 Jan 2025 12:44:40 +0000 Subject: [PATCH 28/47] Fixed receipt value digest --- mp2-v1/src/values_extraction/planner.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/mp2-v1/src/values_extraction/planner.rs b/mp2-v1/src/values_extraction/planner.rs index 279ba09bb..02b6ea939 100644 --- a/mp2-v1/src/values_extraction/planner.rs +++ b/mp2-v1/src/values_extraction/planner.rs @@ -101,6 +101,7 @@ impl Extractable .to_weierstrass(); println!("extraction proof values digest: {:?}", digest); let tx_index = input.tx_index; + println!("tx index: {}", tx_index); input .mpt_proof .iter() From 727b00a6967ee9dd75a3a92ec040ba0b0e12fd3c Mon Sep 17 00:00:00 2001 From: Zack Youell Date: Thu, 2 Jan 2025 13:49:28 +0000 Subject: [PATCH 29/47] RowTreeUpdate Debugging --- mp2-v1/src/values_extraction/planner.rs | 5 ----- mp2-v1/tests/common/table.rs | 5 ++++- 2 files changed, 4 insertions(+), 6 deletions(-) diff --git a/mp2-v1/src/values_extraction/planner.rs b/mp2-v1/src/values_extraction/planner.rs index 02b6ea939..135f31c56 100644 --- a/mp2-v1/src/values_extraction/planner.rs +++ 
b/mp2-v1/src/values_extraction/planner.rs @@ -96,12 +96,7 @@ impl Extractable let key_paths = proofs .iter() .map(|input| { - let digest = - crate::values_extraction::compute_receipt_leaf_value_digest(input, self) - .to_weierstrass(); - println!("extraction proof values digest: {:?}", digest); let tx_index = input.tx_index; - println!("tx index: {}", tx_index); input .mpt_proof .iter() diff --git a/mp2-v1/tests/common/table.rs b/mp2-v1/tests/common/table.rs index 3ee8f7af0..26bf07144 100644 --- a/mp2-v1/tests/common/table.rs +++ b/mp2-v1/tests/common/table.rs @@ -470,6 +470,9 @@ impl Table { .map(|plan| RowUpdateResult { updates: plan }); { // debugging + if out.is_err() { + println!("Out was an error: {:?}", out); + } println!("\n+++++++++++++++++++++++++++++++++\n"); let root = self.row.root_data().await?.unwrap(); let new_epoch = self.row.current_epoch(); @@ -531,7 +534,7 @@ pub enum TreeRowUpdate { Deletion(RowTreeKey), } -#[derive(Clone)] +#[derive(Clone, Debug)] pub struct RowUpdateResult { // There is only a single row key for a table that we update continuously // so no need to track all the rows that have been updated in the result From 0c7a982fcfe61da1256bb1c61fbf6265c00f3686 Mon Sep 17 00:00:00 2001 From: Zack Youell Date: Thu, 2 Jan 2025 14:50:52 +0000 Subject: [PATCH 30/47] Correct Receipt Row Tree --- mp2-v1/src/api.rs | 5 +- mp2-v1/src/values_extraction/api.rs | 2 +- .../gadgets/metadata_gadget.rs | 23 ++--- mp2-v1/src/values_extraction/leaf_mapping.rs | 14 ++- .../leaf_mapping_of_mappings.rs | 18 ++-- mp2-v1/src/values_extraction/leaf_receipt.rs | 16 ++-- mp2-v1/src/values_extraction/leaf_single.rs | 5 +- mp2-v1/src/values_extraction/mod.rs | 6 +- mp2-v1/src/values_extraction/planner.rs | 87 +++++++++++++++---- mp2-v1/tests/common/cases/contract.rs | 9 +- mp2-v1/tests/common/cases/indexing.rs | 82 ++++++++--------- mp2-v1/tests/common/cases/slot_info.rs | 4 +- mp2-v1/tests/common/cases/table_source.rs | 18 ++-- mp2-v1/tests/common/mod.rs | 2 - 
mp2-v1/tests/common/storage_trie.rs | 7 +- mp2-v1/tests/common/table.rs | 4 +- 16 files changed, 166 insertions(+), 136 deletions(-) diff --git a/mp2-v1/src/api.rs b/mp2-v1/src/api.rs index 8a27e9fb7..d3020b4ed 100644 --- a/mp2-v1/src/api.rs +++ b/mp2-v1/src/api.rs @@ -1,5 +1,5 @@ //! Main APIs and related structures - +#![allow(clippy::identity_op)] use std::iter::once; use crate::{ @@ -15,7 +15,7 @@ use crate::{ identifier_block_column, identifier_for_value_column, ColumnMetadata, INNER_KEY_ID_PREFIX, KEY_ID_PREFIX, OUTER_KEY_ID_PREFIX, }, - MAX_LEAF_VALUE_LEN, MAX_RECEIPT_LEAF_NODE_LEN, + MAX_RECEIPT_LEAF_NODE_LEN, }; use alloy::primitives::Address; @@ -122,6 +122,7 @@ where [(); MAX_COLUMNS - 1]:, [(); MAX_COLUMNS - 0]:, { + sanity_check(); log::info!("Building contract_extraction parameters..."); let contract_extraction = contract_extraction::build_circuits_params(); log::info!("Building length_extraction parameters..."); diff --git a/mp2-v1/src/values_extraction/api.rs b/mp2-v1/src/values_extraction/api.rs index a24bc06df..74c873fa6 100644 --- a/mp2-v1/src/values_extraction/api.rs +++ b/mp2-v1/src/values_extraction/api.rs @@ -1,5 +1,5 @@ //! 
Values extraction APIs - +#![allow(clippy::identity_op)] use super::{ branch::{BranchCircuit, BranchWires}, extension::{ExtensionNodeCircuit, ExtensionNodeWires}, diff --git a/mp2-v1/src/values_extraction/gadgets/metadata_gadget.rs b/mp2-v1/src/values_extraction/gadgets/metadata_gadget.rs index cac385112..486f07f11 100644 --- a/mp2-v1/src/values_extraction/gadgets/metadata_gadget.rs +++ b/mp2-v1/src/values_extraction/gadgets/metadata_gadget.rs @@ -16,13 +16,11 @@ use mp2_common::{ eth::EventLogInfo, group_hashing::CircuitBuilderGroupHashing, poseidon::H, - serialization::{deserialize_long_array, serialize_array, serialize_long_array}, + serialization::{deserialize_long_array, serialize_long_array}, types::{CBuilder, HashOutput}, u256::{CircuitBuilderU256, UInt256Target}, - utils::{ - less_than_or_equal_to_unsafe, Endianness, FromTargets, Packer, TargetsConnector, ToFields, - }, - CHasher, F, + utils::{Endianness, Packer}, + F, }; use plonky2::{ field::types::{Field, PrimeField64}, @@ -81,7 +79,7 @@ where let mut table_info = [ExtractedColumnInfo::default(); { MAX_COLUMNS - INPUT_COLUMNS }]; table_info .iter_mut() - .zip(extracted_columns.into_iter()) + .zip(extracted_columns) .for_each(|(ti, &column)| *ti = column); TableMetadata:: { @@ -233,6 +231,7 @@ where let logs_offset = receipt_off + receipt_str_payload.header_len + 1 + logs_off; // Now we produce an iterator over the logs with each logs offset. 
+ #[allow(clippy::unnecessary_find_map)] let relevant_log_offset = std::iter::successors(Some(0usize), |i| Some(i + 1)) .map_while(|i| logs_rlp.at_with_offset(i).ok()) .find_map(|(log_rlp, log_off)| { @@ -394,17 +393,6 @@ where } } -// impl TryFrom for TableMetadata { -// type Error = anyhow::Error; -// fn try_from(value: StorageSlot) -> Result { -// match value { -// StorageSlot::Node(inner_slot) => {match inner_slot { -// StorageSlotNode:: -// }} -// } -// } -// } - #[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)] pub(crate) struct TableMetadataTarget where @@ -438,6 +426,7 @@ impl where [(); MAX_COLUMNS - INPUT_COLUMNS]:, { + #[cfg(test)] pub fn metadata_digest(&self, b: &mut CBuilder) -> CurveTarget { let input_points = self .input_columns diff --git a/mp2-v1/src/values_extraction/leaf_mapping.rs b/mp2-v1/src/values_extraction/leaf_mapping.rs index 1f09f3b9c..10d23040a 100644 --- a/mp2-v1/src/values_extraction/leaf_mapping.rs +++ b/mp2-v1/src/values_extraction/leaf_mapping.rs @@ -1,11 +1,8 @@ //! 
Module handling the mapping entries inside a storage trie -use crate::values_extraction::{ - public_inputs::{PublicInputs, PublicInputsArgs}, - KEY_ID_PREFIX, -}; +use crate::values_extraction::public_inputs::{PublicInputs, PublicInputsArgs}; use anyhow::Result; -use itertools::Itertools; + use mp2_common::{ array::{Array, Targetable, Vector, VectorWire}, group_hashing::CircuitBuilderGroupHashing, @@ -16,7 +13,7 @@ use mp2_common::{ poseidon::hash_to_int_target, public_inputs::PublicInputCommon, storage_key::{MappingSlot, MappingStructSlotWires}, - types::{CBuilder, GFp, MAPPING_LEAF_VALUE_LEN}, + types::{CBuilder, GFp}, u256::UInt256Target, utils::{Endianness, ToTargets}, CHasher, D, F, @@ -213,7 +210,7 @@ where #[cfg(test)] mod tests { use super::*; - use crate::tests::TEST_MAX_COLUMNS; + use crate::{tests::TEST_MAX_COLUMNS, values_extraction::KEY_ID_PREFIX}; use eth_trie::{Nibbles, Trie}; use mp2_common::{ array::Array, @@ -221,6 +218,7 @@ mod tests { mpt_sequential::utils::bytes_to_nibbles, poseidon::{hash_to_int_value, H}, rlp::MAX_KEY_NIBBLE_LEN, + types::MAPPING_LEAF_VALUE_LEN, utils::{keccak256, Endianness, Packer, ToFields}, C, D, F, }; @@ -303,7 +301,7 @@ mod tests { .chain(once(F::from_canonical_usize( table_metadata.num_actual_columns, ))) - .collect_vec(); + .collect::>(); let hash = H::hash_no_pad(&inputs); let row_id = hash_to_int_value(hash); diff --git a/mp2-v1/src/values_extraction/leaf_mapping_of_mappings.rs b/mp2-v1/src/values_extraction/leaf_mapping_of_mappings.rs index 7bd8ecef4..1bb2cbfc9 100644 --- a/mp2-v1/src/values_extraction/leaf_mapping_of_mappings.rs +++ b/mp2-v1/src/values_extraction/leaf_mapping_of_mappings.rs @@ -5,10 +5,9 @@ use crate::values_extraction::{ gadgets::metadata_gadget::{TableMetadataGadget, TableMetadataTarget}, public_inputs::{PublicInputs, PublicInputsArgs}, - INNER_KEY_ID_PREFIX, OUTER_KEY_ID_PREFIX, }; use anyhow::Result; -use itertools::Itertools; + use mp2_common::{ array::{Array, Targetable, Vector, VectorWire, 
L32}, group_hashing::CircuitBuilderGroupHashing, @@ -17,7 +16,7 @@ use mp2_common::{ poseidon::hash_to_int_target, public_inputs::PublicInputCommon, storage_key::{MappingOfMappingsSlotWires, MappingSlot}, - types::{CBuilder, GFp, MAPPING_LEAF_VALUE_LEN}, + types::{CBuilder, GFp}, u256::UInt256Target, utils::{Endianness, ToTargets}, CHasher, D, F, @@ -32,7 +31,7 @@ use plonky2::{ }; use plonky2_crypto::u32::arithmetic_u32::U32Target; use plonky2_ecdsa::gadgets::nonnative::CircuitBuilderNonNative; -use plonky2_ecgfp5::{curve::scalar_field::Scalar, gadgets::curve::CircuitBuilderEcGFp5}; +use plonky2_ecgfp5::gadgets::curve::CircuitBuilderEcGFp5; use recursion_framework::circuit_builder::CircuitLogicWires; use serde::{Deserialize, Serialize}; use std::iter::once; @@ -228,7 +227,10 @@ where #[cfg(test)] mod tests { use super::*; - use crate::tests::TEST_MAX_COLUMNS; + use crate::{ + tests::TEST_MAX_COLUMNS, + values_extraction::{INNER_KEY_ID_PREFIX, OUTER_KEY_ID_PREFIX}, + }; use eth_trie::{Nibbles, Trie}; use mp2_common::{ array::Array, @@ -236,6 +238,7 @@ mod tests { mpt_sequential::utils::bytes_to_nibbles, poseidon::{hash_to_int_value, H}, rlp::MAX_KEY_NIBBLE_LEN, + types::MAPPING_LEAF_VALUE_LEN, utils::{keccak256, Endianness, Packer, ToFields}, C, D, F, }; @@ -250,6 +253,9 @@ mod tests { iop::{target::Target, witness::PartialWitness}, plonk::config::Hasher, }; + + use plonky2_ecgfp5::curve::scalar_field::Scalar; + use rand::{thread_rng, Rng}; use std::array; @@ -321,7 +327,7 @@ mod tests { .chain(once(F::from_canonical_usize( table_metadata.num_actual_columns, ))) - .collect_vec(); + .collect::>(); let hash = H::hash_no_pad(&inputs); let row_id = hash_to_int_value(hash); diff --git a/mp2-v1/src/values_extraction/leaf_receipt.rs b/mp2-v1/src/values_extraction/leaf_receipt.rs index 67e6036ac..5ee55cb93 100644 --- a/mp2-v1/src/values_extraction/leaf_receipt.rs +++ b/mp2-v1/src/values_extraction/leaf_receipt.rs @@ -16,11 +16,11 @@ use mp2_common::{ 
group_hashing::CircuitBuilderGroupHashing, keccak::{InputData, KeccakCircuit, KeccakWires, HASH_LEN}, mpt_sequential::{MPTKeyWire, MPTReceiptLeafNode, PAD_LEN}, - poseidon::{hash_to_int_target, H}, + poseidon::hash_to_int_target, public_inputs::PublicInputCommon, rlp::MAX_KEY_NIBBLE_LEN, types::{CBuilder, GFp}, - utils::{less_than, less_than_or_equal_to_unsafe, Endianness, ToTargets}, + utils::{less_than, less_than_or_equal_to_unsafe, ToTargets}, CHasher, D, F, }; use plonky2::{ @@ -29,7 +29,7 @@ use plonky2::{ target::Target, witness::{PartialWitness, WitnessWrite}, }, - plonk::{circuit_builder::CircuitBuilder, config::Hasher}, + plonk::circuit_builder::CircuitBuilder, }; use plonky2_crypto::u32::arithmetic_u32::{CircuitBuilderU32, U32Target}; @@ -170,7 +170,9 @@ where Some(0usize) } }) - .ok_or(anyhow!("There were no relevant logs in this transaction"))?; + .ok_or(anyhow::anyhow!( + "There were no relevant logs in this transaction" + ))?; let EventLogInfo:: { size, @@ -482,15 +484,15 @@ mod tests { use mp2_common::{ eth::left_pad32, - poseidon::hash_to_int_value, - utils::{keccak256, Packer, ToFields}, + poseidon::{hash_to_int_value, H}, + utils::{keccak256, Endianness, Packer, ToFields}, C, }; use mp2_test::{ circuit::{run_circuit, UserCircuit}, mpt_sequential::generate_receipt_test_info, }; - use plonky2::hash::hash_types::HashOut; + use plonky2::{hash::hash_types::HashOut, plonk::config::Hasher}; use plonky2_ecgfp5::curve::scalar_field::Scalar; #[derive(Clone, Debug)] struct TestReceiptLeafCircuit { diff --git a/mp2-v1/src/values_extraction/leaf_single.rs b/mp2-v1/src/values_extraction/leaf_single.rs index e8552bc28..a119995cd 100644 --- a/mp2-v1/src/values_extraction/leaf_single.rs +++ b/mp2-v1/src/values_extraction/leaf_single.rs @@ -1,5 +1,5 @@ //! 
Module handling the single variable inside a storage trie - +#![allow(clippy::identity_op)] use crate::values_extraction::{ gadgets::metadata_gadget::{TableMetadata, TableMetadataGadget, TableMetadataTarget}, public_inputs::{PublicInputs, PublicInputsArgs}, @@ -14,7 +14,7 @@ use mp2_common::{ poseidon::{empty_poseidon_hash, hash_to_int_target}, public_inputs::PublicInputCommon, storage_key::{SimpleSlot, SimpleStructSlotWires}, - types::{CBuilder, GFp, MAPPING_LEAF_VALUE_LEN}, + types::{CBuilder, GFp}, u256::UInt256Target, utils::{Endianness, ToTargets}, CHasher, D, F, @@ -187,6 +187,7 @@ mod tests { mpt_sequential::utils::bytes_to_nibbles, poseidon::{hash_to_int_value, H}, rlp::MAX_KEY_NIBBLE_LEN, + types::MAPPING_LEAF_VALUE_LEN, utils::{keccak256, Endianness, Packer, ToFields}, C, D, F, }; diff --git a/mp2-v1/src/values_extraction/mod.rs b/mp2-v1/src/values_extraction/mod.rs index f1ba63f17..8c0673bab 100644 --- a/mp2-v1/src/values_extraction/mod.rs +++ b/mp2-v1/src/values_extraction/mod.rs @@ -257,14 +257,12 @@ impl ColumnMetadata { }); let row_id_input = input_vals - .into_iter() - .map(|key| { + .iter() + .flat_map(|key| { key.pack(Endianness::Big) .into_iter() .map(F::from_canonical_u32) }) - .into_iter() - .flatten() .collect::>(); (point, H::hash_no_pad(&row_id_input).into()) diff --git a/mp2-v1/src/values_extraction/planner.rs b/mp2-v1/src/values_extraction/planner.rs index 135f31c56..e8298492f 100644 --- a/mp2-v1/src/values_extraction/planner.rs +++ b/mp2-v1/src/values_extraction/planner.rs @@ -1,4 +1,5 @@ //! This code returns an [`UpdateTree`] used to plan how we prove a series of values was extracted from a Merkle Patricia Trie. 
+#![allow(clippy::identity_op)] use alloy::{ network::Ethereum, primitives::{keccak256, Address, B256}, @@ -22,13 +23,17 @@ pub trait Extractable { provider: &RootProvider, ) -> impl Future>>; - fn prove_value_extraction( + fn prove_value_extraction( &self, contract: Address, epoch: u64, - pp: &PublicParameters, + pp: &PublicParameters<512, MAX_COLUMNS>, provider: &RootProvider, - ) -> impl Future>>; + ) -> impl Future>> + where + [(); MAX_COLUMNS - 2]:, + [(); MAX_COLUMNS - 1]:, + [(); MAX_COLUMNS - 0]:; } #[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord)] @@ -52,6 +57,8 @@ impl ProofData { impl Extractable for EventLogInfo +where + [(); 7 - 2 - NO_TOPICS - MAX_DATA]:, { async fn create_update_tree( &self, @@ -76,13 +83,18 @@ impl Extractable Ok(UpdateTree::::from_paths(key_paths, epoch as i64)) } - async fn prove_value_extraction( + async fn prove_value_extraction( &self, contract: Address, epoch: u64, - pp: &PublicParameters, + pp: &PublicParameters<512, MAX_COLUMNS>, provider: &RootProvider, - ) -> Result> { + ) -> Result> + where + [(); MAX_COLUMNS - 2]:, + [(); MAX_COLUMNS - 1]:, + [(); MAX_COLUMNS - 0]:, + { let query = ReceiptQuery:: { contract, event: *self, @@ -207,10 +219,7 @@ impl Extractable "No ProofData found for key: {:?}", work_plan_item.k() ))?; - let input = CircuitInput::new_mapping_variable_branch( - proof_data.node.clone(), - child_proofs, - ); + let input = CircuitInput::new_branch(proof_data.node.clone(), child_proofs); let proof = generate_proof(pp, input)?; proof_data.proof = Some(proof); update_plan.done(&work_plan_item)?; @@ -237,16 +246,19 @@ pub mod tests { use eth_trie::Trie; use mp2_common::{ digest::Digest, - eth::BlockUtil, + eth::{left_pad32, BlockUtil}, + poseidon::{hash_to_int_value, H}, proof::ProofWithVK, - utils::{Endianness, Packer}, + types::GFp, + utils::{Endianness, Packer, ToFields}, }; use mp2_test::eth::get_mainnet_url; + use plonky2::{field::types::Field, hash::hash_types::HashOut, plonk::config::Hasher}; + use 
plonky2_ecgfp5::curve::scalar_field::Scalar; use std::str::FromStr; use crate::values_extraction::{ - api::build_circuits_params, compute_receipt_leaf_metadata_digest, - compute_receipt_leaf_value_digest, PublicInputs, + api::build_circuits_params, gadgets::metadata_gadget::TableMetadata, PublicInputs, }; use super::*; @@ -285,7 +297,7 @@ pub mod tests { // get some tx and receipt let provider = ProviderBuilder::new().on_http(url.parse().unwrap()); - let pp = build_circuits_params(); + let pp = build_circuits_params::<512, 7>(); let final_proof_bytes = event_info .prove_value_extraction(contract, epoch, &pp, &provider) .await?; @@ -296,14 +308,55 @@ pub mod tests { event: event_info, }; - let metadata_digest = compute_receipt_leaf_metadata_digest(&event_info); + let metadata = TableMetadata::<7, 2>::from(event_info); + + let metadata_digest = metadata.digest(); let value_digest = query .query_receipt_proofs(&provider, epoch.into()) .await? .iter() .fold(Digest::NEUTRAL, |acc, info| { - acc + compute_receipt_leaf_value_digest(info, &event_info) + let node = info.mpt_proof.last().unwrap().clone(); + + let mut tx_index_input = [0u8; 32]; + tx_index_input[31] = info.tx_index as u8; + + let node_rlp = rlp::Rlp::new(&node); + // The actual receipt data is item 1 in the list + let receipt_rlp = node_rlp.at(1).unwrap(); + + // We make a new `Rlp` struct that should be the encoding of the inner list representing the `ReceiptEnvelope` + let receipt_list = rlp::Rlp::new(&receipt_rlp.data().unwrap()[1..]); + + // The logs themselves start are the item at index 3 in this list + let gas_used_rlp = receipt_list.at(1).unwrap(); + + let gas_used_bytes = left_pad32(gas_used_rlp.data().unwrap()); + + let (input_vd, row_unique_data) = + metadata.input_value_digest(&[&tx_index_input, &gas_used_bytes]); + let extracted_vd = metadata.extracted_receipt_value_digest(&node, &event_info); + + let total = input_vd + extracted_vd; + + // row_id = H2int(row_unique_data || num_actual_columns) + 
let inputs = HashOut::from(row_unique_data) + .to_fields() + .into_iter() + .chain(std::iter::once(GFp::from_canonical_usize( + metadata.num_actual_columns, + ))) + .collect::>(); + let hash = H::hash_no_pad(&inputs); + let row_id = hash_to_int_value(hash); + + // values_digest = values_digest * row_id + let row_id = Scalar::from_noncanonical_biguint(row_id); + + let exp_digest = total * row_id; + + acc + exp_digest }); let pi = PublicInputs::new(&final_proof.proof.public_inputs); diff --git a/mp2-v1/tests/common/cases/contract.rs b/mp2-v1/tests/common/cases/contract.rs index 9c73640e2..029b11d2c 100644 --- a/mp2-v1/tests/common/cases/contract.rs +++ b/mp2-v1/tests/common/cases/contract.rs @@ -4,13 +4,7 @@ use super::slot_info::{LargeStruct, MappingInfo, StorageSlotMappingKey, StorageS use crate::common::{ bindings::{ eventemitter::EventEmitter::{self, EventEmitterInstance}, - simple::{ - Simple, - Simple::{ - MappingChange, MappingOfSingleValueMappingsChange, MappingOfStructMappingsChange, - MappingOperation, MappingStructChange, - }, - }, + simple::{Simple, Simple::MappingOperation}, }, cases::indexing::ReceiptUpdate, TestContext, @@ -50,6 +44,7 @@ impl Contract { } /// Creates a new [`Contract`] from an [`Address`] and `chain_id` + #[allow(dead_code)] pub fn new(address: Address, chain_id: u64) -> Contract { Contract { address, chain_id } } diff --git a/mp2-v1/tests/common/cases/indexing.rs b/mp2-v1/tests/common/cases/indexing.rs index b30e64b80..a3ebc66f6 100644 --- a/mp2-v1/tests/common/cases/indexing.rs +++ b/mp2-v1/tests/common/cases/indexing.rs @@ -26,10 +26,7 @@ use rand::{thread_rng, Rng}; use ryhope::storage::RoEpochKvStorage; use crate::common::{ - bindings::{ - eventemitter::EventEmitter::{self, EventEmitterInstance}, - simple::Simple::{self, MappingChange, MappingOperation, SimpleInstance}, - }, + bindings::eventemitter::EventEmitter::{self, EventEmitterInstance}, cases::{ contract::Contract, identifier_for_mapping_key_column, @@ -54,7 +51,7 @@ use 
crate::common::{ use alloy::{ contract::private::{Network, Provider, Transport}, network::{Ethereum, TransactionBuilder}, - primitives::{Address, U256}, + primitives::U256, providers::{ext::AnvilApi, ProviderBuilder, RootProvider}, sol_types::SolEvent, }; @@ -150,7 +147,7 @@ impl TableIndexing { }) .collect_vec(); let (mapping_secondary_column, mapping_rest_columns, row_unique_id, mapping_source) = { - let mut slot_inputs = LargeStruct::slot_inputs(MAPPING_STRUCT_SLOT as u8); + let slot_inputs = LargeStruct::slot_inputs(MAPPING_STRUCT_SLOT as u8); let key_id = identifier_for_mapping_key_column( MAPPING_STRUCT_SLOT as u8, &contract_address, @@ -168,7 +165,7 @@ impl TableIndexing { let mapping_index = MappingIndex::OuterKey(key_id); let source = MappingExtractionArgs::new( MAPPING_STRUCT_SLOT as u8, - mapping_index.clone(), + mapping_index, slot_inputs.clone(), None, ); @@ -183,9 +180,8 @@ impl TableIndexing { }; let rest_columns = value_ids .into_iter() - .zip(slot_inputs.iter()) .enumerate() - .map(|(i, (id, slot_input))| TableColumn { + .map(|(i, id)| TableColumn { name: format!("{MAPPING_VALUE_COLUMN}_{i}"), index: IndexType::None, multiplier: false, @@ -201,7 +197,7 @@ impl TableIndexing { .position(|id| id == &secondary_value_id) .unwrap(); let secondary_id = value_ids.remove(pos); - let secondary_slot_input = slot_inputs.remove(pos); + let secondary_column = TableColumn { name: MAPPING_VALUE_COLUMN.to_string(), index: IndexType::Secondary, @@ -210,9 +206,8 @@ impl TableIndexing { }; let mut rest_columns = value_ids .into_iter() - .zip(slot_inputs.iter()) .enumerate() - .map(|(i, (id, slot_input))| TableColumn { + .map(|(i, id)| TableColumn { name: format!("{MAPPING_VALUE_COLUMN}_{i}"), index: IndexType::None, multiplier: false, @@ -390,8 +385,8 @@ impl TableIndexing { let mapping_index = MappingIndex::OuterKey(key_id); let mut source = MappingExtractionArgs::::new( MAPPING_SLOT, - mapping_index.clone(), - vec![slot_input.clone()], + mapping_index, + 
vec![slot_input], Some(LengthExtractionArgs { slot: LENGTH_SLOT, value: LENGTH_VALUE, @@ -405,14 +400,7 @@ impl TableIndexing { let table_row_updates = source.init_contract_data(ctx, &contract).await; - let table = build_mapping_table( - ctx, - &mapping_index, - key_id, - vec![value_id], - vec![slot_input], - ) - .await; + let table = build_mapping_table(ctx, &mapping_index, key_id, vec![value_id]).await; let value_column = table.columns.rest[0].name.clone(); Ok(( @@ -459,14 +447,14 @@ impl TableIndexing { let mapping_index = MappingIndex::Value(value_ids[1]); let mut source = MappingExtractionArgs::::new( MAPPING_STRUCT_SLOT as u8, - mapping_index.clone(), + mapping_index, slot_inputs.clone(), None, ); let table_row_updates = source.init_contract_data(ctx, &contract).await; - let table = build_mapping_table(ctx, &mapping_index, key_id, value_ids, slot_inputs).await; + let table = build_mapping_table(ctx, &mapping_index, key_id, value_ids).await; let value_column = table.columns.rest[0].name.clone(); Ok(( @@ -515,8 +503,8 @@ impl TableIndexing { let index = MappingIndex::InnerKey(inner_key_id); let mut source = MappingExtractionArgs::::new( MAPPING_OF_SINGLE_VALUE_MAPPINGS_SLOT, - index.clone(), - vec![slot_input.clone()], + index, + vec![slot_input], None, ); @@ -528,7 +516,6 @@ impl TableIndexing { outer_key_id, inner_key_id, vec![value_id], - vec![slot_input], ) .await; let value_column = table.columns.rest[0].name.clone(); @@ -583,22 +570,16 @@ impl TableIndexing { let index = MappingIndex::Value(value_ids[1]); let mut source = MappingExtractionArgs::::new( MAPPING_OF_STRUCT_MAPPINGS_SLOT, - index.clone(), + index, slot_inputs.clone(), None, ); let table_row_updates = source.init_contract_data(ctx, &contract).await; - let table = build_mapping_of_mappings_table( - ctx, - &index, - outer_key_id, - inner_key_id, - value_ids, - slot_inputs, - ) - .await; + let table = + build_mapping_of_mappings_table(ctx, &index, outer_key_id, inner_key_id, value_ids) + .await; 
let value_column = table.columns.rest[0].name.clone(); Ok(( @@ -744,7 +725,24 @@ impl TableIndexing { if table_row_updates.is_empty() { continue; } + // If we are dealing with receipts we need to remove everything already in the row tree let bn = ctx.block_number().await as BlockPrimaryIndex; + + let table_row_updates = if let ChangeType::Receipt(..) = ut { + let current_row_epoch = self.table.row.current_epoch(); + let current_row_keys = self + .table + .row + .keys_at(current_row_epoch) + .await + .into_iter() + .map(TableRowUpdate::::Deletion) + .collect::>(); + [current_row_keys, table_row_updates].concat() + } else { + table_row_updates + }; + log::info!("Applying follow up updates to contract done - now at block {bn}",); // we first run the initial preprocessing and db creation. // NOTE: we don't show copy on write here - the fact of only reproving what has been @@ -1012,7 +1010,6 @@ async fn build_mapping_table( mapping_index: &MappingIndex, key_id: u64, mut value_ids: Vec, - mut slot_inputs: Vec, ) -> Table { // Construct the table columns. 
let (secondary_column, rest_columns) = match mapping_index { @@ -1042,7 +1039,7 @@ async fn build_mapping_table( .position(|id| id == secondary_value_id) .unwrap(); let secondary_id = value_ids.remove(pos); - let secondary_slot_input = slot_inputs.remove(pos); + let secondary_column = TableColumn { name: MAPPING_VALUE_COLUMN.to_string(), index: IndexType::Secondary, @@ -1051,9 +1048,8 @@ async fn build_mapping_table( }; let mut rest_columns = value_ids .into_iter() - .zip(slot_inputs.iter()) .enumerate() - .map(|(i, (id, slot_input))| TableColumn { + .map(|(i, id)| TableColumn { name: format!("{MAPPING_VALUE_COLUMN}_{i}"), index: IndexType::None, multiplier: false, @@ -1105,13 +1101,11 @@ async fn build_mapping_of_mappings_table( outer_key_id: u64, inner_key_id: u64, value_ids: Vec, - slot_inputs: Vec, ) -> Table { let mut rest_columns = value_ids .into_iter() - .zip(slot_inputs.iter()) .enumerate() - .map(|(i, (id, slot_input))| TableColumn { + .map(|(i, id)| TableColumn { name: format!("{MAPPING_OF_MAPPINGS_VALUE_COLUMN}_{i}"), index: IndexType::None, multiplier: false, diff --git a/mp2-v1/tests/common/cases/slot_info.rs b/mp2-v1/tests/common/cases/slot_info.rs index 0e1465999..362b341bd 100644 --- a/mp2-v1/tests/common/cases/slot_info.rs +++ b/mp2-v1/tests/common/cases/slot_info.rs @@ -323,9 +323,7 @@ impl MappingInfo for StructMapping { /// Abstract for the mapping key of the storage slot. /// It could be a normal mapping key, or a pair of keys which identifies the /// mapping of mapppings key. -pub(crate) trait StorageSlotMappingKey: - Clone + Debug + PartialOrd + Ord + Send + Sync -{ +pub trait StorageSlotMappingKey: Clone + Debug + PartialOrd + Ord + Send + Sync { /// This is what the keys actually look like. 
type Key; diff --git a/mp2-v1/tests/common/cases/table_source.rs b/mp2-v1/tests/common/cases/table_source.rs index da3c5a5ad..fad39b821 100644 --- a/mp2-v1/tests/common/cases/table_source.rs +++ b/mp2-v1/tests/common/cases/table_source.rs @@ -59,7 +59,7 @@ use crate::common::{ proof_storage::{ProofKey, ProofStorage}, rowtree::SecondaryIndexCell, table::CellsUpdate, - Deserialize, MetadataHash, Serialize, TestContext, TEST_MAX_COLUMNS, TEST_MAX_FIELD_PER_EVM, + Deserialize, MetadataHash, Serialize, TestContext, }; use super::{ @@ -681,7 +681,7 @@ pub trait ReceiptExtractionArgs: fn get_index(&self) -> u64; - fn to_table_rows( + fn to_table_rows( proof_infos: &[ReceiptProofInfo], event: &EventLogInfo<{ Self::NO_TOPICS }, { Self::MAX_DATA }>, block: PrimaryIndex, @@ -728,6 +728,7 @@ pub trait ReceiptExtractionArgs: } }) .map(|log| { + let log = log.clone(); let (topics, data) = log.data.split(); let topics_cells = topics .into_iter() @@ -755,7 +756,7 @@ pub trait ReceiptExtractionArgs: previous_row_key: RowTreeKey::default(), new_row_key: RowTreeKey::from(&secondary), updated_cells: [vec![gas_used_cell], topics_cells, data_cells].concat(), - primary: block, + primary: block.clone(), }; TableRowUpdate::::Insertion(collection, secondary) @@ -899,7 +900,7 @@ where .on_http(ctx.rpc_url.parse().unwrap()); let value_proof = event - .prove_value_extraction( + .prove_value_extraction::<32, _>( contract.address(), bn as u64, ctx.params().get_value_extraction_params(), @@ -1010,8 +1011,7 @@ impl SingleExtractionArgs { } pub(crate) fn secondary_index_slot_input(&self) -> Option { - self.secondary_index - .map(|idx| self.slot_inputs[idx].clone()) + self.secondary_index.map(|idx| self.slot_inputs[idx]) } pub(crate) fn rest_column_slot_inputs(&self) -> Vec { @@ -1426,7 +1426,7 @@ where contract: &'a Contract, c: ChangeType, ) -> BoxFuture<'a, Vec>> { - async { + async move { // NOTE 1: The first part is just trying to construct the right input to simulate any // changes on a 
mapping. This is mostly irrelevant for dist system but needs to manually // construct our test cases here. The second part is more interesting as it looks at @@ -1787,8 +1787,8 @@ fn evm_word_column_info(table_info: &[ExtractedColumnInfo]) -> Vec| cols.push(col.clone())) - .or_insert(vec![col.clone()]); + .and_modify(|cols: &mut Vec<_>| cols.push(*col)) + .or_insert(vec![*col]); }); column_info_map diff --git a/mp2-v1/tests/common/mod.rs b/mp2-v1/tests/common/mod.rs index 477f6c935..7e53e0f50 100644 --- a/mp2-v1/tests/common/mod.rs +++ b/mp2-v1/tests/common/mod.rs @@ -32,8 +32,6 @@ use plonky2::plonk::config::GenericHashOut; /// Testing maximum columns pub(crate) const TEST_MAX_COLUMNS: usize = 32; -/// Testing maximum fields for each EVM word -pub(crate) const TEST_MAX_FIELD_PER_EVM: usize = 32; type ColumnIdentifier = u64; type PublicParameters = mp2_v1::api::PublicParameters; diff --git a/mp2-v1/tests/common/storage_trie.rs b/mp2-v1/tests/common/storage_trie.rs index 760ed89e3..8cdebd755 100644 --- a/mp2-v1/tests/common/storage_trie.rs +++ b/mp2-v1/tests/common/storage_trie.rs @@ -1,14 +1,11 @@ //! 
Storage trie for proving tests -use super::{ - benchmarker::Benchmarker, PublicParameters, TestContext, TEST_MAX_COLUMNS, - TEST_MAX_FIELD_PER_EVM, -}; +use super::{benchmarker::Benchmarker, PublicParameters, TestContext}; use alloy::{ eips::BlockNumberOrTag, primitives::{Address, U256}, }; -use itertools::Itertools; + use log::debug; use mp2_common::{ eth::{ProofQuery, StorageSlot, StorageSlotNode}, diff --git a/mp2-v1/tests/common/table.rs b/mp2-v1/tests/common/table.rs index 26bf07144..d6edf9e82 100644 --- a/mp2-v1/tests/common/table.rs +++ b/mp2-v1/tests/common/table.rs @@ -15,7 +15,7 @@ use mp2_v1::indexing::{ ColumnID, }; use parsil::symbols::{ColumnKind, ContextProvider, ZkColumn, ZkTable}; -use plonky2::field::types::PrimeField64; + use ryhope::{ storage::{ pgsql::{SqlServerConnection, SqlStorageSettings}, @@ -108,7 +108,7 @@ impl TableColumns { .iter() .chain(once(&self.secondary)) .find(|c| c.identifier() == identifier) - .unwrap_or_else(|| panic!("can't find cell from identifier {}", identifier)) + .expect("can't find cell from identifier") .clone() } pub fn ordered_cells( From 01793abcd8695a17453f6fa6c0435d67e719adea Mon Sep 17 00:00:00 2001 From: Zack Youell Date: Mon, 13 Jan 2025 13:47:46 +0000 Subject: [PATCH 31/47] Fixed slot input lengths for test --- mp2-v1/tests/common/cases/indexing.rs | 2 +- mp2-v1/tests/common/cases/slot_info.rs | 6 +++--- mp2-v1/tests/common/cases/table_source.rs | 6 +++--- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/mp2-v1/tests/common/cases/indexing.rs b/mp2-v1/tests/common/cases/indexing.rs index a3ebc66f6..e7e088ba3 100644 --- a/mp2-v1/tests/common/cases/indexing.rs +++ b/mp2-v1/tests/common/cases/indexing.rs @@ -98,7 +98,7 @@ pub(crate) const MAPPING_OF_MAPPINGS_VALUE_COLUMN: &str = "mapping_of_mappings_v /// Construct the all slot inputs for single value testing. 
fn single_value_slot_inputs() -> Vec { let mut slot_inputs = SINGLE_SLOTS - .map(|slot| SlotInput::new(slot, 0, 256, 0)) + .map(|slot| SlotInput::new(slot, 0, 32, 0)) .to_vec(); // Add the Struct single slots. diff --git a/mp2-v1/tests/common/cases/slot_info.rs b/mp2-v1/tests/common/cases/slot_info.rs index 362b341bd..43eab11ba 100644 --- a/mp2-v1/tests/common/cases/slot_info.rs +++ b/mp2-v1/tests/common/cases/slot_info.rs @@ -572,10 +572,10 @@ impl LargeStruct { pub fn slot_inputs(slot: u8) -> Vec { vec![ - SlotInput::new(slot, 0, 256, 0), + SlotInput::new(slot, 0, 32, 0), // Big-endian layout - SlotInput::new(slot, 16, 128, 1), - SlotInput::new(slot, 0, 128, 1), + SlotInput::new(slot, 16, 16, 1), + SlotInput::new(slot, 0, 16, 1), ] } } diff --git a/mp2-v1/tests/common/cases/table_source.rs b/mp2-v1/tests/common/cases/table_source.rs index fad39b821..661caa081 100644 --- a/mp2-v1/tests/common/cases/table_source.rs +++ b/mp2-v1/tests/common/cases/table_source.rs @@ -85,10 +85,10 @@ impl SlotEvmWordColumns { fn new(column_info: Vec) -> Self { // Ensure the column information should have the same slot and EVM word. - let slot = column_info[0].extraction_id()[0].0 as u8; + let slot = column_info[0].extraction_id()[7].0 as u8; let evm_word = column_info[0].location_offset().0 as u32; column_info[1..].iter().for_each(|col| { - let col_slot = col.extraction_id()[0].0 as u8; + let col_slot = col.extraction_id()[7].0 as u8; let col_word = col.location_offset().0 as u32; assert_eq!(col_slot, slot); assert_eq!(col_word, evm_word); @@ -98,7 +98,7 @@ impl SlotEvmWordColumns { } fn slot(&self) -> u8 { // The columns should have the same slot. - u8::try_from(self.0[0].extraction_id()[0].to_canonical_u64()).unwrap() + u8::try_from(self.0[0].extraction_id()[7].to_canonical_u64()).unwrap() } fn evm_word(&self) -> u32 { // The columns should have the same EVM word. 
From 14cf959dc7f70b76a44149eeb03f302a93f180f3 Mon Sep 17 00:00:00 2001 From: Zack Youell Date: Mon, 13 Jan 2025 18:23:43 +0000 Subject: [PATCH 32/47] StorageSlotInfo Single Value fix --- mp2-v1/src/api.rs | 18 ++++-- .../gadgets/metadata_gadget.rs | 11 ++-- mp2-v1/src/values_extraction/leaf_mapping.rs | 2 +- .../leaf_mapping_of_mappings.rs | 2 +- mp2-v1/src/values_extraction/leaf_single.rs | 2 +- mp2-v1/test-contracts/src/Simple.sol | 62 ------------------- mp2-v1/tests/common/cases/indexing.rs | 15 +++-- mp2-v1/tests/common/cases/table_source.rs | 27 +------- 8 files changed, 30 insertions(+), 109 deletions(-) diff --git a/mp2-v1/src/api.rs b/mp2-v1/src/api.rs index d3020b4ed..42768f56b 100644 --- a/mp2-v1/src/api.rs +++ b/mp2-v1/src/api.rs @@ -382,8 +382,8 @@ pub fn merge_metadata_hash( table_a: SlotInputs, table_b: SlotInputs, ) -> MetadataHash { - let md_a = value_metadata(table_a, &contract, chain_id, extra.clone()); - let md_b = value_metadata(table_b, &contract, chain_id, extra); + let (md_a, _) = value_metadata(table_a, &contract, chain_id, extra.clone()); + let (md_b, _) = value_metadata(table_b, &contract, chain_id, extra); let combined = map_to_curve_point(&md_a.to_fields()) + map_to_curve_point(&md_b.to_fields()); let contract_digest = contract_metadata_digest(&contract); // the block id is only added at the index tree level, the rest is combined at the final @@ -393,7 +393,12 @@ pub fn merge_metadata_hash( // NOTE: the block id is added at the end of the digest computation only once - this returns only // the part without the block id -fn value_metadata(inputs: SlotInputs, contract: &Address, chain_id: u64, extra: Vec) -> Digest { +fn value_metadata( + inputs: SlotInputs, + contract: &Address, + chain_id: u64, + extra: Vec, +) -> (Digest, Digest) { let column_metadata = inputs.to_column_metadata(contract, chain_id, extra.clone()); let md = column_metadata.digest(); @@ -408,7 +413,7 @@ fn value_metadata(inputs: SlotInputs, contract: &Address, chain_id: 
u64, extra: length_metadata_digest(length_slot, mapping_slot) } }; - md + length_digest + (md, length_digest) } /// Compute the table information for the value columns. @@ -452,7 +457,8 @@ pub fn metadata_hash( extra: Vec, ) -> MetadataHash { // closure to compute the metadata digest associated to a mapping variable - let value_digest = value_metadata(slot_input, contract_address, chain_id, extra); + let (value_digest, length_digest) = + value_metadata(slot_input, contract_address, chain_id, extra); // Correspond to the computation of final extraction base circuit. let value_digest = map_to_curve_point(&value_digest.to_fields()); // add contract digest @@ -464,5 +470,5 @@ pub fn metadata_hash( (contract_digest + value_digest).to_weierstrass(), ); // compute final hash - combine_digest_and_block(contract_digest + value_digest) + combine_digest_and_block(contract_digest + value_digest + length_digest) } diff --git a/mp2-v1/src/values_extraction/gadgets/metadata_gadget.rs b/mp2-v1/src/values_extraction/gadgets/metadata_gadget.rs index 486f07f11..d15b55db8 100644 --- a/mp2-v1/src/values_extraction/gadgets/metadata_gadget.rs +++ b/mp2-v1/src/values_extraction/gadgets/metadata_gadget.rs @@ -514,10 +514,14 @@ where // We only extract if we are in the correct location AND `column.is_extracted` is true let correct_location = b.and(correct_offset, correct_extraction_id); + let not_selector = b.not(selector); + // We also make sure we should actually extract for this column, otherwise we have issues + // when indexing into the array. + let correct = b.and(not_selector, correct_location); // last_byte_found lets us know whether we continue extracting or not. 
// Hence if we want to extract values `extract` will be true so `last_byte_found` should be false - let mut last_byte_found = b.not(correct_location); + let mut last_byte_found = b.not(correct); // Even if the constant `VALUE_LEN` is larger than 32 this is the maximum size in bytes // of data that we extract per column @@ -526,10 +530,9 @@ where // We iterate over the result bytes in reverse order, the first element that we want to access // from `value` is `value[MAPPING_LEAF_VALUE_LEN - column.byte_offset - column.length]` and then // we keep extracting until we reach `value[column.byte_offset]`. - // let mapping_leaf_val_len = b.constant(F::from_canonical_usize(VALUE_LEN)); + let last_byte_offset = b.add(column.byte_offset, column.length); - // let to_sub = b.sub(mapping_leaf_val_len, last_byte_offset); - // let last_index = b.constant(F::from_canonical_usize(VALUE_LEN - 1)); + let start = b.sub(last_byte_offset, one); result_bytes diff --git a/mp2-v1/src/values_extraction/leaf_mapping.rs b/mp2-v1/src/values_extraction/leaf_mapping.rs index 10d23040a..37643936c 100644 --- a/mp2-v1/src/values_extraction/leaf_mapping.rs +++ b/mp2-v1/src/values_extraction/leaf_mapping.rs @@ -106,7 +106,7 @@ where let (input_metadata_digest, input_value_digest) = metadata.inputs_digests(b, &[packed_mapping_key.clone()]); - let (extracted_metadata_digest, extracted_value_digest) = metadata.extracted_digests( + let (extracted_metadata_digest, extracted_value_digest) = metadata.extracted_digests::<32>( b, &value, &u256_no_off, diff --git a/mp2-v1/src/values_extraction/leaf_mapping_of_mappings.rs b/mp2-v1/src/values_extraction/leaf_mapping_of_mappings.rs index 1bb2cbfc9..f48778b54 100644 --- a/mp2-v1/src/values_extraction/leaf_mapping_of_mappings.rs +++ b/mp2-v1/src/values_extraction/leaf_mapping_of_mappings.rs @@ -110,7 +110,7 @@ where .map(|key| Array::::pack(key, b, Endianness::Big)); let (input_metadata_digest, input_value_digest) = metadata.inputs_digests(b, &input_values); - let 
(extracted_metadata_digest, extracted_value_digest) = metadata.extracted_digests( + let (extracted_metadata_digest, extracted_value_digest) = metadata.extracted_digests::<32>( b, &value, &u256_no_off, diff --git a/mp2-v1/src/values_extraction/leaf_single.rs b/mp2-v1/src/values_extraction/leaf_single.rs index a119995cd..57a2b827c 100644 --- a/mp2-v1/src/values_extraction/leaf_single.rs +++ b/mp2-v1/src/values_extraction/leaf_single.rs @@ -92,7 +92,7 @@ where let value: Array = left_pad_leaf_value(b, &wires.value); // Compute the metadata digest and the value digest - let (metadata_digest, value_digest) = metadata.extracted_digests( + let (metadata_digest, value_digest) = metadata.extracted_digests::<32>( b, &value, &u256_no_off, diff --git a/mp2-v1/test-contracts/src/Simple.sol b/mp2-v1/test-contracts/src/Simple.sol index bdc651c8b..8f941fdec 100644 --- a/mp2-v1/test-contracts/src/Simple.sol +++ b/mp2-v1/test-contracts/src/Simple.sol @@ -127,24 +127,6 @@ contract Simple { structMapping[_key] = LargeStruct(_field1, _field2, _field3); } - // function changeMappingStruct(MappingStructChange[] memory changes) public { - // for (uint256 i = 0; i < changes.length; i++) { - // if (changes[i].operation == MappingOperation.Deletion) { - // delete structMapping[changes[i].key]; - // } else if ( - // changes[i].operation == MappingOperation.Insertion || - // changes[i].operation == MappingOperation.Update - // ) { - // setMappingStruct( - // changes[i].key, - // changes[i].field1, - // changes[i].field2, - // changes[i].field3 - // ); - // } - // } - // } - function changeMapping(MappingStructChange[] memory changes) public { for (uint256 i = 0; i < changes.length; i++) { if (changes[i].operation == MappingOperation.Deletion) { @@ -216,27 +198,6 @@ contract Simple { mappingOfSingleValueMappings[outerKey][innerKey] = value; } - // function changeMappingOfSingleValueMappings( - // MappingOfSingleValueMappingsChange[] memory changes - // ) public { - // for (uint256 i = 0; i < 
changes.length; i++) { - // if (changes[i].operation == MappingOperation.Deletion) { - // delete mappingOfSingleValueMappings[changes[i].outerKey][ - // changes[i].innerKey - // ]; - // } else if ( - // changes[i].operation == MappingOperation.Insertion || - // changes[i].operation == MappingOperation.Update - // ) { - // setMappingOfSingleValueMappings( - // changes[i].outerKey, - // changes[i].innerKey, - // changes[i].value - // ); - // } - // } - // } - // Set mapping of struct mappings. function setMappingOfStructMappings( uint256 outerKey, @@ -251,27 +212,4 @@ contract Simple { field3 ); } - - // function changeMappingOfStructMappings( - // MappingOfStructMappingsChange[] memory changes - // ) public { - // for (uint256 i = 0; i < changes.length; i++) { - // if (changes[i].operation == MappingOperation.Deletion) { - // delete mappingOfStructMappings[changes[i].outerKey][ - // changes[i].innerKey - // ]; - // } else if ( - // changes[i].operation == MappingOperation.Insertion || - // changes[i].operation == MappingOperation.Update - // ) { - // setMappingOfStructMappings( - // changes[i].outerKey, - // changes[i].innerKey, - // changes[i].field1, - // changes[i].field2, - // changes[i].field3 - // ); - // } - // } - // } } diff --git a/mp2-v1/tests/common/cases/indexing.rs b/mp2-v1/tests/common/cases/indexing.rs index e7e088ba3..c305d99ad 100644 --- a/mp2-v1/tests/common/cases/indexing.rs +++ b/mp2-v1/tests/common/cases/indexing.rs @@ -34,8 +34,8 @@ use crate::common::{ LargeStruct, SimpleMapping, SimpleNestedMapping, StructMapping, StructNestedMapping, }, table_source::{ - ContractExtractionArgs, LengthExtractionArgs, MappingExtractionArgs, MappingIndex, - MergeSource, ReceiptExtractionArgs, SingleExtractionArgs, TableSource, + ContractExtractionArgs, MappingExtractionArgs, MappingIndex, MergeSource, + ReceiptExtractionArgs, SingleExtractionArgs, TableSource, }, TableIndexing, }, @@ -64,9 +64,11 @@ pub(crate) const SINGLE_SLOTS: [u8; 4] = [0, 1, 2, 3]; const 
MAPPING_SLOT: u8 = 4; /// Test slot for length extraction +#[allow(dead_code)] const LENGTH_SLOT: u8 = 1; /// Test length value for length extraction +#[allow(dead_code)] const LENGTH_VALUE: u8 = 2; /// Test slot for contract extraction @@ -375,7 +377,7 @@ impl TableIndexing { let contract_address = contract.address; let chain_id = contract.chain_id; - let slot_input = SlotInput::new(MAPPING_SLOT, 0, 256, 0); + let slot_input = SlotInput::new(MAPPING_SLOT, 0, 32, 0); let key_id = identifier_for_mapping_key_column(MAPPING_SLOT, &contract_address, chain_id, vec![]); let value_id = @@ -387,10 +389,7 @@ impl TableIndexing { MAPPING_SLOT, mapping_index, vec![slot_input], - Some(LengthExtractionArgs { - slot: LENGTH_SLOT, - value: LENGTH_VALUE, - }), + None, ); let contract = Contract { @@ -482,7 +481,7 @@ impl TableIndexing { let contract_address = contract.address; let chain_id = contract.chain_id; - let slot_input = SlotInput::new(MAPPING_OF_SINGLE_VALUE_MAPPINGS_SLOT, 0, 256, 0); + let slot_input = SlotInput::new(MAPPING_OF_SINGLE_VALUE_MAPPINGS_SLOT, 0, 32, 0); let outer_key_id = identifier_for_outer_mapping_key_column( MAPPING_OF_SINGLE_VALUE_MAPPINGS_SLOT, &contract_address, diff --git a/mp2-v1/tests/common/cases/table_source.rs b/mp2-v1/tests/common/cases/table_source.rs index 661caa081..7d6b53576 100644 --- a/mp2-v1/tests/common/cases/table_source.rs +++ b/mp2-v1/tests/common/cases/table_source.rs @@ -78,7 +78,7 @@ fn metadata_hash( } /// Save the columns information of same slot and EVM word. 
-#[derive(Debug)] +#[derive(Debug, Clone)] struct SlotEvmWordColumns(Vec); impl SlotEvmWordColumns { @@ -1285,31 +1285,6 @@ impl SingleExtractionArgs { } } -// /// Mapping extraction arguments -// #[derive(Serialize, Deserialize, Debug, Hash, Eq, PartialEq, Clone)] -// pub(crate) struct MappingExtractionArgs -// where -// K: StorageSlotMappingKey, -// V: StorageSlotValue, -// { -// /// Mapping slot number -// slot: u8, -// /// Mapping index type -// index: MappingIndex, -// /// Slot input information -// slot_inputs: Vec, -// /// Mapping keys: they are useful for two things: -// /// * doing some controlled changes on the smart contract, since if we want to do an update we -// /// need to know an existing key -// /// * doing the MPT proofs over, since this test doesn't implement the copy on write for MPT -// /// (yet), we're just recomputing all the proofs at every block and we need the keys for that. -// mapping_keys: BTreeSet, -// /// The optional length extraction parameters -// length_args: Option, -// /// Phantom -// _phantom: PhantomData<(K, V)>, -// } - #[derive(Serialize, Deserialize, Debug, Hash, Eq, PartialEq, Clone)] pub(crate) struct MappingExtractionArgs { /// Mapping slot number From 317351f7b68425a27567a60da976174f7ac9e938 Mon Sep 17 00:00:00 2001 From: Zack Youell Date: Tue, 21 Jan 2025 12:29:42 +0000 Subject: [PATCH 33/47] WIP: Review comments --- Cargo.lock | 1 + mp2-common/src/eth.rs | 122 ++++++++++--- mp2-common/src/mpt_sequential/key.rs | 10 -- .../src/mpt_sequential/leaf_or_extension.rs | 2 +- mp2-common/src/mpt_sequential/mod.rs | 8 +- mp2-common/src/utils.rs | 35 ---- mp2-test/src/circuit.rs | 2 +- mp2-test/src/mpt_sequential.rs | 32 ++-- mp2-v1/src/api.rs | 6 + mp2-v1/src/contract_extraction/branch.rs | 8 +- .../src/final_extraction/receipt_circuit.rs | 19 +- mp2-v1/src/lib.rs | 11 +- mp2-v1/src/values_extraction/api.rs | 49 +++--- .../values_extraction/gadgets/column_info.rs | 125 ++++++++++--- .../gadgets/metadata_gadget.rs | 165 
++++-------------- mp2-v1/src/values_extraction/leaf_receipt.rs | 62 ++----- mp2-v1/src/values_extraction/mod.rs | 110 ++---------- mp2-v1/src/values_extraction/planner.rs | 10 +- mp2-v1/tests/common/cases/indexing.rs | 98 ++++++++++- mp2-v1/tests/common/cases/table_source.rs | 46 ++--- mp2-v1/tests/common/mod.rs | 58 ------ verifiable-db/Cargo.toml | 1 + 22 files changed, 437 insertions(+), 543 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 6e8b712f9..424dee27f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -7004,6 +7004,7 @@ dependencies = [ "log", "mp2_common", "mp2_test", + "mp2_v1", "num", "plonky2", "plonky2_crypto", diff --git a/mp2-common/src/eth.rs b/mp2-common/src/eth.rs index 354b8d358..ab78d7dbd 100644 --- a/mp2-common/src/eth.rs +++ b/mp2-common/src/eth.rs @@ -4,7 +4,7 @@ use alloy::{ consensus::{ReceiptEnvelope as CRE, ReceiptWithBloom}, eips::BlockNumberOrTag, network::{eip2718::Encodable2718, BlockResponse}, - primitives::{Address, B256, U256}, + primitives::{Address, Log, B256, U256}, providers::{Provider, RootProvider}, rlp::{Decodable, Encodable as AlloyEncodable}, rpc::types::{ @@ -16,8 +16,7 @@ use anyhow::{anyhow, bail, Context, Result}; use eth_trie::{EthTrie, MemoryDB, Trie}; use ethereum_types::H256; use itertools::Itertools; -use log::debug; -use log::warn; +use log::{debug, warn}; use rlp::{Encodable, Rlp}; use serde::{Deserialize, Serialize}; @@ -39,7 +38,9 @@ use crate::{ const RETRY_NUM: usize = 3; /// The maximum size an additional piece of data can be in bytes. -const MAX_DATA_SIZE: usize = 32; +/// It should always be a multiple of 32 since Solidity event data encodes every object in 32 byte chunks +/// regardless of its true size. +const MAX_RECEIPT_DATA_SIZE: usize = 32; /// The size of an event topic rlp encoded. const ENCODED_TOPIC_SIZE: usize = 33; @@ -174,13 +175,13 @@ pub struct ProofQuery { /// Struct used for storing relevant data to query blocks as they come in. 
/// The constant `NO_TOPICS` is the number of indexed items in the event (excluding the event signature) and -/// `MAX_DATA` is the number of 32 byte words of data we expect in addition to the topics. +/// `MAX_DATA_WORDS` is the number of 32 byte words of data we want to extract in addition to the topics. #[derive(Debug, Clone)] -pub struct ReceiptQuery { +pub struct ReceiptQuery { /// The contract that emits the event we care about pub contract: Address, /// The signature of the event we wish to monitor for - pub event: EventLogInfo, + pub event: EventLogInfo, } /// Struct used to store all the information needed for proving a leaf is in the Receipt Trie. @@ -196,7 +197,7 @@ pub struct ReceiptProofInfo { /// Contains all the information for an [`Event`] in rlp form #[derive(Debug, Clone, Copy, Serialize, Deserialize, Hash, PartialEq, Eq)] -pub struct EventLogInfo { +pub struct EventLogInfo { /// Size in bytes of the whole log rlp encoded pub size: usize, /// Packed contract address to check @@ -218,25 +219,25 @@ pub struct EventLogInfo { serialize_with = "serialize_long_array", deserialize_with = "deserialize_long_array" )] - pub data: [usize; MAX_DATA], + pub data: [usize; MAX_DATA_WORDS], } -impl EventLogInfo { +impl EventLogInfo { /// Create a new instance from a contract [`Address`] and a [`str`] that is the event signature pub fn new(contract: Address, event_signature: &str) -> Self { // To calculate the total size of the log rlp encoded we use the fact that the address takes 21 bytes to encode, topics - // take 33 bytes each to incode and form a list that has length between 33 bytes and 132 bytes and data is a string that has 32 * MAX_DATA length + // take 33 bytes each to incode and form a list that has length between 33 bytes and 132 bytes and data is a string that has 32 * MAX_DATA_WORDS length // If we have more than one topic that is not the event signature the rlp encoding is a list that is over 55 bytes whose total length can be encoded in one byte, 
so the header length is 2 // Otherwise its still a list but the header is a single byte. let topics_header_len = alloy::rlp::length_of_length((1 + NO_TOPICS) * ENCODED_TOPIC_SIZE); // If the we have more than one piece of data it is rlp encoded as a string with length greater than 55 bytes - let data_header_len = alloy::rlp::length_of_length(MAX_DATA * MAX_DATA_SIZE); + let data_header_len = alloy::rlp::length_of_length(MAX_DATA_WORDS * MAX_RECEIPT_DATA_SIZE); let address_size = 21; let topics_size = (1 + NO_TOPICS) * ENCODED_TOPIC_SIZE + topics_header_len; - let data_size = MAX_DATA * MAX_DATA_SIZE + data_header_len; + let data_size = MAX_DATA_WORDS * MAX_RECEIPT_DATA_SIZE + data_header_len; let payload_size = address_size + topics_size + data_size; let header_size = alloy::rlp::length_of_length(payload_size); @@ -252,8 +253,8 @@ impl EventLogInfo EventLogInfo Result { + let node_rlp = rlp::Rlp::new(node); + + // The actual receipt data is item 1 in the list + let (receipt_rlp, receipt_off) = node_rlp.at_with_offset(1)?; + // The rlp encoded Receipt is not a list but a string that is formed of the `tx_type` followed by the remaining receipt + // data rlp encoded as a list. We retrieve the payload info so that we can work out relevant offsets later. + let receipt_str_payload = receipt_rlp.payload_info()?; + + // We make a new `Rlp` struct that should be the encoding of the inner list representing the `ReceiptEnvelope` + let receipt_list = rlp::Rlp::new(&receipt_rlp.data()?[1..]); + + // The logs themselves start are the item at index 3 in this list + let (logs_rlp, logs_off) = receipt_list.at_with_offset(3)?; + + // We calculate the offset the that the logs are at from the start of the node + let logs_offset = receipt_off + receipt_str_payload.header_len + 1 + logs_off; + + // Now we produce an iterator over the logs with each logs offset. 
+ let relevant_log_offset = std::iter::successors(Some(0usize), |i| Some(i + 1)) + .map_while(|i| logs_rlp.at_with_offset(i).ok()) + .find_map(|(log_rlp, log_off)| { + let mut bytes = log_rlp.as_raw(); + let log = Log::decode(&mut bytes).ok()?; + + if log.address == self.address + && log + .data + .topics() + .contains(&B256::from(self.event_signature)) + { + Some(logs_offset + log_off) + } else { + None + } + }) + .ok_or(anyhow::anyhow!( + "There were no relevant logs in this transaction" + ))?; + + Ok(relevant_log_offset) + } } /// Represent an intermediate or leaf node of a storage slot in contract. @@ -532,12 +577,12 @@ impl ReceiptProofInfo { } } -impl ReceiptQuery { +impl ReceiptQuery { /// Construct a new [`ReceiptQuery`] from the contract [`Address`] and the event's name as a [`str`]. pub fn new(contract: Address, event_name: &str) -> Self { Self { contract, - event: EventLogInfo::::new(contract, event_name), + event: EventLogInfo::::new(contract, event_name), } } @@ -548,6 +593,20 @@ impl ReceiptQuery, block: BlockNumberOrTag, ) -> Result> { + // Retrieve the transaction indices for the relevant logs + let tx_indices = self.retrieve_tx_indices(provider, block).await?; + + // Construct the Receipt Trie for this block so we can retrieve MPT proofs. + let mut block_util = BlockUtil::fetch(provider, block).await?; + ReceiptQuery::::extract_info(&tx_indices, &mut block_util) + } + + /// Function to query for relevant logs at a specific block, it returns a [`BTreeSet`] of the transaction indices that are relevant. 
+ pub async fn retrieve_tx_indices( + &self, + provider: &RootProvider, + block: BlockNumberOrTag, + ) -> Result> { let filter = Filter::new() .select(block) .address(self.contract) @@ -555,14 +614,20 @@ impl ReceiptQuery, + block_util: &mut BlockUtil, + ) -> Result> { let mpt_root = block_util.receipts_trie.root_hash()?; let proofs = tx_indices - .into_iter() - .map(|tx_index| { + .iter() + .map(|&tx_index| { let key = tx_index.rlp_bytes(); let proof = block_util.receipts_trie.get_proof(&key[..])?; @@ -601,10 +666,15 @@ impl Rlpable for alloy::consensus::Header { } } +#[allow(dead_code)] pub struct BlockUtil { + /// The actual [`Block`] that the rest of the data relates to pub block: Block, + /// The transactions and Receipts in the block paired together pub txs: Vec, + /// The Receipts Trie pub receipts_trie: EthTrie, + /// The Transactions Trie pub transactions_trie: EthTrie, } @@ -686,7 +756,8 @@ impl BlockUtil { // recompute the receipts trie by first converting all receipts form RPC type to consensus type // since in Alloy these are two different types and RLP functions are only implemented for // consensus ones. 
- pub fn check(&mut self) -> Result<()> { + #[cfg(test)] + fn check(&mut self) -> Result<()> { let computed = self.receipts_trie.root_hash()?; let tx_computed = self.transactions_trie.root_hash()?; let expected = self.block.header.receipts_root; @@ -887,9 +958,10 @@ mod test { test_receipt_query_helper::<3, 2>() } - fn test_receipt_query_helper() -> Result<()> { + fn test_receipt_query_helper() -> Result<()> + { // Now for each transaction we fetch the block, then get the MPT Trie proof that the receipt is included and verify it - let test_info = generate_receipt_test_info::(); + let test_info = generate_receipt_test_info::(); let proofs = test_info.proofs(); let query = test_info.query(); for proof in proofs.iter() { diff --git a/mp2-common/src/mpt_sequential/key.rs b/mp2-common/src/mpt_sequential/key.rs index 2a14780d7..45424c623 100644 --- a/mp2-common/src/mpt_sequential/key.rs +++ b/mp2-common/src/mpt_sequential/key.rs @@ -17,22 +17,12 @@ use serde::{Deserialize, Serialize}; pub type MPTKeyWire = MPTKeyWireGeneric; -pub type ReceiptKeyWire = MPTKeyWireGeneric; - -pub const MAX_TX_KEY_NIBBLE_LEN: usize = 4; - /// Calculate the pointer from the MPT key. pub fn mpt_key_ptr(mpt_key: &[u8]) -> usize { let nibbles = Nibbles::from_compact(mpt_key); MAX_KEY_NIBBLE_LEN - 1 - nibbles.nibbles().len() } -/// Calculate the pointer from the MPT key. -pub fn receipt_key_ptr(mpt_key: &[u8]) -> usize { - let nibbles = Nibbles::from_compact(mpt_key); - MAX_TX_KEY_NIBBLE_LEN - 1 - nibbles.nibbles().len() -} - /// A structure that keeps a running pointer to the portion of the key the circuit /// already has proven. 
#[derive(Clone, Debug, Serialize, Deserialize, Eq, PartialEq)] diff --git a/mp2-common/src/mpt_sequential/leaf_or_extension.rs b/mp2-common/src/mpt_sequential/leaf_or_extension.rs index e5c0cf482..385eaec92 100644 --- a/mp2-common/src/mpt_sequential/leaf_or_extension.rs +++ b/mp2-common/src/mpt_sequential/leaf_or_extension.rs @@ -107,7 +107,7 @@ where { /// MPT node pub node: VectorWire, - /// MPT root + /// MPT hash of this node pub root: KeccakWires<{ PAD_LEN(NODE_LEN) }>, /// New MPT key after advancing the current key pub key: MPTKeyWireGeneric, diff --git a/mp2-common/src/mpt_sequential/mod.rs b/mp2-common/src/mpt_sequential/mod.rs index 522c61d67..4ebaa4e81 100644 --- a/mp2-common/src/mpt_sequential/mod.rs +++ b/mp2-common/src/mpt_sequential/mod.rs @@ -32,10 +32,7 @@ mod key; mod leaf_or_extension; pub mod utils; -pub use key::{ - mpt_key_ptr, receipt_key_ptr, MPTKeyWire, MPTKeyWireGeneric, ReceiptKeyWire, - MAX_TX_KEY_NIBBLE_LEN, -}; +pub use key::{mpt_key_ptr, MPTKeyWire, MPTKeyWireGeneric}; pub use leaf_or_extension::{ MPTLeafOrExtensionNode, MPTLeafOrExtensionNodeGeneric, MPTLeafOrExtensionWires, MPTLeafOrExtensionWiresGeneric, MPTReceiptLeafNode, MPTReceiptLeafWiresGeneric, @@ -51,7 +48,8 @@ pub const MAX_LEAF_VALUE_LEN: usize = 33; /// This is the maximum size we allow for the value of Receipt Trie leaf /// currently set to be the same as we allow for a branch node in the Storage Trie -/// minus the length of the key header and key +/// minus the length of the key header and key. We choose this value as any larger would +/// result in an additional keccak permutation, thus increasing the circuit size. 
pub const MAX_RECEIPT_LEAF_VALUE_LEN: usize = 503; /// RLP item size for the extension node diff --git a/mp2-common/src/utils.rs b/mp2-common/src/utils.rs index 3cb6a6bba..af0e59d63 100644 --- a/mp2-common/src/utils.rs +++ b/mp2-common/src/utils.rs @@ -804,41 +804,6 @@ impl, const D: usize> SliceConnector for CircuitBui } } -/// Convert an Uint32 target to Uint8 targets. -pub fn unpack_u32_to_u8_targets, const D: usize>( - b: &mut CircuitBuilder, - u: Target, - endianness: Endianness, -) -> Vec { - let zero = b.zero(); - let mut bits = b.split_le(u, u32::BITS as usize); - match endianness { - Endianness::Big => bits.reverse(), - Endianness::Little => (), - }; - bits.chunks(8) - .map(|chunk| { - // let bits: Box> = match endianness { - let bits: Box> = match endianness { - Endianness::Big => Box::new(chunk.iter()), - Endianness::Little => Box::new(chunk.iter().rev()), - }; - bits.fold(zero, |acc, bit| b.mul_const_add(F::TWO, acc, bit.target)) - }) - .collect() -} - -/// Convert Uint32 targets to Uint8 targets. 
-pub fn unpack_u32s_to_u8_targets, const D: usize>( - b: &mut CircuitBuilder, - u32s: Vec, - endianness: Endianness, -) -> Vec { - u32s.into_iter() - .flat_map(|u| unpack_u32_to_u8_targets(b, u, endianness)) - .collect() -} - #[cfg(test)] mod test { use super::{bits_to_num, Packer, ToFields}; diff --git a/mp2-test/src/circuit.rs b/mp2-test/src/circuit.rs index bed5a98c9..f810dac93 100644 --- a/mp2-test/src/circuit.rs +++ b/mp2-test/src/circuit.rs @@ -85,7 +85,7 @@ pub fn setup_circuit< }; println!("[+] Circuit data built in {:?}s", now.elapsed().as_secs()); - println!("FRI config: {:?}", circuit_data.common.fri_params); + (wires, circuit_data, vcd) } diff --git a/mp2-test/src/mpt_sequential.rs b/mp2-test/src/mpt_sequential.rs index 42e550623..6116712bb 100644 --- a/mp2-test/src/mpt_sequential.rs +++ b/mp2-test/src/mpt_sequential.rs @@ -51,27 +51,29 @@ pub fn generate_random_storage_mpt( } #[derive(Debug, Clone)] -pub struct ReceiptTestInfo { +pub struct ReceiptTestInfo { /// The query which we have returned proofs for - pub query: ReceiptQuery, + pub query: ReceiptQuery, /// The proofs for receipts relating to `self.query` pub proofs: Vec, } -impl ReceiptTestInfo { +impl + ReceiptTestInfo +{ /// Getter for the proofs pub fn proofs(&self) -> Vec { self.proofs.clone() } /// Getter for the query - pub fn query(&self) -> &ReceiptQuery { + pub fn query(&self) -> &ReceiptQuery { &self.query } } /// This function is used so that we can generate a Receipt Trie for a blog with varying transactions /// (i.e. some we are interested in and some we are not). -pub fn generate_receipt_test_info( -) -> ReceiptTestInfo { +pub fn generate_receipt_test_info( +) -> ReceiptTestInfo { // Make a contract that emits events so we can pick up on them sol! 
{ #[allow(missing_docs)] @@ -191,17 +193,9 @@ pub fn generate_receipt_test_info 4 => event_contract.testTwoData().into_transaction_request(), _ => unreachable!(), }; - let random_two = match (0..5).sample_single(&mut rng) { - 0 => event_contract.testEmit().into_transaction_request(), - 1 => event_contract.testTwoIndexed().into_transaction_request(), - 2 => event_contract.testThreeIndexed().into_transaction_request(), - 3 => event_contract.testOneData().into_transaction_request(), - 4 => event_contract.testTwoData().into_transaction_request(), - _ => unreachable!(), - }; + let tx_req = match i % 4 { - 0 => random, - 1 => random_two, + 0 | 1 => random, 2 => other_contract.otherEmit().into_transaction_request(), 3 => other_contract.twoEmits().into_transaction_request(), _ => unreachable!(), @@ -227,7 +221,7 @@ pub fn generate_receipt_test_info // Finally we guarantee at least three of the event we are going to query for for _ in 0..3 { - let queried_event_req = match (NO_TOPICS, MAX_DATA) { + let queried_event_req = match (NO_TOPICS, MAX_DATA_WORDS) { (1, 0) => event_contract.testEmit().into_transaction_request(), (2, 0) => event_contract.testTwoIndexed().into_transaction_request(), (3, 0) => event_contract.testThreeIndexed().into_transaction_request(), @@ -267,7 +261,7 @@ pub fn generate_receipt_test_info // We want to get the event signature so we can make a ReceiptQuery let all_events = EventEmitter::abi::events(); - let events = match (NO_TOPICS, MAX_DATA) { + let events = match (NO_TOPICS, MAX_DATA_WORDS) { (1, 0) => all_events.get("testEvent").unwrap(), (2, 0) => all_events.get("twoIndexed").unwrap(), (3, 0) => all_events.get("threeIndexed").unwrap(), @@ -276,7 +270,7 @@ pub fn generate_receipt_test_info _ => panic!(), }; - let receipt_query = ReceiptQuery::::new( + let receipt_query = ReceiptQuery::::new( *event_contract.address(), &events[0].signature(), ); diff --git a/mp2-v1/src/api.rs b/mp2-v1/src/api.rs index 42768f56b..09254151e 100644 --- 
a/mp2-v1/src/api.rs +++ b/mp2-v1/src/api.rs @@ -43,8 +43,14 @@ pub struct InputNode { // TODO: Specify `NODE_LEN = MAX_LEAF_NODE_LEN` in the generic parameter, // but it could not work for using `MAPPING_LEAF_NODE_LEN` constant directly. +/// We use `512` in as the `NODE_LEN` in [`values_extraction::CircuitInput`] to represent +/// the maximum length of a Receipt Trie leaf node. The Storage trie leaf node size is now hard coded into +/// the circuits. type ValuesExtractionInput = values_extraction::CircuitInput<512, MAX_COLUMNS>; +/// We use `512` in as the `NODE_LEN` in [`values_extraction::PublicParameters`] to represent +/// the maximum length of a Receipt Trie leaf node. The Storage trie leaf node size is now hard coded into +/// the circuits. type ValuesExtractionParameters = values_extraction::PublicParameters<512, MAX_COLUMNS>; fn sanity_check() { diff --git a/mp2-v1/src/contract_extraction/branch.rs b/mp2-v1/src/contract_extraction/branch.rs index b78e7edfa..3fa135261 100644 --- a/mp2-v1/src/contract_extraction/branch.rs +++ b/mp2-v1/src/contract_extraction/branch.rs @@ -55,13 +55,7 @@ where let headers = decode_fixed_list::<_, D, MAX_ITEMS_IN_LIST>(b, &node.arr.arr, zero); let (new_mpt_key, hash, is_valid, _) = - // MPTCircuit::<1, NODE_LEN, MAX_KEY_NIBBLE_LEN> - advance_key_branch( - b, - &node.arr, - &child_proof.mpt_key(), - &headers, - ); + advance_key_branch(b, &node.arr, &child_proof.mpt_key(), &headers); // We always enforce it's a branch node, i.e. that it has 17 entries. 
b.connect(is_valid.target, ttrue.target); diff --git a/mp2-v1/src/final_extraction/receipt_circuit.rs b/mp2-v1/src/final_extraction/receipt_circuit.rs index a1366a2af..ae53aa513 100644 --- a/mp2-v1/src/final_extraction/receipt_circuit.rs +++ b/mp2-v1/src/final_extraction/receipt_circuit.rs @@ -1,10 +1,9 @@ use mp2_common::{ default_config, - keccak::{OutputHash, PACKED_HASH_LEN}, + keccak::OutputHash, proof::{deserialize_proof, verify_proof_fixed_circuit, ProofWithVK}, public_inputs::PublicInputCommon, serialization::{deserialize, serialize}, - u256::UInt256Target, utils::{FromTargets, ToTargets}, C, D, F, }; @@ -19,7 +18,7 @@ use plonky2::{ proof::{ProofWithPublicInputs, ProofWithPublicInputsTarget}, }, }; -use plonky2_ecgfp5::gadgets::curve::CurveTarget; + use recursion_framework::{ circuit_builder::CircuitLogicWires, framework::{ @@ -42,17 +41,6 @@ use anyhow::Result; #[derive(Debug, Clone, Copy)] pub struct ReceiptExtractionCircuit; -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct ReceiptExtractionWires { - #[serde(serialize_with = "serialize", deserialize_with = "deserialize")] - pub(crate) dm: CurveTarget, - #[serde(serialize_with = "serialize", deserialize_with = "deserialize")] - pub(crate) dv: CurveTarget, - pub(crate) bh: [Target; PACKED_HASH_LEN], - pub(crate) prev_bh: [Target; PACKED_HASH_LEN], - pub(crate) bn: UInt256Target, -} - impl ReceiptExtractionCircuit { pub(crate) fn build( b: &mut CircuitBuilder, @@ -226,6 +214,7 @@ pub(crate) mod test { use mp2_common::{ keccak::PACKED_HASH_LEN, + rlp::MAX_KEY_NIBBLE_LEN, utils::{Endianness, Packer, ToFields}, }; use mp2_test::{ @@ -348,7 +337,7 @@ pub(crate) mod test { pub(crate) fn random() -> Self { let value_h = HashOut::::rand().to_bytes().pack(Endianness::Little); - let key = random_vector(64); + let key = random_vector(MAX_KEY_NIBBLE_LEN); let ptr = usize::MAX; let value_dv = Point::rand(); let value_dm = Point::rand(); diff --git a/mp2-v1/src/lib.rs b/mp2-v1/src/lib.rs index 
40efe183c..3c64b501c 100644 --- a/mp2-v1/src/lib.rs +++ b/mp2-v1/src/lib.rs @@ -9,17 +9,26 @@ // stylistic feature #![feature(async_closure)] use mp2_common::{array::L32, mpt_sequential::PAD_LEN}; - +/// The maximum length of an MPT Branch Node that we accept, any larger would cause additional keccak permutation to run +/// resulting in having to have a number of different circuits for different size MPT branch nodes. pub const MAX_BRANCH_NODE_LEN: usize = 532; +/// The maximum length of an MPT Branch Node after its been padded for keccak hashing pub const MAX_BRANCH_NODE_LEN_PADDED: usize = PAD_LEN(532); /// rlp( rlp(max key 32b) + rlp(max value 32b) ) + 1 for compact encoding /// see test_len() pub const MAX_EXTENSION_NODE_LEN: usize = 69; +/// The size of a MPT extension node after it has been padded for keccak hashing. pub const MAX_EXTENSION_NODE_LEN_PADDED: usize = PAD_LEN(69); +/// rlp( rlp(max key 32b) + rlp(max value 32b) ) + 1 for compact encoding pub const MAX_LEAF_NODE_LEN: usize = MAX_EXTENSION_NODE_LEN; +/// The size of a Storage MPT leaf node after it has been padded for keccak hashing pub const MAX_LEAF_NODE_LEN_PADDED: usize = PAD_LEN(MAX_LEAF_NODE_LEN); +/// The maximum size in bytes of a value stored inside an MPT leaf node pub const MAX_LEAF_VALUE_LEN: usize = 32; +/// This is the length of Storage leaf value packed into u32 elements. pub const L32_LEAF_VALUE_LEN: usize = L32(MAX_LEAF_VALUE_LEN); +/// The maximum size of receipt leaf that we accept in the code, any larger causes additiona keccak hashing to occur resulting in +/// different circuits. 
pub const MAX_RECEIPT_LEAF_NODE_LEN: usize = 512; pub mod api; diff --git a/mp2-v1/src/values_extraction/api.rs b/mp2-v1/src/values_extraction/api.rs index 74c873fa6..2ed9b555d 100644 --- a/mp2-v1/src/values_extraction/api.rs +++ b/mp2-v1/src/values_extraction/api.rs @@ -46,9 +46,9 @@ const NUM_IO: usize = PublicInputs::::TOTAL_LEN; /// CircuitInput is a wrapper around the different specialized circuits that can /// be used to prove a MPT node recursively. #[derive(Serialize, Deserialize)] -pub enum CircuitInput +pub enum CircuitInput where - [(); PAD_LEN(NODE_LEN)]:, + [(); PAD_LEN(LEAF_LEN)]:, [(); MAX_COLUMNS - 2]:, [(); MAX_COLUMNS - 1]:, [(); MAX_COLUMNS - 0]:, @@ -56,14 +56,14 @@ where LeafSingle(LeafSingleCircuit), LeafMapping(LeafMappingCircuit), LeafMappingOfMappings(LeafMappingOfMappingsCircuit), - LeafReceipt(ReceiptLeafCircuit), + LeafReceipt(ReceiptLeafCircuit), Extension(ExtensionInput), Branch(BranchInput), } -impl CircuitInput +impl CircuitInput where - [(); PAD_LEN(NODE_LEN)]:, + [(); PAD_LEN(LEAF_LEN)]:, [(); MAX_COLUMNS - 2]:, [(); MAX_COLUMNS - 1]:, [(); MAX_COLUMNS - 0]:, @@ -120,6 +120,9 @@ where evm_word: u32, table_info: Vec, ) -> Self { + // We calculate so called "Input" columns here. These are columns that involve data not explicitly extractable from an MPT node + // but are used in proving we are looking at the correct node. For instance mapping keys are used to calculate the position of a leaf node + // that we need to extract from, but only the output of a keccak hash of some combination of them is included in the node, hence we feed them in as witness. let outer_input_column = InputColumnInfo::new(&[slot], outer_key_id, OUTER_KEY_ID_PREFIX, 32); let inner_input_column = @@ -142,16 +145,16 @@ where } /// Create a circuit input for proving a leaf MPT node of a transaction receipt. 
- pub fn new_receipt_leaf( + pub fn new_receipt_leaf( last_node: &[u8], tx_index: u64, - event: &EventLogInfo, + event: &EventLogInfo, ) -> Self where - [(); 7 - 2 - NO_TOPICS - MAX_DATA]:, + [(); 7 - 2 - NO_TOPICS - MAX_DATA_WORDS]:, { CircuitInput::LeafReceipt( - ReceiptLeafCircuit::::new::( + ReceiptLeafCircuit::::new::( last_node, tx_index, event, ) .expect("Could not construct Receipt Leaf Circuit"), @@ -180,9 +183,9 @@ where /// Most notably, it holds them in a way to use the recursion framework allowing /// us to specialize circuits according to the situation. #[derive(Eq, PartialEq, Serialize, Deserialize)] -pub struct PublicParameters +pub struct PublicParameters where - [(); PAD_LEN(NODE_LEN)]:, + [(); PAD_LEN(LEAF_LEN)]:, [(); MAX_COLUMNS - 2]:, [(); MAX_COLUMNS - 1]:, [(); MAX_COLUMNS - 0]:, @@ -191,7 +194,7 @@ where leaf_mapping: CircuitWithUniversalVerifier>, leaf_mapping_of_mappings: CircuitWithUniversalVerifier>, - leaf_receipt: CircuitWithUniversalVerifier>, + leaf_receipt: CircuitWithUniversalVerifier>, extension: CircuitWithUniversalVerifier, #[cfg(not(test))] branches: BranchCircuits, @@ -205,10 +208,10 @@ where /// Public API employed to build the MPT circuits, which are returned in /// serialized form. -pub fn build_circuits_params( -) -> PublicParameters +pub fn build_circuits_params( +) -> PublicParameters where - [(); PAD_LEN(NODE_LEN)]:, + [(); PAD_LEN(LEAF_LEN)]:, [(); MAX_COLUMNS - 2]:, [(); MAX_COLUMNS - 1]:, [(); MAX_COLUMNS - 0]:, @@ -219,12 +222,12 @@ where /// Public API employed to generate a proof for the circuit specified by /// `CircuitInput`, employing the `circuit_params` generated with the /// `build_circuits_params` API. 
-pub fn generate_proof( - circuit_params: &PublicParameters, - circuit_type: CircuitInput, +pub fn generate_proof( + circuit_params: &PublicParameters, + circuit_type: CircuitInput, ) -> Result> where - [(); PAD_LEN(NODE_LEN)]:, + [(); PAD_LEN(LEAF_LEN)]:, [(); MAX_COLUMNS - 2]:, [(); MAX_COLUMNS - 1]:, [(); MAX_COLUMNS - 0]:, @@ -383,9 +386,9 @@ impl_branch_circuits!(TestBranchCircuits, 1, 4, 9); /// 3 branch circuits + 1 extension + 1 leaf single + 1 leaf mapping + 1 leaf mapping of mappings + 1 leaf receipt const MAPPING_CIRCUIT_SET_SIZE: usize = 8; -impl PublicParameters +impl PublicParameters where - [(); PAD_LEN(NODE_LEN)]:, + [(); PAD_LEN(LEAF_LEN)]:, [(); >::HASH_SIZE]:, [(); MAX_COLUMNS - 2]:, [(); MAX_COLUMNS - 1]:, @@ -416,7 +419,7 @@ where circuit_builder.build_circuit::>(()); debug!("Building leaf receipt circuit"); - let leaf_receipt = circuit_builder.build_circuit::>(()); + let leaf_receipt = circuit_builder.build_circuit::>(()); debug!("Building extension circuit"); let extension = circuit_builder.build_circuit::(()); @@ -453,7 +456,7 @@ where fn generate_proof( &self, - circuit_type: CircuitInput, + circuit_type: CircuitInput, ) -> Result { let set = &self.get_circuit_set(); match circuit_type { diff --git a/mp2-v1/src/values_extraction/gadgets/column_info.rs b/mp2-v1/src/values_extraction/gadgets/column_info.rs index 6def2b067..6ee1fc79f 100644 --- a/mp2-v1/src/values_extraction/gadgets/column_info.rs +++ b/mp2-v1/src/values_extraction/gadgets/column_info.rs @@ -2,8 +2,10 @@ use itertools::{zip_eq, Itertools}; use mp2_common::{ + array::Array, eth::{left_pad, left_pad32}, group_hashing::{map_to_curve_point, CircuitBuilderGroupHashing}, + keccak::PACKED_HASH_LEN, poseidon::H, types::{CBuilder, MAPPING_LEAF_VALUE_LEN}, utils::{Endianness, Packer}, @@ -12,13 +14,17 @@ use mp2_common::{ use plonky2::{ field::types::{Field, Sample}, hash::hash_types::{HashOut, HashOutTarget}, - iop::{target::Target, witness::WitnessWrite}, + iop::{ + 
target::{BoolTarget, Target}, + witness::WitnessWrite, + }, plonk::config::Hasher, }; +use plonky2_crypto::u32::arithmetic_u32::U32Target; use plonky2_ecgfp5::{curve::curve::Point, gadgets::curve::CurveTarget}; use rand::{thread_rng, Rng}; use serde::{Deserialize, Serialize}; -use std::{array, iter::once}; +use std::iter::once; /// Trait defining common functionality between [`InputColumnInfo`] and [`ExtractedColumnInfo`] pub trait ColumnInfo { @@ -29,12 +35,17 @@ pub trait ColumnInfo { fn identifier(&self) -> u64; } -/// Column info +/// This struct is used for information in MPT nodes that isn't explicitly extractable from the node itself, but is used +/// to prove that we are looking at the correct node. For instance with mapping keys the value stored for a mapping in slot `s` with key +/// `k` is `keccak(keccak(s) || k)` where we use `||` to represent concatenation. +/// +/// The metadata for these columns is also calculated slight differently so we seperate them from [`ExtractedColumnInfo`] since we never have to +/// index into an array to get the value stored in a cell of one of these columns, thus reducing cost when calculating the values digest. 
#[derive(Clone, Debug, Default, Eq, PartialEq, Hash, Serialize, Deserialize)] pub struct InputColumnInfo { /// This is the information used to identify the data relative to the contract, /// for storage extraction its the slot, for receipts its the event signature for example - pub extraction_identifier: [F; 8], + pub extraction_identifier: [F; PACKED_HASH_LEN], /// Column identifier pub identifier: F, /// Prefix used in computing mpt metadata @@ -52,7 +63,7 @@ impl InputColumnInfo { length: usize, ) -> Self { let mut extraction_vec = extraction_identifier.pack(Endianness::Little); - extraction_vec.resize(8, 0u32); + extraction_vec.resize(PACKED_HASH_LEN, 0u32); extraction_vec.reverse(); let extraction_identifier = extraction_vec .into_iter() @@ -128,12 +139,15 @@ impl InputColumnInfo { } } -/// Column info +/// This struct stores all the infomation that corresponds to data we actually extract from a MPT Leaf Node. +/// For instance in a storage leaf `self.extraction_identifier` would be the slot, `self.identifier` is this columns identifier in the table +/// `self.byte_offset` is how far from the start of the value stored in the node this extracted data begins, `self.length` is the number of bytes the data takes up +/// and `self.location_offset` is used in storage for signaling that this data is extracted from an object that may span multiple EVM words. 
#[derive(Clone, Debug, Default, Eq, PartialEq, Hash, Serialize, Deserialize, Copy)] pub struct ExtractedColumnInfo { /// This is the information used to identify the data relative to the contract, /// for storage extraction its the slot, for receipts its the event signature for example - pub extraction_identifier: [F; 8], + pub extraction_identifier: [F; PACKED_HASH_LEN], /// Column identifier pub identifier: F, /// The offset in bytes where to extract this column from some predetermined start point, @@ -141,7 +155,7 @@ pub struct ExtractedColumnInfo { /// this would be either the offset from the start of the receipt or from the start of the /// relevant log pub byte_offset: F, - /// The length (in bits) of the field to extract in the EVM word + /// The length in bytes of the field to extract in the EVM word pub length: F, /// For storage this is the EVM word, for receipts this is either 1 or 0 and indicates whether to /// use the relevant log offset or not. @@ -173,7 +187,7 @@ impl ExtractedColumnInfo { location_offset: u32, ) -> Self { let mut extraction_vec = extraction_identifier.pack(Endianness::Little); - extraction_vec.resize(8, 0u32); + extraction_vec.resize(PACKED_HASH_LEN, 0u32); extraction_vec.reverse(); let extraction_identifier = extraction_vec .into_iter() @@ -195,7 +209,10 @@ impl ExtractedColumnInfo { } /// Create a sample column info. It could be used in integration tests. 
- pub fn sample_storage(extraction_identifier: &[F; 8], location_offset: F) -> Self { + pub fn sample_storage( + extraction_identifier: &[F; PACKED_HASH_LEN], + location_offset: F, + ) -> Self { let rng = &mut thread_rng(); let length: usize = rng.gen_range(1..=MAPPING_LEAF_VALUE_LEN); @@ -213,9 +230,13 @@ impl ExtractedColumnInfo { } } - /// Sample a ne [`ExtractedColumnInfo`] at random, if `flag` is `true` then it will be for storage extraction, + /// Sample a new [`ExtractedColumnInfo`] at random, if `flag` is `true` then it will be for storage extraction, /// if false it will be for receipt extraction. - pub fn sample(flag: bool, extraction_identifier: &[F; 8], location_offset: F) -> Self { + pub fn sample( + flag: bool, + extraction_identifier: &[F; PACKED_HASH_LEN], + location_offset: F, + ) -> Self { if flag { ExtractedColumnInfo::sample_storage(extraction_identifier, location_offset) } else { @@ -273,13 +294,12 @@ impl ExtractedColumnInfo { } pub fn value_digest(&self, value: &[u8]) -> Point { - if self.identifier().0 == 0 { + // If the column identifier is zero then its a dummy column. This is because the column identifier + // is always computed as the output of a hash which is EXTREMELY unlikely to be exactly zero. 
+ if self.identifier() == F::ZERO { Point::NEUTRAL } else { - let bytes = left_pad32( - &value[self.byte_offset().0 as usize - ..self.byte_offset().0 as usize + self.length.0 as usize], - ); + let bytes = self.extract_value(value); let inputs = once(self.identifier()) .chain( @@ -337,7 +357,7 @@ impl ColumnInfo for ExtractedColumnInfo { pub struct ExtractedColumnInfoTarget { /// This is the information used to identify the data relative to the contract, /// for storage extraction its the slot, for receipts its the event signature for example - pub(crate) extraction_identifier: [Target; 8], + pub(crate) extraction_identifier: [Target; PACKED_HASH_LEN], /// Column identifier pub(crate) identifier: Target, /// The offset in bytes where to extract this column from some predetermined start point, @@ -375,7 +395,7 @@ impl ExtractedColumnInfoTarget { b.map_to_curve_point(&inputs) } - pub fn extraction_id(&self) -> [Target; 8] { + pub fn extraction_id(&self) -> [Target; PACKED_HASH_LEN] { self.extraction_identifier } @@ -394,6 +414,55 @@ impl ExtractedColumnInfoTarget { pub fn location_offset(&self) -> Target { self.location_offset } + + /// Functionality used to conditionally extract data from a slice. + /// `conditional` represents whether the value should actually be extracted or not, it should be set to `false` if actual extraction occurs + /// `start` is the first index we look at. 
+ pub fn extract_value( + &self, + b: &mut CBuilder, + conditional: BoolTarget, + value: &Array, + start: Target, + ) -> Array { + let zero = b.zero(); + let mut last_byte_found = conditional; + // Even if the constant `VALUE_LEN` is larger than 32 this is the maximum size in bytes + // of data that we extract per column + let mut result_bytes = [zero; 32]; + result_bytes + .iter_mut() + .rev() + .enumerate() + .for_each(|(i, out_byte)| { + // offset = info.byte_offset + i + let index = b.constant(F::from_canonical_usize(i)); + let offset = b.sub(start, index); + // Set to 0 if found the last byte. + let offset = b.select(last_byte_found, zero, offset); + + // Since VALUE_LEN is a constant that is determined at compile time this conditional won't + // cause any issues with the circuit. + let byte = if VALUE_LEN < 64 { + b.random_access(offset, value.arr.to_vec()) + } else { + value.random_access_large_array(b, offset) + }; + + // Now if `last_byte_found` is true we add zero, otherwise add `byte` + let to_add = b.select(last_byte_found, zero, byte); + + *out_byte = b.add(*out_byte, to_add); + // is_last_byte = offset == last_byte_offset + let is_last_byte = b.is_equal(offset, self.byte_offset); + // last_byte_found |= is_last_byte + last_byte_found = b.or(last_byte_found, is_last_byte); + }); + + let result_arr = Array::::from_array(result_bytes); + + Array::::pack(&result_arr, b, Endianness::Big) + } } /// Column info @@ -401,11 +470,11 @@ impl ExtractedColumnInfoTarget { pub struct InputColumnInfoTarget { /// This is the information used to identify the data relative to the contract, /// for storage extraction its the slot, for receipts its the event signature for example - pub extraction_identifier: [Target; 8], + pub extraction_identifier: [Target; PACKED_HASH_LEN], /// Column identifier pub identifier: Target, /// Prefix used in computing mpt metadata - pub metadata_prefix: [Target; 8], + pub metadata_prefix: [Target; PACKED_HASH_LEN], /// The length of the 
field to extract in the EVM word pub length: Target, } @@ -456,9 +525,9 @@ pub trait CircuitBuilderColumnInfo { impl CircuitBuilderColumnInfo for CBuilder { fn add_virtual_extracted_column_info(&mut self) -> ExtractedColumnInfoTarget { - let extraction_identifier: [Target; 8] = array::from_fn(|_| self.add_virtual_target()); - let [identifier, byte_offset, length, location_offset] = - array::from_fn(|_| self.add_virtual_target()); + let extraction_identifier: [Target; PACKED_HASH_LEN] = self.add_virtual_target_arr(); + + let [identifier, byte_offset, length, location_offset] = self.add_virtual_target_arr(); ExtractedColumnInfoTarget { extraction_identifier, @@ -470,9 +539,11 @@ impl CircuitBuilderColumnInfo for CBuilder { } fn add_virtual_input_column_info(&mut self) -> InputColumnInfoTarget { - let extraction_identifier: [Target; 8] = array::from_fn(|_| self.add_virtual_target()); - let metadata_prefix: [Target; 8] = array::from_fn(|_| self.add_virtual_target()); - let [identifier, length] = array::from_fn(|_| self.add_virtual_target()); + let extraction_identifier: [Target; PACKED_HASH_LEN] = self.add_virtual_target_arr(); + + let metadata_prefix: [Target; PACKED_HASH_LEN] = self.add_virtual_target_arr(); + + let [identifier, length] = self.add_virtual_target_arr(); InputColumnInfoTarget { extraction_identifier, diff --git a/mp2-v1/src/values_extraction/gadgets/metadata_gadget.rs b/mp2-v1/src/values_extraction/gadgets/metadata_gadget.rs index d15b55db8..fed1f8494 100644 --- a/mp2-v1/src/values_extraction/gadgets/metadata_gadget.rs +++ b/mp2-v1/src/values_extraction/gadgets/metadata_gadget.rs @@ -6,13 +6,10 @@ use super::column_info::{ CircuitBuilderColumnInfo, ExtractedColumnInfo, ExtractedColumnInfoTarget, InputColumnInfo, InputColumnInfoTarget, WitnessWriteColumnInfo, }; -use alloy::{ - primitives::{Log, B256}, - rlp::Decodable, -}; + use itertools::Itertools; use mp2_common::{ - array::{Array, Targetable, L32}, + array::{Array, Targetable}, eth::EventLogInfo, 
group_hashing::CircuitBuilderGroupHashing, poseidon::H, @@ -73,9 +70,6 @@ where // Check that we don't have too many columns assert!(num_actual_columns <= MAX_COLUMNS); - // We order the columns so that the location_offset increases, then if two columns have the same location offset - // they are ordered by increasing byte offset. Then if byte offset is the same they are ordered such that if `self.is_extracted` is - // false they appear first. let mut table_info = [ExtractedColumnInfo::default(); { MAX_COLUMNS - INPUT_COLUMNS }]; table_info .iter_mut() @@ -89,7 +83,7 @@ where } } - /// Create a sample MPT metadata. It could be used in integration tests. + /// Create a sample MPT metadata. It could be used in testing. pub fn sample( flag: bool, input_prefixes: &[&[u8]; INPUT_COLUMNS], @@ -207,48 +201,14 @@ where }) } - pub fn extracted_receipt_value_digest( + pub fn extracted_receipt_value_digest( &self, value: &[u8], - event: &EventLogInfo, + event: &EventLogInfo, ) -> Point { - // Convert to Rlp form so we can use provided methods. - let node_rlp = rlp::Rlp::new(value); - - // The actual receipt data is item 1 in the list - let (receipt_rlp, receipt_off) = node_rlp.at_with_offset(1).unwrap(); - // The rlp encoded Receipt is not a list but a string that is formed of the `tx_type` followed by the remaining receipt - // data rlp encoded as a list. We retrieve the payload info so that we can work out relevant offsets later. 
- let receipt_str_payload = receipt_rlp.payload_info().unwrap(); - - // We make a new `Rlp` struct that should be the encoding of the inner list representing the `ReceiptEnvelope` - let receipt_list = rlp::Rlp::new(&receipt_rlp.data().unwrap()[1..]); - - // The logs themselves start are the item at index 3 in this list - let (logs_rlp, logs_off) = receipt_list.at_with_offset(3).unwrap(); - - // We calculate the offset the that the logs are at from the start of the node - let logs_offset = receipt_off + receipt_str_payload.header_len + 1 + logs_off; - - // Now we produce an iterator over the logs with each logs offset. - #[allow(clippy::unnecessary_find_map)] - let relevant_log_offset = std::iter::successors(Some(0usize), |i| Some(i + 1)) - .map_while(|i| logs_rlp.at_with_offset(i).ok()) - .find_map(|(log_rlp, log_off)| { - let mut bytes = log_rlp.as_raw(); - let log = Log::decode(&mut bytes).expect("Couldn't decode log"); - - if log.address == event.address - && log - .data - .topics() - .contains(&B256::from(event.event_signature)) - { - Some(logs_offset + log_off) - } else { - Some(0usize) - } - }) + // Get the relevant log offset + let relevant_log_offset = event + .get_log_offset(value) .expect("No relevant log in the provided value"); self.extracted_columns() @@ -297,14 +257,25 @@ where F::from_canonical_usize(columns_metadata.num_actual_columns), ); } + + /// Create a new instance of [`TableMetadata`] from an [`EventLogInfo`]. Events + /// always have two input columns relating to the transaction index and gas used for the transaction. 
+ pub fn from_event_info( + event: &EventLogInfo, + ) -> TableMetadata + where + [(); MAX_COLUMNS - 2 - NO_TOPICS - MAX_DATA_WORDS]:, + { + TableMetadata::::from(*event) + } } -impl - From> for TableMetadata +impl + From> for TableMetadata where - [(); MAX_COLUMNS - 2 - NO_TOPICS - MAX_DATA]:, + [(); MAX_COLUMNS - 2 - NO_TOPICS - MAX_DATA_WORDS]:, { - fn from(event: EventLogInfo) -> Self { + fn from(event: EventLogInfo) -> Self { let extraction_id = event.event_signature; let tx_index_input = [ @@ -477,6 +448,9 @@ where /// Computes the value digest and metadata digest for the extracted columns from the supplied value /// Outputs are ordered as `(MetadataDigest, ValueDigest)`. + /// The inputs `location_no_offset` and `location` represent the MPT key for the slot of this variable without an evm word offset + /// and the MPT key of the current leaf node respectively. To determine whether we should extract a value or not we check to see if + /// `location_no_offset + column.loction_offset == location`, if this is true we extract, if false we dummy the value. pub(crate) fn extracted_digests( &self, b: &mut CBuilder, @@ -521,11 +495,7 @@ where // last_byte_found lets us know whether we continue extracting or not. 
// Hence if we want to extract values `extract` will be true so `last_byte_found` should be false - let mut last_byte_found = b.not(correct); - - // Even if the constant `VALUE_LEN` is larger than 32 this is the maximum size in bytes - // of data that we extract per column - let mut result_bytes = [zero; 32]; + let last_byte_found = b.not(correct); // We iterate over the result bytes in reverse order, the first element that we want to access // from `value` is `value[MAPPING_LEAF_VALUE_LEN - column.byte_offset - column.length]` and then @@ -535,46 +505,14 @@ where let start = b.sub(last_byte_offset, one); - result_bytes - .iter_mut() - .rev() - .enumerate() - .for_each(|(i, out_byte)| { - // offset = info.byte_offset + i - let index = b.constant(F::from_canonical_usize(i)); - let offset = b.sub(start, index); - // Set to 0 if found the last byte. - let offset = b.select(last_byte_found, zero, offset); - - // Since VALUE_LEN is a constant that is determined at compile time this conditional won't - // cause any issues with the circuit. 
- let byte = if VALUE_LEN < 64 { - b.random_access(offset, value.arr.to_vec()) - } else { - value.random_access_large_array(b, offset) - }; - - // Now if `last_byte_found` is true we add zero, otherwise add `byte` - let to_add = b.select(last_byte_found, zero, byte); - - *out_byte = b.add(*out_byte, to_add); - // is_last_byte = offset == last_byte_offset - let is_last_byte = b.is_equal(offset, column.byte_offset); - // last_byte_found |= is_last_byte - last_byte_found = b.or(last_byte_found, is_last_byte); - }); - - let result_arr = Array::::from_array(result_bytes); - - let result_packed: Array = - Array::::pack(&result_arr, b, Endianness::Big); + let result_packed = column.extract_value(b, last_byte_found, value, start); let inputs = once(column.identifier) .chain(result_packed.arr.iter().map(|t| t.to_target())) .collect_vec(); let value_digest = b.map_to_curve_point(&inputs); - let negated = b.not(correct_location); - let value_selector = b.or(negated, selector); + let value_selector = b.not(correct); + ( b.curve_select(selector, curve_zero, column_digest), b.curve_select(value_selector, curve_zero, value_digest), @@ -619,14 +557,6 @@ where let location = b.add(log_offset, column.byte_offset()); - // last_byte_found lets us know whether we continue extracting or not. - // If `selector` is false then we have data to extract - let mut last_byte_found = selector; - - // Even if the constant `VALUE_LEN` is larger than 32 this is the maximum size in bytes - // of data that we extract per column - let mut result_bytes = [zero; 32]; - // We iterate over the result bytes in reverse order, the first element that we want to access // from `value` is `value[location + column.length - 1]` and then // we keep extracting until we reach `value[location]`. 
@@ -635,39 +565,8 @@ where let start = b.sub(last_byte_offset, one); - result_bytes - .iter_mut() - .rev() - .enumerate() - .for_each(|(i, out_byte)| { - // offset = info.byte_offset + i - let index = b.constant(F::from_canonical_usize(i)); - let offset = b.sub(start, index); - // Set to 0 if found the last byte. - let offset = b.select(last_byte_found, zero, offset); - - // Since VALUE_LEN is a constant that is determined at compile time this conditional won't - // cause any issues with the circuit. - let byte = if VALUE_LEN < 64 { - b.random_access(offset, value.arr.to_vec()) - } else { - value.random_access_large_array(b, offset) - }; - - // Now if `last_byte_found` is true we add zero, otherwise add `byte` - let to_add = b.select(last_byte_found, zero, byte); - - *out_byte = b.add(*out_byte, to_add); - // is_last_byte = offset == last_byte_offset - let is_last_byte = b.is_equal(offset, column.byte_offset); - // last_byte_found |= is_last_byte - last_byte_found = b.or(last_byte_found, is_last_byte); - }); - - let result_arr = Array::::from_array(result_bytes); - - let result_packed: Array = - Array::::pack(&result_arr, b, Endianness::Big); + // Extract the value if selector is false + let result_packed = column.extract_value(b, selector, value, start); let inputs = once(column.identifier) .chain(result_packed.arr.iter().map(|t| t.to_target())) diff --git a/mp2-v1/src/values_extraction/leaf_receipt.rs b/mp2-v1/src/values_extraction/leaf_receipt.rs index 5ee55cb93..9ec3587d9 100644 --- a/mp2-v1/src/values_extraction/leaf_receipt.rs +++ b/mp2-v1/src/values_extraction/leaf_receipt.rs @@ -5,10 +5,7 @@ use super::{ public_inputs::{PublicInputs, PublicInputsArgs}, }; -use alloy::{ - primitives::{Address, Log, B256}, - rlp::Decodable, -}; +use alloy::primitives::Address; use anyhow::Result; use mp2_common::{ array::{Array, Targetable, Vector, VectorWire}, @@ -126,55 +123,18 @@ where [(); MAX_COLUMNS - 2]:, { /// Create a new [`ReceiptLeafCircuit`] from a 
[`ReceiptProofInfo`] and a [`EventLogInfo`] - pub fn new( + pub fn new( last_node: &[u8], tx_index: u64, - event: &EventLogInfo, + event: &EventLogInfo, ) -> Result where - [(); MAX_COLUMNS - 2 - NO_TOPICS - MAX_DATA]:, + [(); MAX_COLUMNS - 2 - NO_TOPICS - MAX_DATA_WORDS]:, { - // Convert to Rlp form so we can use provided methods. - let node_rlp = rlp::Rlp::new(last_node); - - // The actual receipt data is item 1 in the list - let (receipt_rlp, receipt_off) = node_rlp.at_with_offset(1)?; - // The rlp encoded Receipt is not a list but a string that is formed of the `tx_type` followed by the remaining receipt - // data rlp encoded as a list. We retrieve the payload info so that we can work out relevant offsets later. - let receipt_str_payload = receipt_rlp.payload_info()?; + // Get the relevant log offset + let relevant_log_offset = event.get_log_offset(last_node)?; - // We make a new `Rlp` struct that should be the encoding of the inner list representing the `ReceiptEnvelope` - let receipt_list = rlp::Rlp::new(&receipt_rlp.data()?[1..]); - - // The logs themselves start are the item at index 3 in this list - let (logs_rlp, logs_off) = receipt_list.at_with_offset(3)?; - - // We calculate the offset the that the logs are at from the start of the node - let logs_offset = receipt_off + receipt_str_payload.header_len + 1 + logs_off; - - // Now we produce an iterator over the logs with each logs offset. 
- let relevant_log_offset = iter::successors(Some(0usize), |i| Some(i + 1)) - .map_while(|i| logs_rlp.at_with_offset(i).ok()) - .find_map(|(log_rlp, log_off)| { - let mut bytes = log_rlp.as_raw(); - let log = Log::decode(&mut bytes).ok()?; - - if log.address == event.address - && log - .data - .topics() - .contains(&B256::from(event.event_signature)) - { - Some(logs_offset + log_off) - } else { - Some(0usize) - } - }) - .ok_or(anyhow::anyhow!( - "There were no relevant logs in this transaction" - ))?; - - let EventLogInfo:: { + let EventLogInfo:: { size, address, add_rel_offset, @@ -526,19 +486,19 @@ mod tests { fn test_leaf_circuit_helper< const NO_TOPICS: usize, - const MAX_DATA: usize, + const MAX_DATA_WORDS: usize, const NODE_LEN: usize, >() where [(); PAD_LEN(NODE_LEN)]:, - [(); 7 - 2 - NO_TOPICS - MAX_DATA]:, + [(); 7 - 2 - NO_TOPICS - MAX_DATA_WORDS]:, { - let receipt_proof_infos = generate_receipt_test_info::(); + let receipt_proof_infos = generate_receipt_test_info::(); let proofs = receipt_proof_infos.proofs(); let info = proofs.first().unwrap(); let query = receipt_proof_infos.query(); - let c = ReceiptLeafCircuit::::new::( + let c = ReceiptLeafCircuit::::new::( info.mpt_proof.last().unwrap(), info.tx_index, &query.event, diff --git a/mp2-v1/src/values_extraction/mod.rs b/mp2-v1/src/values_extraction/mod.rs index 8c0673bab..a6e956315 100644 --- a/mp2-v1/src/values_extraction/mod.rs +++ b/mp2-v1/src/values_extraction/mod.rs @@ -9,9 +9,9 @@ use itertools::Itertools; use alloy::primitives::Address; use mp2_common::{ - eth::{left_pad32, EventLogInfo, StorageSlot}, + eth::{left_pad32, StorageSlot}, poseidon::{empty_poseidon_hash, hash_to_int_value, H}, - types::{GFp, HashOutput}, + types::HashOutput, utils::{Endianness, Packer, ToFields}, F, }; @@ -348,24 +348,24 @@ where } /// Prefix used for making a topic column id. 
-const TOPIC_PREFIX: &[u8] = b"topic"; +pub const TOPIC_PREFIX: &[u8] = b"topic"; /// [`TOPIC_PREFIX`] as a [`str`] -const TOPIC_NAME: &str = "topic"; +pub const TOPIC_NAME: &str = "topic"; /// Prefix used for making a data column id. -const DATA_PREFIX: &[u8] = b"data"; +pub const DATA_PREFIX: &[u8] = b"data"; /// [`DATA_PREFIX`] as a [`str`] -const DATA_NAME: &str = "data"; +pub const DATA_NAME: &str = "data"; /// Prefix for transaction index -const TX_INDEX_PREFIX: &[u8] = b"tx index"; +pub const TX_INDEX_PREFIX: &[u8] = b"tx_index"; /// [`TX_INDEX_PREFIX`] as a [`str`] -const TX_INDEX_NAME: &str = "tx index"; +pub const TX_INDEX_NAME: &str = "tx_index"; /// Prefix for gas used -const GAS_USED_PREFIX: &[u8] = b"gas used"; +pub const GAS_USED_PREFIX: &[u8] = b"gas_used"; /// [`GAS_USED_PREFIX`] as a [`str`] -const GAS_USED_NAME: &str = "gas used"; +pub const GAS_USED_NAME: &str = "gas_used"; pub fn identifier_block_column() -> ColumnId { let inputs: Vec = BLOCK_ID_DST.to_fields(); @@ -540,93 +540,3 @@ pub fn row_unique_data_for_mapping_of_mappings_leaf( let inputs = packed_outer_key.chain(packed_inner_key).collect_vec(); H::hash_no_pad(&inputs).into() } - -/// Function that computes the column identifiers for the non-indexed columns together with their names as [`String`]s. 
-pub fn compute_non_indexed_receipt_column_ids( - event: &EventLogInfo, -) -> Vec<(String, GFp)> { - let gas_used_input = [ - event.address.as_slice(), - event.event_signature.as_slice(), - GAS_USED_PREFIX, - ] - .concat() - .into_iter() - .map(GFp::from_canonical_u8) - .collect::>(); - let gas_used_column_id = H::hash_no_pad(&gas_used_input).elements[0]; - - let topic_ids = event - .topics - .iter() - .enumerate() - .map(|(j, _)| { - let input = [ - event.address.as_slice(), - event.event_signature.as_slice(), - TOPIC_PREFIX, - &[j as u8 + 1], - ] - .concat() - .into_iter() - .map(GFp::from_canonical_u8) - .collect::>(); - ( - format!("{}_{}", TOPIC_NAME, j + 1), - H::hash_no_pad(&input).elements[0], - ) - }) - .collect::>(); - - let data_ids = event - .data - .iter() - .enumerate() - .map(|(j, _)| { - let input = [ - event.address.as_slice(), - event.event_signature.as_slice(), - DATA_PREFIX, - &[j as u8 + 1], - ] - .concat() - .into_iter() - .map(GFp::from_canonical_u8) - .collect::>(); - ( - format!("{}_{}", DATA_NAME, j + 1), - H::hash_no_pad(&input).elements[0], - ) - }) - .collect::>(); - - [ - vec![(GAS_USED_NAME.to_string(), gas_used_column_id)], - topic_ids, - data_ids, - ] - .concat() -} - -pub fn compute_all_receipt_coulmn_ids( - event: &EventLogInfo, -) -> Vec<(String, GFp)> { - let tx_index_input = [ - event.address.as_slice(), - event.event_signature.as_slice(), - TX_INDEX_PREFIX, - ] - .concat() - .into_iter() - .map(GFp::from_canonical_u8) - .collect::>(); - let tx_index_column_id = ( - TX_INDEX_NAME.to_string(), - H::hash_no_pad(&tx_index_input).elements[0], - ); - - let mut other_ids = compute_non_indexed_receipt_column_ids(event); - other_ids.insert(0, tx_index_column_id); - - other_ids -} diff --git a/mp2-v1/src/values_extraction/planner.rs b/mp2-v1/src/values_extraction/planner.rs index e8298492f..47f6b7b73 100644 --- a/mp2-v1/src/values_extraction/planner.rs +++ b/mp2-v1/src/values_extraction/planner.rs @@ -55,10 +55,10 @@ impl ProofData { } 
} -impl Extractable - for EventLogInfo +impl Extractable + for EventLogInfo where - [(); 7 - 2 - NO_TOPICS - MAX_DATA]:, + [(); 7 - 2 - NO_TOPICS - MAX_DATA_WORDS]:, { async fn create_update_tree( &self, @@ -66,7 +66,7 @@ where epoch: u64, provider: &RootProvider, ) -> Result> { - let query = ReceiptQuery:: { + let query = ReceiptQuery:: { contract, event: *self, }; @@ -95,7 +95,7 @@ where [(); MAX_COLUMNS - 1]:, [(); MAX_COLUMNS - 0]:, { - let query = ReceiptQuery:: { + let query = ReceiptQuery:: { contract, event: *self, }; diff --git a/mp2-v1/tests/common/cases/indexing.rs b/mp2-v1/tests/common/cases/indexing.rs index c305d99ad..a89dff2a7 100644 --- a/mp2-v1/tests/common/cases/indexing.rs +++ b/mp2-v1/tests/common/cases/indexing.rs @@ -16,12 +16,15 @@ use mp2_v1::{ ColumnID, }, values_extraction::{ - compute_non_indexed_receipt_column_ids, identifier_block_column, - identifier_for_inner_mapping_key_column, identifier_for_outer_mapping_key_column, - identifier_for_value_column, + identifier_block_column, identifier_for_inner_mapping_key_column, + identifier_for_outer_mapping_key_column, identifier_for_value_column, DATA_NAME, + DATA_PREFIX, GAS_USED_NAME, GAS_USED_PREFIX, TOPIC_NAME, TOPIC_PREFIX, }, }; -use plonky2::field::types::PrimeField64; +use plonky2::{ + field::types::{Field, PrimeField64}, + plonk::config::Hasher, +}; use rand::{thread_rng, Rng}; use ryhope::storage::RoEpochKvStorage; @@ -55,7 +58,13 @@ use alloy::{ providers::{ext::AnvilApi, ProviderBuilder, RootProvider}, sol_types::SolEvent, }; -use mp2_common::{eth::StorageSlot, proof::ProofWithVK, types::HashOutput}; +use mp2_common::{ + eth::{EventLogInfo, StorageSlot}, + poseidon::H, + proof::ProofWithVK, + types::HashOutput, + F, +}; /// Test slots for single values extraction pub(crate) const SINGLE_SLOTS: [u8; 4] = [0, 1, 2, 3]; @@ -603,7 +612,7 @@ impl TableIndexing { where T: ReceiptExtractionArgs, [(); ::NO_TOPICS]:, - [(); ::MAX_DATA]:, + [(); ::MAX_DATA_WORDS]:, { // Create a provider with 
the wallet for contract deployment and interaction. let provider = ProviderBuilder::new() @@ -669,7 +678,7 @@ impl TableIndexing { .into_iter() .map(|(name, identifier)| TableColumn { name, - identifier: identifier.to_canonical_u64(), + identifier, index: IndexType::None, multiplier: false, }) @@ -1003,6 +1012,79 @@ impl TableIndexing { } } +/// Function that computes the column identifiers for the non-indexed columns together with their names as [`String`]s. +pub fn compute_non_indexed_receipt_column_ids< + const NO_TOPICS: usize, + const MAX_DATA_WORDS: usize, +>( + event: &EventLogInfo, +) -> Vec<(String, ColumnID)> { + let gas_used_input = [ + event.address.as_slice(), + event.event_signature.as_slice(), + GAS_USED_PREFIX, + ] + .concat() + .into_iter() + .map(F::from_canonical_u8) + .collect::>(); + let gas_used_column_id = H::hash_no_pad(&gas_used_input).elements[0]; + + let topic_ids = event + .topics + .iter() + .enumerate() + .map(|(j, _)| { + let input = [ + event.address.as_slice(), + event.event_signature.as_slice(), + TOPIC_PREFIX, + &[j as u8 + 1], + ] + .concat() + .into_iter() + .map(F::from_canonical_u8) + .collect::>(); + ( + format!("{}_{}", TOPIC_NAME, j + 1), + H::hash_no_pad(&input).elements[0].to_canonical_u64(), + ) + }) + .collect::>(); + + let data_ids = event + .data + .iter() + .enumerate() + .map(|(j, _)| { + let input = [ + event.address.as_slice(), + event.event_signature.as_slice(), + DATA_PREFIX, + &[j as u8 + 1], + ] + .concat() + .into_iter() + .map(F::from_canonical_u8) + .collect::>(); + ( + format!("{}_{}", DATA_NAME, j + 1), + H::hash_no_pad(&input).elements[0].to_canonical_u64(), + ) + }) + .collect::>(); + + [ + vec![( + GAS_USED_NAME.to_string(), + gas_used_column_id.to_canonical_u64(), + )], + topic_ids, + data_ids, + ] + .concat() +} + /// Build the mapping table. 
async fn build_mapping_table( ctx: &TestContext, @@ -1192,7 +1274,9 @@ async fn build_mapping_of_mappings_table( #[derive(Debug, Clone, Copy)] pub struct ReceiptUpdate { pub event_type: (u8, u8), + /// The number of events to emit related to the event defined by `event_type` pub no_relevant: usize, + /// The number of other random events to emit. pub no_others: usize, } diff --git a/mp2-v1/tests/common/cases/table_source.rs b/mp2-v1/tests/common/cases/table_source.rs index 7d6b53576..e6e1eeb2c 100644 --- a/mp2-v1/tests/common/cases/table_source.rs +++ b/mp2-v1/tests/common/cases/table_source.rs @@ -671,23 +671,23 @@ pub trait ReceiptExtractionArgs: Serialize + for<'de> Deserialize<'de> + Debug + Hash + Eq + PartialEq + Clone + Copy { const NO_TOPICS: usize; - const MAX_DATA: usize; + const MAX_DATA_WORDS: usize; fn new(address: Address, event_signature: &str) -> Self where Self: Sized; - fn get_event(&self) -> EventLogInfo<{ Self::NO_TOPICS }, { Self::MAX_DATA }>; + fn get_event(&self) -> EventLogInfo<{ Self::NO_TOPICS }, { Self::MAX_DATA_WORDS }>; fn get_index(&self) -> u64; fn to_table_rows( proof_infos: &[ReceiptProofInfo], - event: &EventLogInfo<{ Self::NO_TOPICS }, { Self::MAX_DATA }>, + event: &EventLogInfo<{ Self::NO_TOPICS }, { Self::MAX_DATA_WORDS }>, block: PrimaryIndex, ) -> Vec> where - [(); 7 - 2 - Self::NO_TOPICS - Self::MAX_DATA]:, + [(); 7 - 2 - Self::NO_TOPICS - Self::MAX_DATA_WORDS]:, { let metadata = TableMetadata::<7, 2>::from(*event); @@ -767,23 +767,23 @@ pub trait ReceiptExtractionArgs: } } -impl ReceiptExtractionArgs - for EventLogInfo +impl ReceiptExtractionArgs + for EventLogInfo { - const MAX_DATA: usize = MAX_DATA; + const MAX_DATA_WORDS: usize = MAX_DATA_WORDS; const NO_TOPICS: usize = NO_TOPICS; fn new(address: Address, event_signature: &str) -> Self where Self: Sized, { - EventLogInfo::::new(address, event_signature) + EventLogInfo::::new(address, event_signature) } - fn get_event(&self) -> EventLogInfo<{ Self::NO_TOPICS }, { 
Self::MAX_DATA }> + fn get_event(&self) -> EventLogInfo<{ Self::NO_TOPICS }, { Self::MAX_DATA_WORDS }> where [(); Self::NO_TOPICS]:, - [(); Self::MAX_DATA]:, + [(); Self::MAX_DATA_WORDS]:, { let topics: [usize; Self::NO_TOPICS] = self .topics @@ -791,13 +791,13 @@ impl ReceiptExtractionArgs .collect::>() .try_into() .unwrap(); - let data: [usize; Self::MAX_DATA] = self + let data: [usize; Self::MAX_DATA_WORDS] = self .data .into_iter() .collect::>() .try_into() .unwrap(); - EventLogInfo::<{ Self::NO_TOPICS }, { Self::MAX_DATA }> { + EventLogInfo::<{ Self::NO_TOPICS }, { Self::MAX_DATA_WORDS }> { size: self.size, address: self.address, add_rel_offset: self.add_rel_offset, @@ -830,10 +830,13 @@ impl ReceiptExtractionArgs impl TableSource for R where [(); ::NO_TOPICS]:, - [(); ::MAX_DATA]:, - [(); 7 - 2 - ::NO_TOPICS - ::MAX_DATA]:, + [(); ::MAX_DATA_WORDS]:, + [(); 7 + - 2 + - ::NO_TOPICS + - ::MAX_DATA_WORDS]:, { - type Metadata = EventLogInfo<{ R::NO_TOPICS }, { R::MAX_DATA }>; + type Metadata = EventLogInfo<{ R::NO_TOPICS }, { R::MAX_DATA_WORDS }>; fn can_query(&self) -> bool { false @@ -851,7 +854,7 @@ where let event = self.get_event(); async move { let contract_update = - ReceiptUpdate::new((R::NO_TOPICS as u8, R::MAX_DATA as u8), 5, 15); + ReceiptUpdate::new((R::NO_TOPICS as u8, R::MAX_DATA_WORDS as u8), 5, 15); let provider = ProviderBuilder::new() .with_recommended_fillers() @@ -867,7 +870,7 @@ where let block_number = ctx.block_number().await; let new_block_number = block_number as BlockPrimaryIndex; - let query = ReceiptQuery::<{ R::NO_TOPICS }, { R::MAX_DATA }> { + let query = ReceiptQuery::<{ R::NO_TOPICS }, { R::MAX_DATA_WORDS }> { contract: contract.address(), event, }; @@ -924,8 +927,11 @@ where let ChangeType::Receipt(relevant, others) = c else { panic!("Need ChangeType::Receipt, got: {:?}", c); }; - let contract_update = - ReceiptUpdate::new((R::NO_TOPICS as u8, R::MAX_DATA as u8), relevant, others); + let contract_update = ReceiptUpdate::new( + 
(R::NO_TOPICS as u8, R::MAX_DATA_WORDS as u8), + relevant, + others, + ); let provider = ProviderBuilder::new() .with_recommended_fillers() @@ -941,7 +947,7 @@ where let block_number = ctx.block_number().await; let new_block_number = block_number as BlockPrimaryIndex; - let query = ReceiptQuery::<{ R::NO_TOPICS }, { R::MAX_DATA }> { + let query = ReceiptQuery::<{ R::NO_TOPICS }, { R::MAX_DATA_WORDS }> { contract: contract.address(), event, }; diff --git a/mp2-v1/tests/common/mod.rs b/mp2-v1/tests/common/mod.rs index 7e53e0f50..7f7dd971f 100644 --- a/mp2-v1/tests/common/mod.rs +++ b/mp2-v1/tests/common/mod.rs @@ -83,64 +83,6 @@ pub struct TableInfo { impl TableInfo { pub fn metadata_hash(&self) -> MetadataHash { - // match &self.source { - // TableSource::Single(args) => { - // let slot = SlotInputs::Simple(args.slot_inputs.clone()); - // metadata_hash::( - // slot, - // &self.contract_address, - // self.chain_id, - // vec![], - // ) - // } - // TableSource::MappingValues(args, _) => { - // let slot_inputs = SlotInputs::Mapping(args.slot_inputs().to_vec()); - // metadata_hash::( - // slot_inputs, - // &self.contract_address, - // self.chain_id, - // vec![], - // ) - // } - // TableSource::MappingStruct(args, _) => { - // let slot_inputs = SlotInputs::Mapping(args.slot_inputs().to_vec()); - // metadata_hash::( - // slot_inputs, - // &self.contract_address, - // self.chain_id, - // vec![], - // ) - // } - // TableSource::MappingOfSingleValueMappings(args) => { - // let slot_inputs = SlotInputs::MappingOfMappings(args.slot_inputs().to_vec()); - // metadata_hash::( - // slot_inputs, - // &self.contract_address, - // self.chain_id, - // vec![], - // ) - // } - // TableSource::MappingOfStructMappings(args) => { - // let slot_inputs = SlotInputs::MappingOfMappings(args.slot_inputs().to_vec()); - // metadata_hash::( - // slot_inputs, - // &self.contract_address, - // self.chain_id, - // vec![], - // ) - // } - // TableSource::Merge(source) => { - // let single = 
SlotInputs::Simple(source.single.slot_inputs.clone()); - // let mapping = SlotInputs::Mapping(source.mapping.slot_inputs().to_vec()); - // merge_metadata_hash::( - // self.contract_address, - // self.chain_id, - // vec![], - // single, - // mapping, - // ) - // } - // } self.source .metadata_hash(self.contract_address, self.chain_id) } diff --git a/verifiable-db/Cargo.toml b/verifiable-db/Cargo.toml index 65d5d4e36..aa0a77052 100644 --- a/verifiable-db/Cargo.toml +++ b/verifiable-db/Cargo.toml @@ -28,6 +28,7 @@ futures.workspace = true rand.workspace = true serial_test.workspace = true tokio.workspace = true +mp2_v1 = { path = "../mp2-v1" } [features] original_poseidon = ["mp2_common/original_poseidon"] From 73f8212fc980bfaffbe3ce7fd693a9dcf0ff8718 Mon Sep 17 00:00:00 2001 From: Zack Youell Date: Wed, 29 Jan 2025 09:59:51 +0100 Subject: [PATCH 34/47] Error types for retries in mp2_common::eth --- Cargo.lock | 1 + Cargo.toml | 1 + mp2-common/src/array.rs | 10 - mp2-common/src/eth.rs | 300 +++++++++++---- mp2-common/src/mpt_sequential/key.rs | 62 +++- mp2-common/src/mpt_sequential/mod.rs | 2 +- mp2-v1/src/api.rs | 83 ++--- mp2-v1/src/lib.rs | 2 + mp2-v1/src/values_extraction/api.rs | 138 +++---- .../gadgets/metadata_gadget.rs | 344 +++++++++++------- mp2-v1/src/values_extraction/leaf_mapping.rs | 92 ++--- .../leaf_mapping_of_mappings.rs | 96 ++--- mp2-v1/src/values_extraction/leaf_receipt.rs | 161 +++----- mp2-v1/src/values_extraction/leaf_single.rs | 77 ++-- mp2-v1/src/values_extraction/mod.rs | 206 ++--------- mp2-v1/src/values_extraction/planner.rs | 213 +++++++---- mp2-v1/tests/common/cases/indexing.rs | 41 ++- mp2-v1/tests/common/cases/table_source.rs | 9 +- mp2-v1/tests/common/table.rs | 2 +- 19 files changed, 904 insertions(+), 936 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 424dee27f..ffa083363 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -88,6 +88,7 @@ dependencies = [ "alloy-contract", "alloy-core", "alloy-eips", + "alloy-json-rpc", 
"alloy-network", "alloy-node-bindings", "alloy-provider", diff --git a/Cargo.toml b/Cargo.toml index fab40aaec..13e11b5a1 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -25,6 +25,7 @@ alloy = { version = "0.6", default-features = false, features = [ "rlp", "rpc", "rpc-types", + "json-rpc", "signer-local", "sol-types", "transport-http", diff --git a/mp2-common/src/array.rs b/mp2-common/src/array.rs index 984fcc4a4..120235f62 100644 --- a/mp2-common/src/array.rs +++ b/mp2-common/src/array.rs @@ -693,16 +693,6 @@ where b: &mut CircuitBuilder, at: Target, ) -> Array { - let m = b.constant(F::from_canonical_usize(SUB_SIZE)); - let array_len = b.constant(F::from_canonical_usize(SIZE)); - let upper_bound = b.add(at, m); - let num_bits_size = SIZE.ilog2() + 1; - - let lt = less_than_or_equal_to_unsafe(b, upper_bound, array_len, num_bits_size as usize); - - let t = b._true(); - b.connect(t.target, lt.target); - Array:: { arr: core::array::from_fn(|i| { let i_target = b.constant(F::from_canonical_usize(i)); diff --git a/mp2-common/src/eth.rs b/mp2-common/src/eth.rs index ab78d7dbd..0fac6de3f 100644 --- a/mp2-common/src/eth.rs +++ b/mp2-common/src/eth.rs @@ -7,22 +7,28 @@ use alloy::{ primitives::{Address, Log, B256, U256}, providers::{Provider, RootProvider}, rlp::{Decodable, Encodable as AlloyEncodable}, - rpc::types::{ - Block, BlockTransactions, EIP1186AccountProofResponse, Filter, ReceiptEnvelope, Transaction, + rpc::{ + json_rpc::RpcError, + types::{ + Block, BlockTransactions, EIP1186AccountProofResponse, Filter, ReceiptEnvelope, + Transaction, TransactionReceipt, + }, }, transports::Transport, }; -use anyhow::{anyhow, bail, Context, Result}; -use eth_trie::{EthTrie, MemoryDB, Trie}; + +use eth_trie::{EthTrie, MemoryDB, Trie, TrieError}; use ethereum_types::H256; use itertools::Itertools; use log::{debug, warn}; -use rlp::{Encodable, Rlp}; +use rlp::{DecoderError, Encodable, Rlp}; use serde::{Deserialize, Serialize}; + use std::{ array::from_fn as create_array, 
collections::{BTreeSet, HashMap}, + fmt::{Debug, Display, Formatter}, sync::Arc, }; @@ -45,6 +51,65 @@ const MAX_RECEIPT_DATA_SIZE: usize = 32; /// The size of an event topic rlp encoded. const ENCODED_TOPIC_SIZE: usize = 33; +/// The number of bytes the transaction type takes up in a Receipts RLP encoding. +const TX_TYPE_BYTES: usize = 1; + +/// Error enum encompassing different errors that can arise in this module. +#[derive(Debug)] +pub enum MP2EthError { + /// Error occuring from a [`RpcError`], but not necessarily one we should retry. + RpcError(String), + /// An error that occurs when trying to fetch data from an RPC node, used so that we can know we should retry the call in this case. + FetchError, + /// An error that arises from a method within the [`rlp`] crate. + RlpError(DecoderError), + /// An error arising from rlp decoding methods in the [`alloy::rlp`] crate. + AlloyRlpError(alloy::rlp::Error), + /// An error arising from methods in the [`eth_trie`] crate. + TrieError(TrieError), + /// Any other error arising from the functions in this module. 
+ InternalError(String), +} + +impl std::error::Error for MP2EthError {} + +impl Display for MP2EthError { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + match self { + MP2EthError::RpcError(s) => write!(f,"Error returned when making an RPC call {{ inner: {:?} }}", s), + MP2EthError::FetchError => write!(f, "Error occured when trying to fetch data from an RPC node"), + MP2EthError::RlpError(e) => write!(f,"Error returned when performing Rlp encoding or decoding function {{ inner: {:?} }}", e), + MP2EthError::AlloyRlpError(e) => write!(f, "Error when decding to alloy type: {:?}", e), + MP2EthError::TrieError(e) => write!(f, "Error returned when construct or querying an MPT {{ inner: {:?} }}", e), + MP2EthError::InternalError(s) => write!(f, "Error occured in eth related code: {}", s) + } + } +} + +impl From> for MP2EthError { + fn from(value: RpcError) -> Self { + MP2EthError::RpcError(format!("{:?}", value)) + } +} + +impl From for MP2EthError { + fn from(value: DecoderError) -> Self { + MP2EthError::RlpError(value) + } +} + +impl From for MP2EthError { + fn from(value: TrieError) -> Self { + MP2EthError::TrieError(value) + } +} + +impl From for MP2EthError { + fn from(value: alloy::rlp::Error) -> Self { + MP2EthError::AlloyRlpError(value) + } +} + pub trait Rlpable { fn block_hash(&self) -> Vec { keccak256(&self.rlp()) @@ -84,7 +149,7 @@ pub enum NodeType { } /// Function that returns the [`NodeType`] of an RLP encoded MPT node -pub fn node_type(rlp_data: &[u8]) -> Result { +pub fn node_type(rlp_data: &[u8]) -> Result { let rlp = Rlp::new(rlp_data); let item_count = rlp.item_count()?; @@ -102,14 +167,14 @@ pub fn node_type(rlp_data: &[u8]) -> Result { match first_byte / 16 { 0 | 1 => Ok(NodeType::Extension), 2 | 3 => Ok(NodeType::Leaf), - _ => Err(anyhow!( - "Expected compact encoding beginning with 0,1,2 or 3" + _ => Err(MP2EthError::InternalError( + "Expected compact encoding beginning with 0,1,2 or 3".to_string(), )), } } else { - Err(anyhow!( + 
Err(MP2EthError::InternalError(format!( "RLP encoded Node item count was {item_count}, expected either 17 or 2" - )) + ))) } } @@ -150,22 +215,76 @@ pub fn left_pad(slice: &[u8]) -> [u8; N] { } } -/// Query the latest block. -pub async fn query_latest_block(provider: &RootProvider) -> Result { +/// Query a specific block. +pub async fn query_block( + provider: &RootProvider, + id: BlockNumberOrTag, +) -> Result { // Query the MPT proof with retries. - for i in 0..RETRY_NUM { - if let Ok(response) = provider - .get_block_by_number(BlockNumberOrTag::Latest, true.into()) - .await - { + for i in 0..RETRY_NUM - 1 { + if let Ok(response) = provider.get_block_by_number(id, true.into()).await { // Has one block at least. - return Ok(response.unwrap()); + return response.ok_or(MP2EthError::RpcError( + "Call to get block successful but returned None".to_string(), + )); } else { warn!("Failed to query the block - {i} time") } } - bail!("Failed to query the block "); + // For the final attempt we return the error + let resp = provider.get_block_by_number(id, true.into()).await; + + match resp { + Ok(option) => match option { + Some(block) => Ok(block), + None => Err(MP2EthError::RpcError( + "Get block by number call did not error but returned a None value".to_string(), + )), + }, + Err(_) => { + warn!("Failed to query the block - {} time", RETRY_NUM - 1); + Err(MP2EthError::FetchError) + } + } +} + +/// Query a specific block for its receipts. +pub async fn query_block_receipts( + provider: &RootProvider, + id: BlockNumberOrTag, +) -> Result, MP2EthError> { + // Query the MPT proof with retries. + for i in 0..RETRY_NUM - 1 { + if let Ok(response) = provider.get_block_receipts(id.into()).await { + // Has one block at least. 
+ return response.ok_or(MP2EthError::InternalError( + "Call to get block receipts successful but returned None".to_string(), + )); + } else { + warn!("Failed to query the block receipts - {i} time") + } + } + + // For the final attempt we return the error + let resp = provider.get_block_receipts(id.into()).await; + + match resp { + Ok(option) => match option { + Some(block) => Ok(block), + None => Err(MP2EthError::RpcError( + "Get Receipts by block number call did not error but returned a None value" + .to_string(), + )), + }, + Err(_) => { + warn!( + "Failed to query the block receipts - {} time", + RETRY_NUM - 1 + ); + Err(MP2EthError::FetchError) + } + } } pub struct ProofQuery { @@ -271,7 +390,7 @@ impl EventLogInfo Result { + pub fn get_log_offset(&self, node: &[u8]) -> Result { let node_rlp = rlp::Rlp::new(node); // The actual receipt data is item 1 in the list @@ -281,7 +400,7 @@ impl EventLogInfo EventLogInfo) -> Result { + pub fn new_mapping(parent: StorageSlot, mapping_key: Vec) -> Result { let parent = Box::new(parent); if !matches!( *parent, StorageSlot::Mapping(_, _) | StorageSlot::Node(Self::Mapping(_, _)) ) { - bail!("The parent of a Slot mapping entry must be type of mapping"); + return Err(MP2EthError::InternalError( + "The parent of a Slot mapping entry must be type of mapping".to_string(), + )); } Ok(Self::Mapping(parent, mapping_key)) @@ -480,10 +601,10 @@ impl ProofQuery { &self, provider: &RootProvider, block: BlockNumberOrTag, - ) -> Result { + ) -> Result { // Query the MPT proof with retries. 
- for i in 0..RETRY_NUM { - let location = self.slot.location(); + let location = self.slot.location(); + for i in 0..RETRY_NUM - 1 { debug!( "Querying MPT proof:\n\tslot = {:?}, location = {:?}", self.slot, @@ -499,11 +620,20 @@ impl ProofQuery { } } - bail!("Failed to query the MPT proof {RETRY_NUM} in total"); + match provider + .get_proof(self.contract, vec![location]) + .block_id(block.into()) + .await + { + Ok(response) => Ok(response), + Err(_) => Err(MP2EthError::FetchError), + } } /// Returns the raw value from the storage proof, not the one "interpreted" by the /// JSON RPC so we can see how the encoding is done. - pub fn verify_storage_proof(proof: &EIP1186AccountProofResponse) -> Result> { + pub fn verify_storage_proof( + proof: &EIP1186AccountProofResponse, + ) -> Result, MP2EthError> { let memdb = Arc::new(MemoryDB::new(true)); let tx_trie = EthTrie::new(Arc::clone(&memdb)); let proof_key_bytes = proof.storage_proof[0].key.as_b256(); @@ -517,18 +647,17 @@ impl ProofQuery { .iter() .map(|b| b.to_vec()) .collect(), - ); - // key must be valid, proof must be valid and value must exist - if is_valid.is_err() { - bail!("proof is not valid"); - } - if let Some(ext_value) = is_valid.unwrap() { + )?; + + if let Some(ext_value) = is_valid { Ok(ext_value) } else { - bail!("proof says the value associated with that key does not exist"); + Err(MP2EthError::InternalError( + "proof says the value associated with that key does not exist".to_string(), + )) } } - pub fn verify_state_proof(&self, res: &EIP1186AccountProofResponse) -> Result<()> { + pub fn verify_state_proof(&self, res: &EIP1186AccountProofResponse) -> Result<(), MP2EthError> { let memdb = Arc::new(MemoryDB::new(true)); let tx_trie = EthTrie::new(Arc::clone(&memdb)); @@ -542,26 +671,29 @@ impl ProofQuery { state_root_hash, &mpt_key, res.account_proof.iter().map(|b| b.to_vec()).collect(), - ); + )?; - if is_valid.is_err() { - bail!("Account proof is invalid"); - } - if is_valid.unwrap().is_none() { - 
bail!("Account proof says the value associated with that key does not exist"); + if is_valid.is_none() { + return Err(MP2EthError::InternalError( + "Account proof says the value associated with that key does not exist".to_string(), + )); } // The length of acount node must be 104 bytes (8 + 32 + 32 + 32) as: // [nonce (U64), balance (U256), storage_hash (H256), code_hash (H256)] - let account_node = res.account_proof.last().unwrap(); - assert_eq!(account_node.len(), 104); - - Ok(()) + let account_node = res.account_proof.last().ok_or(MP2EthError::InternalError( + "Account proof response was empty".to_string(), + ))?; + if account_node.len() != 104 { + Err(MP2EthError::InternalError(format!("The length of acount node must be 104 bytes (8 + 32 + 32 + 32), retrieved node length: {}", account_node.len()))) + } else { + Ok(()) + } } } impl ReceiptProofInfo { - pub fn to_receipt(&self) -> Result { + pub fn to_receipt(&self) -> Result { let memdb = Arc::new(MemoryDB::new(true)); let tx_trie = EthTrie::new(Arc::clone(&memdb)); @@ -569,11 +701,12 @@ impl ReceiptProofInfo { let valid = tx_trie .verify_proof(self.mpt_root, &mpt_key, self.mpt_proof.clone())? 
- .ok_or(anyhow!("No proof found when verifying"))?; + .ok_or(MP2EthError::InternalError( + "No proof found when verifying".to_string(), + ))?; let rlp_receipt = rlp::Rlp::new(&valid[1..]); - ReceiptWithBloom::decode(&mut rlp_receipt.as_raw()) - .map_err(|e| anyhow!("Could not decode receipt got: {}", e)) + ReceiptWithBloom::decode(&mut rlp_receipt.as_raw()).map_err(|e| e.into()) } } @@ -592,7 +725,7 @@ impl ReceiptQuery, block: BlockNumberOrTag, - ) -> Result> { + ) -> Result, MP2EthError> { // Retrieve the transaction indices for the relevant logs let tx_indices = self.retrieve_tx_indices(provider, block).await?; @@ -606,24 +739,40 @@ impl ReceiptQuery, block: BlockNumberOrTag, - ) -> Result> { + ) -> Result, MP2EthError> { let filter = Filter::new() .select(block) .address(self.contract) .event_signature(B256::from(self.event.event_signature)); - let logs = provider.get_logs(&filter).await?; - - // For each of the logs return the transacion its included in, then sort and remove duplicates. - Ok(BTreeSet::from_iter( - logs.iter().map_while(|log| log.transaction_index), - )) + for i in 0..RETRY_NUM - 1 { + debug!( + "Querying Receipt logs:\n\tevent signature = {:?}", + self.event.event_signature, + ); + match provider.get_logs(&filter).await { + // For each of the logs return the transacion its included in, then sort and remove duplicates. + Ok(response) => { + return Ok(BTreeSet::from_iter( + response.iter().map_while(|log| log.transaction_index), + )) + } + Err(e) => println!("Failed to query the Receipt logs at {i} time: {e:?}"), + } + } + match provider.get_logs(&filter).await { + // For each of the logs return the transacion its included in, then sort and remove duplicates. 
+ Ok(response) => Ok(BTreeSet::from_iter( + response.iter().map_while(|log| log.transaction_index), + )), + Err(_) => Err(MP2EthError::FetchError), + } } /// Function that takes a list of transaction indices in the form of a [`BTreeSet`] and a [`BlockUtil`] and returns a list of [`ReceiptProofInfo`] pub fn extract_info( tx_indices: &BTreeSet, block_util: &mut BlockUtil, - ) -> Result> { + ) -> Result, MP2EthError> { let mpt_root = block_util.receipts_trie.root_hash()?; let proofs = tx_indices .iter() @@ -638,7 +787,7 @@ impl ReceiptQuery, eth_trie::TrieError>>()?; + .collect::, MP2EthError>>()?; Ok(proofs) } @@ -692,17 +841,14 @@ impl BlockUtil { pub async fn fetch( t: &RootProvider, id: BlockNumberOrTag, - ) -> Result { - let block = t - .get_block(id.into(), alloy::rpc::types::BlockTransactionsKind::Full) - .await? - .context("can't get block")?; - let receipts = t - .get_block_receipts(id.into()) - .await? - .context("can't get receipts")?; + ) -> Result { + let block = query_block(t, id).await?; + + let receipts = query_block_receipts(t, id).await?; let BlockTransactions::Full(all_tx) = block.transactions() else { - bail!("can't see full transactions"); + return Err(MP2EthError::InternalError( + "Could not recover full transactions from Block".to_string(), + )); }; // check receipt root let all_tx_map = HashMap::::from_iter( @@ -753,11 +899,11 @@ impl BlockUtil { }) } - // recompute the receipts trie by first converting all receipts form RPC type to consensus type - // since in Alloy these are two different types and RLP functions are only implemented for - // consensus ones. + /// recompute the receipts trie by first converting all receipts from RPC type to consensus type + /// since in Alloy these are two different types and RLP functions are only implemented for + /// consensus ones. 
#[cfg(test)] - fn check(&mut self) -> Result<()> { + fn check(&mut self) -> Result<(), MP2EthError> { let computed = self.receipts_trie.root_hash()?; let tx_computed = self.transactions_trie.root_hash()?; let expected = self.block.header.receipts_root; @@ -801,7 +947,7 @@ mod test { providers::{Provider, ProviderBuilder}, rlp::Decodable, }; - + use anyhow::{anyhow, Context, Result}; use eth_trie::Nibbles; use ethereum_types::U64; use ethers::{ diff --git a/mp2-common/src/mpt_sequential/key.rs b/mp2-common/src/mpt_sequential/key.rs index 45424c623..1426ce548 100644 --- a/mp2-common/src/mpt_sequential/key.rs +++ b/mp2-common/src/mpt_sequential/key.rs @@ -1,6 +1,11 @@ //! MPT key gadget -use crate::{array::Array, keccak::PACKED_HASH_LEN, rlp::MAX_KEY_NIBBLE_LEN}; +use crate::{ + array::Array, + keccak::PACKED_HASH_LEN, + rlp::MAX_KEY_NIBBLE_LEN, + utils::{less_than, less_than_or_equal_to_unsafe}, +}; use core::array::from_fn as create_array; use eth_trie::Nibbles; use plonky2::{ @@ -159,4 +164,59 @@ impl MPTKeyWireGeneric { pointer: b.constant(F::from_canonical_usize(KEY_LENGTH - 1)), } } + + /// This function folds the MPT Key down into a single value, it is used in receipts to recover the transaction index. + pub fn fold_key, const D: usize>( + &self, + b: &mut CircuitBuilder, + ) -> Target { + let t = b._true(); + let zero = b.zero(); + let one = b.one(); + + // First we check that the pointer is at most 15, otherwise the result will not fit in a Target + // (without overflow) + let sixteen = b.constant(F::from_canonical_u8(16)); + let check = less_than(b, self.pointer, sixteen, 5); + b.connect(check.target, t.target); + + // We have to check if the first two nibbles sum to precisely 128, we should + // always have at least two nibbles otherwise the key was empty. 
+ let first_nibbles = &self.key.arr[..2]; + let tmp = b.mul(first_nibbles[0], sixteen); + let tmp = b.add(tmp, first_nibbles[1]); + + let one_two_eight = b.constant(F::from_canonical_u8(128)); + + let first_byte_128 = b.is_equal(one_two_eight, tmp); + + // If the pointer is 1 then we should make sure we return zero as the value + let pointer_is_one = b.is_equal(self.pointer, one); + let byte_selector = b.and(pointer_is_one, first_byte_128); + + let initial = b.select(byte_selector, zero, tmp); + + let combiner = b.constant(F::from_canonical_u32(1u32 << 8)); + // We fold over the remaining nibbles of the key + self.key + .arr + .chunks(2) + .enumerate() + .skip(1) + .fold(initial, |acc, (i, chunk)| { + // First we multiply the accumulator by 2^8, then recreate the current byte by multiplying the large_nibble by 16 and adding the current small_nibble + let tmp = b.mul(chunk[0], sixteen); + let tmp = b.add(tmp, chunk[1]); + + let tmp_acc = b.mul(acc, combiner); + let tmp = b.add(tmp_acc, tmp); + + // Convert the index to a target + let index = b.constant(F::from_canonical_usize(2 * i)); + + // If the index is less than the pointer we return tmp, otherwise we return acc. + let selector = less_than_or_equal_to_unsafe(b, index, self.pointer, 8); + b.select(selector, tmp, acc) + }) + } } diff --git a/mp2-common/src/mpt_sequential/mod.rs b/mp2-common/src/mpt_sequential/mod.rs index 4ebaa4e81..4f68c7c3c 100644 --- a/mp2-common/src/mpt_sequential/mod.rs +++ b/mp2-common/src/mpt_sequential/mod.rs @@ -362,7 +362,7 @@ where /// And it returns: /// * New key with the pointer moved. /// * The child hash / value of the node. -/// * A boolean that must be true if the given node is a leaf or an extension. +/// * A boolean that must be true if the given node is a branch. /// * The nibble position before this advance. 
pub fn advance_key_branch< F: RichField + Extendable, diff --git a/mp2-v1/src/api.rs b/mp2-v1/src/api.rs index 09254151e..201ce0ecb 100644 --- a/mp2-v1/src/api.rs +++ b/mp2-v1/src/api.rs @@ -11,9 +11,12 @@ use crate::{ }, values_extraction::{ self, compute_id_with_prefix, - gadgets::column_info::{ExtractedColumnInfo, InputColumnInfo}, - identifier_block_column, identifier_for_value_column, ColumnMetadata, INNER_KEY_ID_PREFIX, - KEY_ID_PREFIX, OUTER_KEY_ID_PREFIX, + gadgets::{ + column_info::{ExtractedColumnInfo, InputColumnInfo}, + metadata_gadget::TableMetadata, + }, + identifier_block_column, identifier_for_value_column, INNER_KEY_ID_PREFIX, KEY_ID_PREFIX, + OUTER_KEY_ID_PREFIX, }, MAX_RECEIPT_LEAF_NODE_LEN, }; @@ -46,31 +49,26 @@ pub struct InputNode { /// We use `512` in as the `NODE_LEN` in [`values_extraction::CircuitInput`] to represent /// the maximum length of a Receipt Trie leaf node. The Storage trie leaf node size is now hard coded into /// the circuits. -type ValuesExtractionInput = - values_extraction::CircuitInput<512, MAX_COLUMNS>; +type ValuesExtractionInput = + values_extraction::CircuitInput<512, MAX_EXTRACTED_COLUMNS>; /// We use `512` in as the `NODE_LEN` in [`values_extraction::PublicParameters`] to represent /// the maximum length of a Receipt Trie leaf node. The Storage trie leaf node size is now hard coded into /// the circuits. 
-type ValuesExtractionParameters = - values_extraction::PublicParameters<512, MAX_COLUMNS>; +type ValuesExtractionParameters = + values_extraction::PublicParameters<512, MAX_EXTRACTED_COLUMNS>; fn sanity_check() { assert_eq!(MAX_RECEIPT_LEAF_NODE_LEN, 512); } /// Set of inputs necessary to generate proofs for each circuit employed in the /// pre-processing stage of LPN -pub enum CircuitInput -where - [(); MAX_COLUMNS - 2]:, - [(); MAX_COLUMNS - 1]:, - [(); MAX_COLUMNS - 0]:, -{ +pub enum CircuitInput { /// Contract extraction input ContractExtraction(contract_extraction::CircuitInput), /// Length extraction input LengthExtraction(LengthCircuitInput), /// Values extraction input - ValuesExtraction(ValuesExtractionInput), + ValuesExtraction(ValuesExtractionInput), /// Block extraction necessary input BlockExtraction(block_extraction::CircuitInput), /// Final extraction input @@ -87,26 +85,16 @@ where #[derive(Serialize, Deserialize)] /// Parameters defining all the circuits employed for the pre-processing stage of LPN -pub struct PublicParameters -where - [(); MAX_COLUMNS - 2]:, - [(); MAX_COLUMNS - 1]:, - [(); MAX_COLUMNS - 0]:, -{ +pub struct PublicParameters { contract_extraction: contract_extraction::PublicParameters, length_extraction: length_extraction::PublicParameters, - values_extraction: ValuesExtractionParameters, + values_extraction: ValuesExtractionParameters, block_extraction: block_extraction::PublicParameters, final_extraction: final_extraction::PublicParameters, tree_creation: verifiable_db::api::PublicParameters>, } -impl PublicParameters -where - [(); MAX_COLUMNS - 2]:, - [(); MAX_COLUMNS - 1]:, - [(); MAX_COLUMNS - 0]:, -{ +impl PublicParameters { pub fn get_params_info(&self) -> Result> { self.tree_creation.get_params_info() } @@ -115,19 +103,18 @@ where pub fn empty_cell_tree_proof(&self) -> Result> { self.tree_creation.empty_cell_tree_proof() } - pub fn get_value_extraction_params(&self) -> &ValuesExtractionParameters { + + pub fn 
get_value_extraction_params( + &self, + ) -> &ValuesExtractionParameters { &self.values_extraction } } /// Instantiate the circuits employed for the pre-processing stage of LPN, /// returning their corresponding parameters -pub fn build_circuits_params() -> PublicParameters -where - [(); MAX_COLUMNS - 2]:, - [(); MAX_COLUMNS - 1]:, - [(); MAX_COLUMNS - 0]:, -{ +pub fn build_circuits_params( +) -> PublicParameters { sanity_check(); log::info!("Building contract_extraction parameters..."); let contract_extraction = contract_extraction::build_circuits_params(); @@ -161,15 +148,10 @@ where /// Generate a proof for a circuit in the set of circuits employed in the /// pre-processing stage of LPN, employing `CircuitInput` to specify for which /// circuit the proof should be generated -pub fn generate_proof( - params: &PublicParameters, - input: CircuitInput, -) -> Result> -where - [(); MAX_COLUMNS - 2]:, - [(); MAX_COLUMNS - 1]:, - [(); MAX_COLUMNS - 0]:, -{ +pub fn generate_proof( + params: &PublicParameters, + input: CircuitInput, +) -> Result> { match input { CircuitInput::ContractExtraction(input) => { contract_extraction::generate_proof(¶ms.contract_extraction, input) @@ -253,7 +235,7 @@ impl SlotInputs { contract_address: &Address, chain_id: u64, extra: Vec, - ) -> ColumnMetadata { + ) -> TableMetadata { let (slot, extracted_columns) = match self { SlotInputs::Simple(ref inner) | SlotInputs::Mapping(ref inner) @@ -306,7 +288,7 @@ impl SlotInputs { _ => vec![], }; - ColumnMetadata::new(input_columns, extracted_columns) + TableMetadata::new(&input_columns, &extracted_columns) } } @@ -463,18 +445,17 @@ pub fn metadata_hash( extra: Vec, ) -> MetadataHash { // closure to compute the metadata digest associated to a mapping variable - let (value_digest, length_digest) = - value_metadata(slot_input, contract_address, chain_id, extra); + let (md_digest, length_digest) = value_metadata(slot_input, contract_address, chain_id, extra); // Correspond to the computation of final 
extraction base circuit. - let value_digest = map_to_curve_point(&value_digest.to_fields()); + let md_digest = map_to_curve_point(&md_digest.to_fields()); // add contract digest let contract_digest = contract_metadata_digest(contract_address); debug!( "METADATA_HASH ->\n\tvalues_ext_md = {:?}\n\tcontract_md = {:?}\n\tfinal_ex_md(contract + values_ex) = {:?}", - value_digest.to_weierstrass(), + md_digest.to_weierstrass(), contract_digest.to_weierstrass(), - (contract_digest + value_digest).to_weierstrass(), + (contract_digest + md_digest).to_weierstrass(), ); // compute final hash - combine_digest_and_block(contract_digest + value_digest + length_digest) + combine_digest_and_block(contract_digest + md_digest + length_digest) } diff --git a/mp2-v1/src/lib.rs b/mp2-v1/src/lib.rs index 3c64b501c..4c70520c4 100644 --- a/mp2-v1/src/lib.rs +++ b/mp2-v1/src/lib.rs @@ -30,6 +30,8 @@ pub const L32_LEAF_VALUE_LEN: usize = L32(MAX_LEAF_VALUE_LEN); /// The maximum size of receipt leaf that we accept in the code, any larger causes additiona keccak hashing to occur resulting in /// different circuits. pub const MAX_RECEIPT_LEAF_NODE_LEN: usize = 512; +/// This is the maximum number of columns that are extracted from a log in a receipt, it corresponds to three topics and two EVM words of additional data +pub const MAX_RECEIPT_COLUMNS: usize = 5; pub mod api; pub mod block_extraction; diff --git a/mp2-v1/src/values_extraction/api.rs b/mp2-v1/src/values_extraction/api.rs index 2ed9b555d..d2607e165 100644 --- a/mp2-v1/src/values_extraction/api.rs +++ b/mp2-v1/src/values_extraction/api.rs @@ -1,5 +1,5 @@ //! 
Values extraction APIs -#![allow(clippy::identity_op)] + use super::{ branch::{BranchCircuit, BranchWires}, extension::{ExtensionNodeCircuit, ExtensionNodeWires}, @@ -14,7 +14,7 @@ use super::{ public_inputs::PublicInputs, INNER_KEY_ID_PREFIX, KEY_ID_PREFIX, OUTER_KEY_ID_PREFIX, }; -use crate::{api::InputNode, MAX_BRANCH_NODE_LEN}; +use crate::{api::InputNode, MAX_BRANCH_NODE_LEN, MAX_RECEIPT_COLUMNS}; use anyhow::{bail, ensure, Result}; use log::debug; use mp2_common::{ @@ -46,27 +46,22 @@ const NUM_IO: usize = PublicInputs::::TOTAL_LEN; /// CircuitInput is a wrapper around the different specialized circuits that can /// be used to prove a MPT node recursively. #[derive(Serialize, Deserialize)] -pub enum CircuitInput +pub enum CircuitInput where [(); PAD_LEN(LEAF_LEN)]:, - [(); MAX_COLUMNS - 2]:, - [(); MAX_COLUMNS - 1]:, - [(); MAX_COLUMNS - 0]:, { - LeafSingle(LeafSingleCircuit), - LeafMapping(LeafMappingCircuit), - LeafMappingOfMappings(LeafMappingOfMappingsCircuit), - LeafReceipt(ReceiptLeafCircuit), + LeafSingle(LeafSingleCircuit), + LeafMapping(LeafMappingCircuit), + LeafMappingOfMappings(LeafMappingOfMappingsCircuit), + LeafReceipt(ReceiptLeafCircuit), Extension(ExtensionInput), Branch(BranchInput), } -impl CircuitInput +impl + CircuitInput where [(); PAD_LEN(LEAF_LEN)]:, - [(); MAX_COLUMNS - 2]:, - [(); MAX_COLUMNS - 1]:, - [(); MAX_COLUMNS - 0]:, { /// Create a circuit input for proving a leaf MPT node of single variable. 
pub fn new_single_variable_leaf( @@ -75,7 +70,7 @@ where evm_word: u32, table_info: Vec, ) -> Self { - let metadata = TableMetadata::::new(&[], &table_info); + let metadata = TableMetadata::new(&[], &table_info); let slot = SimpleSlot::new(slot); @@ -98,7 +93,7 @@ where ) -> Self { let input_column = InputColumnInfo::new(&[slot], key_id, KEY_ID_PREFIX, 32); - let metadata = TableMetadata::::new(&[input_column], &table_info); + let metadata = TableMetadata::new(&[input_column], &table_info); let slot = MappingSlot::new(slot, mapping_key); @@ -128,10 +123,7 @@ where let inner_input_column = InputColumnInfo::new(&[slot], inner_key_id, INNER_KEY_ID_PREFIX, 32); - let metadata = TableMetadata::::new( - &[outer_input_column, inner_input_column], - &table_info, - ); + let metadata = TableMetadata::new(&[outer_input_column, inner_input_column], &table_info); let slot = MappingSlot::new(slot, outer_key); @@ -146,16 +138,13 @@ where /// Create a circuit input for proving a leaf MPT node of a transaction receipt. pub fn new_receipt_leaf( - last_node: &[u8], + node: &[u8], tx_index: u64, event: &EventLogInfo, - ) -> Self - where - [(); 7 - 2 - NO_TOPICS - MAX_DATA_WORDS]:, - { + ) -> Self { CircuitInput::LeafReceipt( - ReceiptLeafCircuit::::new::( - last_node, tx_index, event, + ReceiptLeafCircuit::::new::( + node, tx_index, event, ) .expect("Could not construct Receipt Leaf Circuit"), ) @@ -183,18 +172,16 @@ where /// Most notably, it holds them in a way to use the recursion framework allowing /// us to specialize circuits according to the situation. 
#[derive(Eq, PartialEq, Serialize, Deserialize)] -pub struct PublicParameters +pub struct PublicParameters where [(); PAD_LEN(LEAF_LEN)]:, - [(); MAX_COLUMNS - 2]:, - [(); MAX_COLUMNS - 1]:, - [(); MAX_COLUMNS - 0]:, { - leaf_single: CircuitWithUniversalVerifier>, - leaf_mapping: CircuitWithUniversalVerifier>, + leaf_single: CircuitWithUniversalVerifier>, + leaf_mapping: CircuitWithUniversalVerifier>, leaf_mapping_of_mappings: - CircuitWithUniversalVerifier>, - leaf_receipt: CircuitWithUniversalVerifier>, + CircuitWithUniversalVerifier>, + leaf_receipt: + CircuitWithUniversalVerifier>, extension: CircuitWithUniversalVerifier, #[cfg(not(test))] branches: BranchCircuits, @@ -208,13 +195,10 @@ where /// Public API employed to build the MPT circuits, which are returned in /// serialized form. -pub fn build_circuits_params( -) -> PublicParameters +pub fn build_circuits_params( +) -> PublicParameters where [(); PAD_LEN(LEAF_LEN)]:, - [(); MAX_COLUMNS - 2]:, - [(); MAX_COLUMNS - 1]:, - [(); MAX_COLUMNS - 0]:, { PublicParameters::build() } @@ -222,15 +206,12 @@ where /// Public API employed to generate a proof for the circuit specified by /// `CircuitInput`, employing the `circuit_params` generated with the /// `build_circuits_params` API. 
-pub fn generate_proof( - circuit_params: &PublicParameters, - circuit_type: CircuitInput, +pub fn generate_proof( + circuit_params: &PublicParameters, + circuit_type: CircuitInput, ) -> Result> where [(); PAD_LEN(LEAF_LEN)]:, - [(); MAX_COLUMNS - 2]:, - [(); MAX_COLUMNS - 1]:, - [(); MAX_COLUMNS - 0]:, { circuit_params.generate_proof(circuit_type)?.serialize() } @@ -386,13 +367,11 @@ impl_branch_circuits!(TestBranchCircuits, 1, 4, 9); /// 3 branch circuits + 1 extension + 1 leaf single + 1 leaf mapping + 1 leaf mapping of mappings + 1 leaf receipt const MAPPING_CIRCUIT_SET_SIZE: usize = 8; -impl PublicParameters +impl + PublicParameters where [(); PAD_LEN(LEAF_LEN)]:, [(); >::HASH_SIZE]:, - [(); MAX_COLUMNS - 2]:, - [(); MAX_COLUMNS - 1]:, - [(); MAX_COLUMNS - 0]:, { /// Generates the circuit parameters for the MPT circuits. fn build() -> Self { @@ -409,17 +388,20 @@ where ); debug!("Building leaf single circuit"); - let leaf_single = circuit_builder.build_circuit::>(()); + let leaf_single = + circuit_builder.build_circuit::>(()); debug!("Building leaf mapping circuit"); - let leaf_mapping = circuit_builder.build_circuit::>(()); + let leaf_mapping = + circuit_builder.build_circuit::>(()); debug!("Building leaf mapping of mappings circuit"); - let leaf_mapping_of_mappings = - circuit_builder.build_circuit::>(()); + let leaf_mapping_of_mappings = circuit_builder + .build_circuit::>(()); debug!("Building leaf receipt circuit"); - let leaf_receipt = circuit_builder.build_circuit::>(()); + let leaf_receipt = circuit_builder + .build_circuit::>(()); debug!("Building extension circuit"); let extension = circuit_builder.build_circuit::(()); @@ -456,7 +438,7 @@ where fn generate_proof( &self, - circuit_type: CircuitInput, + circuit_type: CircuitInput, ) -> Result { let set = &self.get_circuit_set(); match circuit_type { @@ -514,7 +496,9 @@ mod tests { super::{public_inputs, StorageSlotInfo}, *, }; - use crate::{tests::TEST_MAX_COLUMNS, MAX_RECEIPT_LEAF_NODE_LEN}; + use 
crate::{ + tests::TEST_MAX_COLUMNS, values_extraction::storage_value_digest, MAX_RECEIPT_LEAF_NODE_LEN, + }; use alloy::primitives::Address; use eth_trie::{EthTrie, MemoryDB, Trie}; use itertools::Itertools; @@ -1041,7 +1025,7 @@ mod tests { let value: [u8; 32] = leaf_tuple[1][1..].to_vec().try_into().unwrap(); let evm_word = test_slot.evm_word(); - let location_offset = F::from_canonical_u32(evm_word); + let table_info = test_slot.table_info(); // Build the identifier extra data, it's used to compute the key IDs. @@ -1057,12 +1041,7 @@ mod tests { // Simple variable slot StorageSlot::Simple(slot) => { let metadata_digest = metadata.digest(); - let values_digest = metadata.storage_values_digest( - &[], - value.as_slice(), - &[*slot as u8], - location_offset, - ); + let values_digest = storage_value_digest(&metadata, &[], &value, evm_word); let circuit_input = CircuitInput::new_single_variable_leaf( node, @@ -1077,12 +1056,8 @@ mod tests { StorageSlot::Mapping(mapping_key, slot) => { let padded_key = left_pad32(mapping_key); let metadata_digest = metadata.digest(); - let values_digest = metadata.storage_values_digest( - &[&padded_key], - value.as_slice(), - &[*slot as u8], - location_offset, - ); + let values_digest = + storage_value_digest(&metadata, &[&padded_key], &value, evm_word); let outer_key_id = metadata.input_columns()[0].identifier().0; @@ -1101,12 +1076,7 @@ mod tests { // Simple Struct StorageSlot::Simple(slot) => { let metadata_digest = metadata.digest(); - let values_digest = metadata.storage_values_digest( - &[], - value.as_slice(), - &[slot as u8], - location_offset, - ); + let values_digest = storage_value_digest(&metadata, &[], &value, evm_word); let circuit_input = CircuitInput::new_single_variable_leaf( node, @@ -1121,12 +1091,8 @@ mod tests { StorageSlot::Mapping(mapping_key, slot) => { let padded_key = left_pad32(&mapping_key); let metadata_digest = metadata.digest(); - let values_digest = metadata.storage_values_digest( - &[&padded_key], - 
value.as_slice(), - &[slot as u8], - location_offset, - ); + let values_digest = + storage_value_digest(&metadata, &[&padded_key], &value, evm_word); let outer_key_id = metadata.input_columns()[0].identifier().0; @@ -1148,11 +1114,11 @@ mod tests { let padded_outer_key = left_pad32(&outer_mapping_key); let padded_inner_key = left_pad32(&inner_mapping_key); let metadata_digest = metadata.digest(); - let values_digest = metadata.storage_values_digest( + let values_digest = storage_value_digest( + &metadata, &[&padded_outer_key, &padded_inner_key], - value.as_slice(), - &[slot as u8], - location_offset, + &value, + evm_word, ); let key_ids = metadata diff --git a/mp2-v1/src/values_extraction/gadgets/metadata_gadget.rs b/mp2-v1/src/values_extraction/gadgets/metadata_gadget.rs index fed1f8494..a952bb84c 100644 --- a/mp2-v1/src/values_extraction/gadgets/metadata_gadget.rs +++ b/mp2-v1/src/values_extraction/gadgets/metadata_gadget.rs @@ -10,92 +10,81 @@ use super::column_info::{ use itertools::Itertools; use mp2_common::{ array::{Array, Targetable}, - eth::EventLogInfo, + eth::{left_pad32, EventLogInfo}, group_hashing::CircuitBuilderGroupHashing, - poseidon::H, - serialization::{deserialize_long_array, serialize_long_array}, + poseidon::{empty_poseidon_hash, hash_to_int_value, H}, + serialization::{ + deserialize_array, deserialize_long_array, serialize_array, serialize_long_array, + }, types::{CBuilder, HashOutput}, - u256::{CircuitBuilderU256, UInt256Target}, - utils::{Endianness, Packer}, + utils::{Endianness, Packer, ToFields}, F, }; use plonky2::{ field::types::{Field, PrimeField64}, + hash::hash_types::HashOut, iop::{ - target::Target, + target::{BoolTarget, Target}, witness::{PartialWitness, WitnessWrite}, }, plonk::config::Hasher, }; use plonky2_crypto::u32::arithmetic_u32::U32Target; use plonky2_ecgfp5::{ - curve::curve::Point, + curve::{curve::Point, scalar_field::Scalar}, gadgets::curve::{CircuitBuilderEcGFp5, CurveTarget}, }; use rand::{thread_rng, Rng}; use 
serde::{Deserialize, Serialize}; -use std::{array, iter::once}; +use std::{array, borrow::Borrow, iter::once}; #[derive(Debug, Clone, Serialize, Deserialize)] -pub struct TableMetadata -where - [(); MAX_COLUMNS - INPUT_COLUMNS]:, -{ +/// This struct stores the [`InputColumnInfo`] and [`ExtractedColumnInfo`] for an object that we wish to index. +/// `input_columns` are columns whose values must be provided to an extraction circuit as witness directly, for instance mapping keys for storage variables +/// or the transaction index for receipts. There will be fixed amount of them per object type that we are indexing so we can safely store them as a vec. +/// `extracted_columns` are columns whose values are stored in the value part of an MPT node. +/// `num_actual_columns` is the number of columns that aren't dummy columns. We need this since a circuit has to always have the same number of columns but not every table will need all of them. +/// +/// We use this struct so we can store all information about the columns of a table easily and use it to calculate value and metadata digests. +pub struct TableMetadata { /// Columns that aren't extracted from the node, like the mapping keys - #[serde( - serialize_with = "serialize_long_array", - deserialize_with = "deserialize_long_array" - )] - pub(crate) input_columns: [InputColumnInfo; INPUT_COLUMNS], + pub(crate) input_columns: Vec, /// The extracted column info - #[serde( - serialize_with = "serialize_long_array", - deserialize_with = "deserialize_long_array" - )] - pub(crate) extracted_columns: [ExtractedColumnInfo; MAX_COLUMNS - INPUT_COLUMNS], + pub(crate) extracted_columns: Vec, /// Actual column number pub(crate) num_actual_columns: usize, } -impl TableMetadata -where - [(); MAX_COLUMNS - INPUT_COLUMNS]:, -{ - /// Create a new instance of [`TableColumns`] from a slice of [`ColumnInfo`] we assume that the columns are sorted into a predetermined order. 
+impl TableMetadata { + /// Create a new instance of [`TableMetadata`] from a slice of [`InputColumnInfo`] and a slice of [`ExtractedColumnInfo`] we assume that the columns are sorted into a predetermined order. pub fn new( - input_columns: &[InputColumnInfo; INPUT_COLUMNS], + input_columns: &[InputColumnInfo], extracted_columns: &[ExtractedColumnInfo], - ) -> TableMetadata { - let num_actual_columns = extracted_columns.len() + INPUT_COLUMNS; - // Check that we don't have too many columns - assert!(num_actual_columns <= MAX_COLUMNS); - - let mut table_info = [ExtractedColumnInfo::default(); { MAX_COLUMNS - INPUT_COLUMNS }]; - table_info - .iter_mut() - .zip(extracted_columns) - .for_each(|(ti, &column)| *ti = column); - - TableMetadata:: { - input_columns: input_columns.clone(), - extracted_columns: table_info, + ) -> TableMetadata { + let num_actual_columns = extracted_columns.len() + input_columns.len(); + + TableMetadata { + input_columns: input_columns.to_vec(), + extracted_columns: extracted_columns.to_vec(), num_actual_columns, } } /// Create a sample MPT metadata. It could be used in testing. 
- pub fn sample( + pub fn sample( flag: bool, - input_prefixes: &[&[u8]; INPUT_COLUMNS], + input_prefixes: &[&[u8]], extraction_identifier: &[u8], location_offset: F, ) -> Self { let rng = &mut thread_rng(); let input_columns = input_prefixes - .map(|prefix| InputColumnInfo::new(extraction_identifier, rng.gen(), prefix, 32)); + .iter() + .map(|prefix| InputColumnInfo::new(extraction_identifier, rng.gen(), prefix, 32)) + .collect::>(); - let num_actual_columns = rng.gen_range(1..=MAX_COLUMNS - INPUT_COLUMNS); + let num_actual_columns = rng.gen_range(1..=NUM_EXTRACTED_COLUMNS); let mut extraction_vec = extraction_identifier.pack(Endianness::Little); extraction_vec.resize(8, 0u32); @@ -111,7 +100,7 @@ where .map(|_| ExtractedColumnInfo::sample(flag, &extraction_id, location_offset)) .collect::>(); - TableMetadata::::new(&input_columns, &extracted_columns) + TableMetadata::new(&input_columns, &extracted_columns) } /// Get the input columns @@ -121,7 +110,7 @@ where /// Get the columns we actually extract from pub fn extracted_columns(&self) -> &[ExtractedColumnInfo] { - &self.extracted_columns[..self.num_actual_columns - INPUT_COLUMNS] + &self.extracted_columns[..self.num_actual_columns - self.input_columns.len()] } /// Compute the metadata digest. 
@@ -145,54 +134,36 @@ where } /// Computes the value digest for a provided value array and the unique row_id - pub fn input_value_digest( - &self, - input_vals: &[&[u8; 32]; INPUT_COLUMNS], - ) -> (Point, HashOutput) { + pub fn input_value_digest>(&self, input_vals: &[T]) -> (Point, HashOutput) { + // Make sure we have the same number of input values and columns + assert_eq!(input_vals.len(), self.input_columns.len()); + let point = self .input_columns() .iter() .zip(input_vals.iter()) .fold(Point::NEUTRAL, |acc, (column, value)| { - acc + column.value_digest(value.as_slice()) + acc + column.value_digest(value.borrow()) }); let row_id_input = input_vals - .map(|key| { - key.pack(Endianness::Big) + .iter() + .flat_map(|key| { + key.borrow() + .pack(Endianness::Big) .into_iter() .map(F::from_canonical_u32) }) - .into_iter() - .flatten() .collect::>(); (point, H::hash_no_pad(&row_id_input).into()) } - pub fn extracted_value_digest( - &self, - value: &[u8], - extraction_id: &[u8], - location_offset: F, - ) -> Point { - let mut extraction_vec = extraction_id.pack(Endianness::Little); - extraction_vec.resize(8, 0u32); - extraction_vec.reverse(); - let extraction_id: [F; 8] = extraction_vec - .into_iter() - .map(F::from_canonical_u32) - .collect::>() - .try_into() - .expect("This should never fail"); - + pub fn extracted_value_digest(&self, value: &[u8], location_offset: F) -> Point { self.extracted_columns() .iter() .fold(Point::NEUTRAL, |acc, column| { - let correct_id = extraction_id == column.extraction_id(); - let correct_offset = location_offset == column.location_offset(); - let correct_location = correct_id && correct_offset; - + let correct_location = location_offset == column.location_offset(); if correct_location { acc + column.value_digest(value) } else { @@ -221,59 +192,166 @@ where pub fn num_actual_columns(&self) -> usize { self.num_actual_columns } + + /// Create a new instance of [`TableMetadata`] from an [`EventLogInfo`]. 
Events + /// always have two input columns relating to the transaction index and gas used for the transaction. + pub fn from_event_info( + event: &EventLogInfo, + ) -> TableMetadata { + TableMetadata::from(*event) + } + + /// Function to calculate the full receipt value digest from a receipt leaf node and [`EventLogInfo`] + pub fn receipt_value_digest( + &self, + tx_index: u64, + value: &[u8], + event: &EventLogInfo, + ) -> Point { + let mut tx_index_input = [0u8; 32]; + tx_index_input[31] = tx_index as u8; + + // The actual receipt data is item 1 in the list + let node_rlp = rlp::Rlp::new(value); + let receipt_rlp = node_rlp.at(1).unwrap(); + + // We make a new `Rlp` struct that should be the encoding of the inner list representing the `ReceiptEnvelope` + let receipt_list = rlp::Rlp::new(&receipt_rlp.data().unwrap()[1..]); + + // The logs themselves start are the item at index 3 in this list + let gas_used_rlp = receipt_list.at(1).unwrap(); + + let gas_used_bytes = left_pad32(gas_used_rlp.data().unwrap()); + + let (input_d, row_unique_data) = + self.input_value_digest(&[&tx_index_input, &gas_used_bytes]); + let extracted_vd = self.extracted_receipt_value_digest(value, event); + + let total = input_d + extracted_vd; + + // row_id = H2int(row_unique_data || num_actual_columns) + let inputs = HashOut::from(row_unique_data) + .to_fields() + .into_iter() + .chain(std::iter::once(F::from_canonical_usize( + self.num_actual_columns, + ))) + .collect::>(); + let hash = H::hash_no_pad(&inputs); + let row_id = hash_to_int_value(hash); + + // values_digest = values_digest * row_id + let row_id = Scalar::from_noncanonical_biguint(row_id); + + total * row_id + } + + /// Computes storage values digest + pub(crate) fn storage_values_digest( + &self, + input_vals: &[[u8; 32]], + value: &[u8], + location_offset: u32, + ) -> Point { + let location_offset = F::from_canonical_u32(location_offset); + let (input_vd, row_unique) = self.input_value_digest(input_vals); + + let extract_vd = 
self.extracted_value_digest(value, location_offset); + + let inputs = if self.input_columns().is_empty() { + empty_poseidon_hash() + .to_fields() + .into_iter() + .chain(once(F::from_canonical_usize( + self.input_columns().len() + self.extracted_columns().len(), + ))) + .collect_vec() + } else { + HashOut::from(row_unique) + .to_fields() + .into_iter() + .chain(once(F::from_canonical_usize( + self.input_columns().len() + self.extracted_columns().len(), + ))) + .collect_vec() + }; + let hash = H::hash_no_pad(&inputs); + let row_id = hash_to_int_value(hash); + + // values_digest = values_digest * row_id + let row_id = Scalar::from_noncanonical_biguint(row_id); + if location_offset.0 == 0 { + (extract_vd + input_vd) * row_id + } else { + extract_vd * row_id + } + } } -pub struct TableMetadataGadget; +pub struct TableMetadataGadget; -impl - TableMetadataGadget -where - [(); MAX_COLUMNS - INPUT_COLUMNS]:, +impl + TableMetadataGadget { - pub(crate) fn build(b: &mut CBuilder) -> TableMetadataTarget { + pub(crate) fn build( + b: &mut CBuilder, + ) -> TableMetadataTarget { TableMetadataTarget { input_columns: array::from_fn(|_| b.add_virtual_input_column_info()), extracted_columns: array::from_fn(|_| b.add_virtual_extracted_column_info()), + real_columns: array::from_fn(|_| b.add_virtual_bool_target_safe()), num_actual_columns: b.add_virtual_target(), } } pub(crate) fn assign( pw: &mut PartialWitness, - columns_metadata: &TableMetadata, - metadata_target: &TableMetadataTarget, + columns_metadata: &TableMetadata, + metadata_target: &TableMetadataTarget, ) { + // First we check that we are trying to assign from a `TableMetadata` with the correct + // number of columns + assert_eq!( + columns_metadata.input_columns.len(), + metadata_target.input_columns.len() + ); + + assert!(columns_metadata.extracted_columns.len() <= MAX_EXTRACTED_COLUMNS); + pw.set_input_column_info_target_arr( metadata_target.input_columns.as_slice(), columns_metadata.input_columns.as_slice(), ); + let 
padded_extracted_columns = columns_metadata + .extracted_columns + .iter() + .copied() + .chain(std::iter::repeat(ExtractedColumnInfo::default())) + .take(MAX_EXTRACTED_COLUMNS) + .collect::>(); pw.set_extracted_column_info_target_arr( metadata_target.extracted_columns.as_slice(), - columns_metadata.extracted_columns.as_slice(), + padded_extracted_columns.as_slice(), ); + + metadata_target + .real_columns + .iter() + .enumerate() + .for_each(|(i, &b_target)| { + pw.set_bool_target(b_target, i < columns_metadata.extracted_columns.len()) + }); + pw.set_target( metadata_target.num_actual_columns, F::from_canonical_usize(columns_metadata.num_actual_columns), ); } - - /// Create a new instance of [`TableMetadata`] from an [`EventLogInfo`]. Events - /// always have two input columns relating to the transaction index and gas used for the transaction. - pub fn from_event_info( - event: &EventLogInfo, - ) -> TableMetadata - where - [(); MAX_COLUMNS - 2 - NO_TOPICS - MAX_DATA_WORDS]:, - { - TableMetadata::::from(*event) - } } -impl - From> for TableMetadata -where - [(); MAX_COLUMNS - 2 - NO_TOPICS - MAX_DATA_WORDS]:, +impl + From> for TableMetadata { fn from(event: EventLogInfo) -> Self { let extraction_id = event.event_signature; @@ -357,7 +435,7 @@ where let extracted_columns = [topic_columns, data_columns].concat(); - TableMetadata::::new( + TableMetadata::new( &[tx_index_input_column, gas_used_index_column], &extracted_columns, ) @@ -365,10 +443,10 @@ where } #[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)] -pub(crate) struct TableMetadataTarget -where - [(); MAX_COLUMNS - INPUT_COLUMNS]:, -{ +pub(crate) struct TableMetadataTarget< + const MAX_EXTRACTED_COLUMNS: usize, + const INPUT_COLUMNS: usize, +> { #[serde( serialize_with = "serialize_long_array", deserialize_with = "deserialize_long_array" @@ -380,7 +458,13 @@ where deserialize_with = "deserialize_long_array" )] /// Information about all extracted columns of the table - pub(crate) extracted_columns: 
[ExtractedColumnInfoTarget; MAX_COLUMNS - INPUT_COLUMNS], + pub(crate) extracted_columns: [ExtractedColumnInfoTarget; MAX_EXTRACTED_COLUMNS], + /// An Array signaling whether an extracted column is real or not + #[serde( + serialize_with = "serialize_array", + deserialize_with = "deserialize_array" + )] + pub(crate) real_columns: [BoolTarget; MAX_EXTRACTED_COLUMNS], /// The number of actual columns pub(crate) num_actual_columns: Target, } @@ -392,10 +476,8 @@ type ReceiptExtractedOutput = ( CurveTarget, ); -impl - TableMetadataTarget -where - [(); MAX_COLUMNS - INPUT_COLUMNS]:, +impl + TableMetadataTarget { #[cfg(test)] pub fn metadata_digest(&self, b: &mut CBuilder) -> CurveTarget { @@ -404,15 +486,15 @@ where .iter() .map(|column| column.digest(b)) .collect::>(); - let zero = b.zero(); + let curve_zero = b.curve_zero(); let extracted_points = self .extracted_columns .iter() - .map(|column| { - let selector = b.is_equal(zero, column.identifier()); + .zip(self.real_columns.iter()) + .map(|(column, &selector)| { let poss_digest = column.digest(b); - b.select_curve_point(selector, curve_zero, poss_digest) + b.select_curve_point(selector, poss_digest, curve_zero) }) .collect::>(); @@ -455,11 +537,9 @@ where &self, b: &mut CBuilder, value: &Array, - location_no_offset: &UInt256Target, - location: &UInt256Target, + offset: Target, extraction_id: &[Target; 8], ) -> (CurveTarget, CurveTarget) { - let zero = b.zero(); let one = b.one(); let curve_zero = b.curve_zero(); @@ -469,18 +549,14 @@ where let (metadata_points, value_points): (Vec, Vec) = self .extracted_columns .into_iter() - .map(|column| { + .zip(self.real_columns) + .map(|(column, selector)| { // Calculate the column digest let column_digest = column.digest(b); - // The column is real if the identifier is non-zero so we use it as a selector - let selector = b.is_equal(zero, column.identifier()); // Now we work out if the column is to be extracted, if it is we will take the value we recover from 
`value[column.byte_offset..column.byte_offset + column.length]` // left padded. - let loc_offset_u256 = - UInt256Target::new_from_target_unsafe(b, column.location_offset()); - let (sum, _) = b.add_u256(&loc_offset_u256, location_no_offset); - let correct_offset = b.is_equal_u256(&sum, location); + let correct_offset = b.is_equal(offset, column.location_offset()); // We check that we have the correct base extraction id let column_ex_id_arr = Array::::from(column.extraction_id()); @@ -488,10 +564,10 @@ where // We only extract if we are in the correct location AND `column.is_extracted` is true let correct_location = b.and(correct_offset, correct_extraction_id); - let not_selector = b.not(selector); + // We also make sure we should actually extract for this column, otherwise we have issues // when indexing into the array. - let correct = b.and(not_selector, correct_location); + let correct = b.and(selector, correct_location); // last_byte_found lets us know whether we continue extracting or not. 
// Hence if we want to extract values `extract` will be true so `last_byte_found` should be false @@ -514,7 +590,7 @@ where let value_selector = b.not(correct); ( - b.curve_select(selector, curve_zero, column_digest), + b.curve_select(selector, column_digest, curve_zero), b.curve_select(value_selector, curve_zero, value_digest), ) }) @@ -536,7 +612,6 @@ where address_offset: Target, signature_offset: Target, ) -> ReceiptExtractedOutput { - let zero = b.zero(); let one = b.one(); let curve_zero = b.curve_zero(); @@ -549,11 +624,12 @@ where let (metadata_points, value_points): (Vec, Vec) = self .extracted_columns .into_iter() - .map(|column| { + .zip(self.real_columns) + .map(|(column, selector)| { // Calculate the column digest let column_digest = column.digest(b); - // The column is real if the identifier is non-zero so we use it as a selector - let selector = b.is_equal(zero, column.identifier()); + // If selector is true (from self.real_columns) we need it to be false when we feed it into `column.extract_value()` later. 
+ let selector = b.not(selector); let location = b.add(log_offset, column.byte_offset()); @@ -598,7 +674,7 @@ pub(crate) mod tests { #[derive(Clone, Debug)] struct TestMedataCircuit { - columns_metadata: TableMetadata, + columns_metadata: TableMetadata, slot: u8, expected_num_actual_columns: usize, expected_metadata_digest: Point, @@ -655,7 +731,7 @@ pub(crate) mod tests { let slot = rng.gen(); let evm_word = rng.gen(); - let metadata = TableMetadata::::sample( + let metadata = TableMetadata::sample::( true, &[], &[slot], diff --git a/mp2-v1/src/values_extraction/leaf_mapping.rs b/mp2-v1/src/values_extraction/leaf_mapping.rs index 37643936c..3502e9112 100644 --- a/mp2-v1/src/values_extraction/leaf_mapping.rs +++ b/mp2-v1/src/values_extraction/leaf_mapping.rs @@ -14,7 +14,6 @@ use mp2_common::{ public_inputs::PublicInputCommon, storage_key::{MappingSlot, MappingStructSlotWires}, types::{CBuilder, GFp}, - u256::UInt256Target, utils::{Endianness, ToTargets}, CHasher, D, F, }; @@ -36,10 +35,7 @@ use std::iter::once; use super::gadgets::metadata_gadget::{TableMetadata, TableMetadataGadget, TableMetadataTarget}; #[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)] -pub struct LeafMappingWires -where - [(); MAX_COLUMNS - 1]:, -{ +pub struct LeafMappingWires { /// Full node from the MPT proof pub(crate) node: VectorWire, /// Leaf value @@ -49,28 +45,22 @@ where /// Storage mapping variable slot pub(crate) slot: MappingStructSlotWires, /// MPT metadata - metadata: TableMetadataTarget, + metadata: TableMetadataTarget, /// The offset from the base slot offset: Target, } /// Circuit to prove the correct derivation of the MPT key from a mapping slot #[derive(Clone, Debug, Serialize, Deserialize)] -pub struct LeafMappingCircuit -where - [(); MAX_COLUMNS - 1]:, -{ +pub struct LeafMappingCircuit { pub(crate) node: Vec, pub(crate) slot: MappingSlot, - pub(crate) metadata: TableMetadata, + pub(crate) metadata: TableMetadata, pub(crate) offset: u32, } -impl 
LeafMappingCircuit -where - [(); MAX_COLUMNS - 1]:, -{ - pub fn build(b: &mut CBuilder) -> LeafMappingWires { +impl LeafMappingCircuit { + pub fn build(b: &mut CBuilder) -> LeafMappingWires { let zero = b.zero(); let metadata = TableMetadataGadget::build(b); @@ -85,19 +75,6 @@ where let node = wires.node; let root = wires.root; - let key_input_no_offset = slot - .keccak_mpt - .base - .keccak_location - .output - .pack(b, Endianness::Big); - let key_input_with_offset = slot.keccak_mpt.location_bytes.pack(b, Endianness::Big); - - let u256_no_off = - UInt256Target::new_from_be_limbs(key_input_no_offset.arr.as_slice()).unwrap(); - let u256_loc = - UInt256Target::new_from_be_limbs(key_input_with_offset.arr.as_slice()).unwrap(); - // Left pad the leaf value. let value: Array = left_pad_leaf_value(b, &wires.value); @@ -109,8 +86,7 @@ where let (extracted_metadata_digest, extracted_value_digest) = metadata.extracted_digests::<32>( b, &value, - &u256_no_off, - &u256_loc, + offset, &[zero, zero, zero, zero, zero, zero, zero, slot.mapping_slot], ); @@ -167,7 +143,11 @@ where } } - pub fn assign(&self, pw: &mut PartialWitness, wires: &LeafMappingWires) { + pub fn assign( + &self, + pw: &mut PartialWitness, + wires: &LeafMappingWires, + ) { let padded_node = Vector::::from_vec(&self.node).expect("Invalid node"); wires.node.assign(pw, &padded_node); @@ -184,12 +164,11 @@ where } /// Num of children = 0 -impl CircuitLogicWires for LeafMappingWires -where - [(); MAX_COLUMNS - 1]:, +impl CircuitLogicWires + for LeafMappingWires { type CircuitBuilderParams = (); - type Inputs = LeafMappingCircuit; + type Inputs = LeafMappingCircuit; const NUM_PUBLIC_INPUTS: usize = PublicInputs::::TOTAL_LEN; @@ -210,16 +189,18 @@ where #[cfg(test)] mod tests { use super::*; - use crate::{tests::TEST_MAX_COLUMNS, values_extraction::KEY_ID_PREFIX}; + use crate::{ + tests::TEST_MAX_COLUMNS, + values_extraction::{storage_value_digest, KEY_ID_PREFIX}, + }; use eth_trie::{Nibbles, Trie}; use mp2_common::{ 
array::Array, eth::{StorageSlot, StorageSlotNode}, mpt_sequential::utils::bytes_to_nibbles, - poseidon::{hash_to_int_value, H}, rlp::MAX_KEY_NIBBLE_LEN, types::MAPPING_LEAF_VALUE_LEN, - utils::{keccak256, Endianness, Packer, ToFields}, + utils::{keccak256, Endianness, Packer}, C, D, F, }; use mp2_test::{ @@ -229,11 +210,8 @@ mod tests { }; use plonky2::{ field::types::Field, - hash::hash_types::HashOut, iop::{target::Target, witness::PartialWitness}, - plonk::config::Hasher, }; - use plonky2_ecgfp5::curve::scalar_field::Scalar; use rand::{thread_rng, Rng}; type LeafCircuit = LeafMappingCircuit; @@ -281,7 +259,7 @@ mod tests { let evm_word = storage_slot.evm_offset(); // Compute the metadata digest. - let table_metadata = TableMetadata::::sample( + let table_metadata = TableMetadata::sample::( true, &[KEY_ID_PREFIX], &[slot], @@ -289,29 +267,15 @@ mod tests { ); let metadata_digest = table_metadata.digest(); - let (input_val_digest, row_unique_data) = table_metadata.input_value_digest(&[mapping_key]); - let extracted_val_digest = - table_metadata.extracted_value_digest(&value, &[slot], F::from_canonical_u32(evm_word)); - let slot = MappingSlot::new(slot, mapping_key.to_vec()); - // row_id = H2int(row_unique_data || num_actual_columns) - let inputs = HashOut::from(row_unique_data) - .to_fields() - .into_iter() - .chain(once(F::from_canonical_usize( - table_metadata.num_actual_columns, - ))) - .collect::>(); - let hash = H::hash_no_pad(&inputs); - let row_id = hash_to_int_value(hash); + let values_digest = storage_value_digest( + &table_metadata, + &[mapping_key], + &value.clone().try_into().unwrap(), + evm_word, + ); - // values_digest = values_digest * row_id - let row_id = Scalar::from_noncanonical_biguint(row_id); - let values_digest = if evm_word == 0 { - (extracted_val_digest + input_val_digest) * row_id - } else { - extracted_val_digest * row_id - }; + let slot = MappingSlot::new(slot, mapping_key.to_vec()); let c = LeafMappingCircuit:: { node: node.clone(), 
diff --git a/mp2-v1/src/values_extraction/leaf_mapping_of_mappings.rs b/mp2-v1/src/values_extraction/leaf_mapping_of_mappings.rs index f48778b54..148b0617a 100644 --- a/mp2-v1/src/values_extraction/leaf_mapping_of_mappings.rs +++ b/mp2-v1/src/values_extraction/leaf_mapping_of_mappings.rs @@ -17,7 +17,6 @@ use mp2_common::{ public_inputs::PublicInputCommon, storage_key::{MappingOfMappingsSlotWires, MappingSlot}, types::{CBuilder, GFp}, - u256::UInt256Target, utils::{Endianness, ToTargets}, CHasher, D, F, }; @@ -39,10 +38,7 @@ use std::iter::once; use super::gadgets::metadata_gadget::TableMetadata; #[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)] -pub struct LeafMappingOfMappingsWires -where - [(); MAX_COLUMNS - 2]:, -{ +pub struct LeafMappingOfMappingsWires { /// Full node from the MPT proof pub(crate) node: VectorWire, /// Leaf value @@ -52,48 +48,29 @@ where /// Mapping slot associating wires including outer and inner mapping keys pub(crate) slot: MappingOfMappingsSlotWires, /// MPT metadata - metadata: TableMetadataTarget, + metadata: TableMetadataTarget, offset: Target, } /// Circuit to prove the correct derivation of the MPT key from mappings where /// the value stored in each mapping entry is another mapping #[derive(Clone, Debug, Serialize, Deserialize)] -pub struct LeafMappingOfMappingsCircuit -where - [(); MAX_COLUMNS - 2]:, -{ +pub struct LeafMappingOfMappingsCircuit { pub(crate) node: Vec, pub(crate) slot: MappingSlot, pub(crate) inner_key: Vec, - pub(crate) metadata: TableMetadata, + pub(crate) metadata: TableMetadata, pub(crate) evm_word: u8, } -impl LeafMappingOfMappingsCircuit -where - [(); MAX_COLUMNS - 2]:, -{ - pub fn build(b: &mut CBuilder) -> LeafMappingOfMappingsWires { +impl LeafMappingOfMappingsCircuit { + pub fn build(b: &mut CBuilder) -> LeafMappingOfMappingsWires { let offset = b.add_virtual_target(); - let metadata = TableMetadataGadget::::build(b); + let metadata = TableMetadataGadget::::build(b); let slot = 
MappingSlot::build_mapping_of_mappings(b, offset); let zero = b.zero(); - let key_input_no_offset = slot - .keccak_mpt - .base - .keccak_location - .output - .pack(b, Endianness::Big); - let key_input_with_offset = slot.keccak_mpt.location_bytes.pack(b, Endianness::Big); - - let u256_no_off = - UInt256Target::new_from_be_limbs(key_input_no_offset.arr.as_slice()).unwrap(); - let u256_loc = - UInt256Target::new_from_be_limbs(key_input_with_offset.arr.as_slice()).unwrap(); - // Build the node wires. let wires = MPTLeafOrExtensionNode::build_and_advance_key::<_, D, 69, 33>( b, @@ -113,8 +90,7 @@ where let (extracted_metadata_digest, extracted_value_digest) = metadata.extracted_digests::<32>( b, &value, - &u256_no_off, - &u256_loc, + offset, &[zero, zero, zero, zero, zero, zero, zero, slot.mapping_slot], ); @@ -177,7 +153,7 @@ where pub fn assign( &self, pw: &mut PartialWitness, - wires: &LeafMappingOfMappingsWires, + wires: &LeafMappingOfMappingsWires, ) { let padded_node = Vector::::from_vec(&self.node).expect("Invalid node"); @@ -194,19 +170,21 @@ where &self.inner_key, self.evm_word as u32, ); - TableMetadataGadget::::assign(pw, &self.metadata, &wires.metadata); + TableMetadataGadget::::assign( + pw, + &self.metadata, + &wires.metadata, + ); pw.set_target(wires.offset, F::from_canonical_u8(self.evm_word)); } } /// Num of children = 0 -impl CircuitLogicWires - for LeafMappingOfMappingsWires -where - [(); MAX_COLUMNS - 2]:, +impl CircuitLogicWires + for LeafMappingOfMappingsWires { type CircuitBuilderParams = (); - type Inputs = LeafMappingOfMappingsCircuit; + type Inputs = LeafMappingOfMappingsCircuit; const NUM_PUBLIC_INPUTS: usize = PublicInputs::::TOTAL_LEN; @@ -229,17 +207,16 @@ mod tests { use super::*; use crate::{ tests::TEST_MAX_COLUMNS, - values_extraction::{INNER_KEY_ID_PREFIX, OUTER_KEY_ID_PREFIX}, + values_extraction::{storage_value_digest, INNER_KEY_ID_PREFIX, OUTER_KEY_ID_PREFIX}, }; use eth_trie::{Nibbles, Trie}; use mp2_common::{ array::Array, 
eth::{StorageSlot, StorageSlotNode}, mpt_sequential::utils::bytes_to_nibbles, - poseidon::{hash_to_int_value, H}, rlp::MAX_KEY_NIBBLE_LEN, types::MAPPING_LEAF_VALUE_LEN, - utils::{keccak256, Endianness, Packer, ToFields}, + utils::{keccak256, Endianness, Packer}, C, D, F, }; use mp2_test::{ @@ -249,13 +226,9 @@ mod tests { }; use plonky2::{ field::types::Field, - hash::hash_types::HashOut, iop::{target::Target, witness::PartialWitness}, - plonk::config::Hasher, }; - use plonky2_ecgfp5::curve::scalar_field::Scalar; - use rand::{thread_rng, Rng}; use std::array; @@ -307,7 +280,7 @@ mod tests { let slot = storage_slot.slot(); let evm_word = storage_slot.evm_offset(); // Compute the metadata digest. - let table_metadata = TableMetadata::::sample( + let table_metadata = TableMetadata::sample::( true, &[OUTER_KEY_ID_PREFIX, INNER_KEY_ID_PREFIX], &[slot], @@ -315,29 +288,12 @@ mod tests { ); let metadata_digest = table_metadata.digest(); - let (input_val_digest, row_unique_data) = - table_metadata.input_value_digest(&[outer_key, inner_key]); - let extracted_val_digest = - table_metadata.extracted_value_digest(&value, &[slot], F::from_canonical_u32(evm_word)); - - // row_id = H2int(row_unique_data || num_actual_columns) - let inputs = HashOut::from(row_unique_data) - .to_fields() - .into_iter() - .chain(once(F::from_canonical_usize( - table_metadata.num_actual_columns, - ))) - .collect::>(); - let hash = H::hash_no_pad(&inputs); - let row_id = hash_to_int_value(hash); - - // values_digest = values_digest * row_id - let row_id = Scalar::from_noncanonical_biguint(row_id); - let values_digest = if evm_word == 0 { - (extracted_val_digest + input_val_digest) * row_id - } else { - extracted_val_digest * row_id - }; + let values_digest = storage_value_digest( + &table_metadata, + &[outer_key, inner_key], + &value.clone().try_into().unwrap(), + evm_word, + ); let slot = MappingSlot::new(slot, outer_key.to_vec()); diff --git a/mp2-v1/src/values_extraction/leaf_receipt.rs 
b/mp2-v1/src/values_extraction/leaf_receipt.rs index 9ec3587d9..74369da0b 100644 --- a/mp2-v1/src/values_extraction/leaf_receipt.rs +++ b/mp2-v1/src/values_extraction/leaf_receipt.rs @@ -12,12 +12,12 @@ use mp2_common::{ eth::EventLogInfo, group_hashing::CircuitBuilderGroupHashing, keccak::{InputData, KeccakCircuit, KeccakWires, HASH_LEN}, - mpt_sequential::{MPTKeyWire, MPTReceiptLeafNode, PAD_LEN}, + mpt_sequential::{utils::bytes_to_nibbles, MPTKeyWire, MPTReceiptLeafNode, PAD_LEN}, poseidon::hash_to_int_target, public_inputs::PublicInputCommon, rlp::MAX_KEY_NIBBLE_LEN, types::{CBuilder, GFp}, - utils::{less_than, less_than_or_equal_to_unsafe, ToTargets}, + utils::{less_than_or_equal_to_unsafe, less_than_unsafe, ToTargets}, CHasher, D, F, }; use plonky2::{ @@ -36,32 +36,28 @@ use plonky2_ecgfp5::gadgets::curve::CircuitBuilderEcGFp5; use recursion_framework::circuit_builder::CircuitLogicWires; use rlp::Encodable; use serde::{Deserialize, Serialize}; -use std::iter; /// The number of bytes that `gas_used` could take up in the receipt. /// We set a max of 3 here because this would be over half the gas in the block for Ethereum. 
const MAX_GAS_SIZE: u64 = 3; #[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)] -pub struct ReceiptLeafWires +pub(crate) struct ReceiptLeafWires where [(); PAD_LEN(NODE_LEN)]:, - [(); MAX_COLUMNS - 2]:, { /// The event we are monitoring for - pub event: EventWires, + pub(crate) event: EventWires, /// The node bytes - pub node: VectorWire, + pub(crate) node: VectorWire, /// the hash of the node bytes - pub root: KeccakWires<{ PAD_LEN(NODE_LEN) }>, - /// The index of this receipt in the block - pub index: Target, + pub(crate) root: KeccakWires<{ PAD_LEN(NODE_LEN) }>, /// The offsets of the relevant logs inside the node - pub relevant_log_offset: Target, + pub(crate) relevant_log_offset: Target, /// The key in the MPT Trie - pub mpt_key: MPTKeyWire, + pub(crate) mpt_key: MPTKeyWire, /// The table metadata - pub(crate) metadata: TableMetadataTarget, + pub(crate) metadata: TableMetadataTarget, } /// Contains all the information for an [`Event`] in rlp form @@ -81,10 +77,7 @@ pub struct EventWires { /// Circuit to prove a transaction receipt contains logs relating to a specific event. #[derive(Clone, Debug, Serialize, Deserialize)] -pub struct ReceiptLeafCircuit -where - [(); MAX_COLUMNS - 2]:, -{ +pub struct ReceiptLeafCircuit { /// This is the RLP encoded leaf node in the Receipt Trie. pub node: Vec, /// The transaction index, telling us where the receipt is in the block. 
The RLP encoding of the index @@ -103,34 +96,20 @@ where /// This is the offset in the node to the start of the log that relates to `event_info` pub relevant_log_offset: usize, /// The table metadata - pub metadata: TableMetadata, + pub metadata: TableMetadata, } -/// Contains all the information for data contained in an [`Event`] -#[derive(Debug, Clone, Copy, Default, Serialize, Deserialize)] -pub struct LogDataInfo { - /// The column id of this piece of info - pub column_id: GFp, - /// The byte offset from the beggining of the log to this target - pub rel_byte_offset: usize, - /// The length of this piece of data - pub len: usize, -} - -impl ReceiptLeafCircuit +impl + ReceiptLeafCircuit where [(); PAD_LEN(NODE_LEN)]:, - [(); MAX_COLUMNS - 2]:, { /// Create a new [`ReceiptLeafCircuit`] from a [`ReceiptProofInfo`] and a [`EventLogInfo`] pub fn new( last_node: &[u8], tx_index: u64, event: &EventLogInfo, - ) -> Result - where - [(); MAX_COLUMNS - 2 - NO_TOPICS - MAX_DATA_WORDS]:, - { + ) -> Result { // Get the relevant log offset let relevant_log_offset = event.get_log_offset(last_node)?; @@ -144,7 +123,7 @@ where } = *event; // Construct the table metadata from the event - let metadata = TableMetadata::::from(*event); + let metadata = TableMetadata::from(*event); Ok(Self { node: last_node.to_vec(), @@ -159,7 +138,7 @@ where }) } - pub fn build(b: &mut CBuilder) -> ReceiptLeafWires { + pub(crate) fn build(b: &mut CBuilder) -> ReceiptLeafWires { // Build the event wires let event_wires = Self::build_event_wires(b); // Build the metadata @@ -168,13 +147,12 @@ where let one = b.one(); let two = b.two(); - let t = b._true(); - // Add targets for the data specific to this receipt - let index = b.add_virtual_target(); + // Add targets for the data specific to this receipt let relevant_log_offset = b.add_virtual_target(); let mpt_key = MPTKeyWire::new(b); + let index = mpt_key.fold_key(b); // Build the node wires. 
let wires = MPTReceiptLeafNode::build_and_advance_key::<_, D, NODE_LEN>(b, &mpt_key); @@ -190,7 +168,7 @@ where ); let key_header = node.arr.random_access_large_array(b, header_len_len); let less_than_val = b.constant(F::from_canonical_u8(128)); - let single_value = less_than(b, key_header, less_than_val, 8); + let single_value = less_than_unsafe(b, key_header, less_than_val, 8); let key_len_maybe = b.add_const(key_header, F::ONE - F::from_canonical_u64(128)); let key_len = b.select(single_value, one, key_len_maybe); @@ -225,13 +203,10 @@ where // If we have extracted a value from an index in the desired range (so lte final_gas_index) we want to add it. // If access_index was strictly less than final_gas_index we need to multiply by 1 << 8 after (since the encoding is big endian) let valid = less_than_or_equal_to_unsafe(b, access_index, final_gas_index, 12); - let need_scalar = less_than(b, access_index, final_gas_index, 12); - let to_add = b.select(valid, array_value, zero); - - let scalar = b.select(need_scalar, combiner, one); - let tmp = b.add(acc, to_add); - b.mul(tmp, scalar) + let tmp = b.mul(acc, combiner); + let tmp = b.add(tmp, array_value); + b.select(valid, tmp, acc) }); let zero_u32 = b.zero_u32(); @@ -269,11 +244,8 @@ where event_wires.sig_rel_offset, ); - let address_check = address_extract.equals(b, &event_wires.address); - let sig_check = signature_extract.equals(b, &event_wires.event_signature); - - b.connect(t.target, address_check.target); - b.connect(t.target, sig_check.target); + address_extract.enforce_equal(b, &event_wires.address); + signature_extract.enforce_equal(b, &event_wires.event_signature); let dm = b.add_curve_point(&[input_metadata_digest, extracted_metadata_digest]); @@ -316,7 +288,6 @@ where event: event_wires, node, root, - index, relevant_log_offset, mpt_key, metadata, @@ -347,10 +318,10 @@ where } } - pub fn assign( + pub(crate) fn assign( &self, pw: &mut PartialWitness, - wires: &ReceiptLeafWires, + wires: &ReceiptLeafWires, 
) { self.assign_event_wires(pw, &wires.event); @@ -362,25 +333,27 @@ where &wires.root, &InputData::Assigned(&pad_node), ); - pw.set_target(wires.index, GFp::from_canonical_u64(self.tx_index)); pw.set_target( wires.relevant_log_offset, GFp::from_canonical_usize(self.relevant_log_offset), ); let key_encoded = self.tx_index.rlp_bytes(); - let key_nibbles: [u8; MAX_KEY_NIBBLE_LEN] = key_encoded - .iter() - .flat_map(|byte| [byte / 16, byte % 16]) - .chain(iter::repeat(0u8)) - .take(MAX_KEY_NIBBLE_LEN) - .collect::>() + let mut nibbles = bytes_to_nibbles(&key_encoded); + let ptr = nibbles.len() - 1; + nibbles.resize(MAX_KEY_NIBBLE_LEN, 0u8); + + let key_nibbles: [u8; MAX_KEY_NIBBLE_LEN] = nibbles .try_into() .expect("Couldn't create mpt key with correct length"); - wires.mpt_key.assign(pw, &key_nibbles, key_encoded.len()); + wires.mpt_key.assign(pw, &key_nibbles, ptr); - TableMetadataGadget::::assign(pw, &self.metadata, &wires.metadata); + TableMetadataGadget::::assign( + pw, + &self.metadata, + &wires.metadata, + ); } pub fn assign_event_wires(&self, pw: &mut PartialWitness, wires: &EventWires) { @@ -407,15 +380,14 @@ where } /// Num of children = 0 -impl CircuitLogicWires - for ReceiptLeafWires +impl CircuitLogicWires + for ReceiptLeafWires where [(); PAD_LEN(NODE_LEN)]:, - [(); MAX_COLUMNS - 2]:, { type CircuitBuilderParams = (); - type Inputs = ReceiptLeafCircuit; + type Inputs = ReceiptLeafCircuit; const NUM_PUBLIC_INPUTS: usize = PublicInputs::::TOTAL_LEN; @@ -443,20 +415,17 @@ mod tests { use super::*; use mp2_common::{ - eth::left_pad32, - poseidon::{hash_to_int_value, H}, - utils::{keccak256, Endianness, Packer, ToFields}, + utils::{keccak256, Endianness, Packer}, C, }; use mp2_test::{ circuit::{run_circuit, UserCircuit}, mpt_sequential::generate_receipt_test_info, }; - use plonky2::{hash::hash_types::HashOut, plonk::config::Hasher}; - use plonky2_ecgfp5::curve::scalar_field::Scalar; + #[derive(Clone, Debug)] struct TestReceiptLeafCircuit { - c: 
ReceiptLeafCircuit, + c: ReceiptLeafCircuit, } impl UserCircuit for TestReceiptLeafCircuit @@ -464,10 +433,10 @@ mod tests { [(); PAD_LEN(NODE_LEN)]:, { // Leaf wires + expected extracted value - type Wires = ReceiptLeafWires; + type Wires = ReceiptLeafWires; fn build(b: &mut CircuitBuilder) -> Self::Wires { - ReceiptLeafCircuit::::build(b) + ReceiptLeafCircuit::::build(b) } fn prove(&self, pw: &mut PartialWitness, wires: &Self::Wires) { @@ -491,14 +460,13 @@ mod tests { >() where [(); PAD_LEN(NODE_LEN)]:, - [(); 7 - 2 - NO_TOPICS - MAX_DATA_WORDS]:, { let receipt_proof_infos = generate_receipt_test_info::(); let proofs = receipt_proof_infos.proofs(); let info = proofs.first().unwrap(); let query = receipt_proof_infos.query(); - let c = ReceiptLeafCircuit::::new::( + let c = ReceiptLeafCircuit::::new::( info.mpt_proof.last().unwrap(), info.tx_index, &query.event, @@ -510,21 +478,6 @@ mod tests { let node = info.mpt_proof.last().unwrap().clone(); - let mut tx_index_input = [0u8; 32]; - tx_index_input[31] = info.tx_index as u8; - - let node_rlp = rlp::Rlp::new(&node); - // The actual receipt data is item 1 in the list - let receipt_rlp = node_rlp.at(1).unwrap(); - - // We make a new `Rlp` struct that should be the encoding of the inner list representing the `ReceiptEnvelope` - let receipt_list = rlp::Rlp::new(&receipt_rlp.data().unwrap()[1..]); - - // The logs themselves start are the item at index 3 in this list - let gas_used_rlp = receipt_list.at(1).unwrap(); - - let gas_used_bytes = left_pad32(gas_used_rlp.data().unwrap()); - assert!(node.len() <= NODE_LEN); let proof = run_circuit::(test_circuit); let pi = PublicInputs::new(&proof.public_inputs); @@ -537,27 +490,7 @@ mod tests { // Check value digest { - let (input_d, row_unique_data) = - metadata.input_value_digest(&[&tx_index_input, &gas_used_bytes]); - let extracted_vd = metadata.extracted_receipt_value_digest(&node, &query.event); - - let total = input_d + extracted_vd; - - // row_id = H2int(row_unique_data 
|| num_actual_columns) - let inputs = HashOut::from(row_unique_data) - .to_fields() - .into_iter() - .chain(std::iter::once(GFp::from_canonical_usize( - metadata.num_actual_columns, - ))) - .collect::>(); - let hash = H::hash_no_pad(&inputs); - let row_id = hash_to_int_value(hash); - - // values_digest = values_digest * row_id - let row_id = Scalar::from_noncanonical_biguint(row_id); - - let exp_digest = total * row_id; + let exp_digest = metadata.receipt_value_digest(info.tx_index, &node, &query.event); assert_eq!(pi.values_digest(), exp_digest.to_weierstrass()); } diff --git a/mp2-v1/src/values_extraction/leaf_single.rs b/mp2-v1/src/values_extraction/leaf_single.rs index 57a2b827c..5695aff25 100644 --- a/mp2-v1/src/values_extraction/leaf_single.rs +++ b/mp2-v1/src/values_extraction/leaf_single.rs @@ -15,8 +15,7 @@ use mp2_common::{ public_inputs::PublicInputCommon, storage_key::{SimpleSlot, SimpleStructSlotWires}, types::{CBuilder, GFp}, - u256::UInt256Target, - utils::{Endianness, ToTargets}, + utils::ToTargets, CHasher, D, F, }; use plonky2::{ @@ -35,10 +34,7 @@ use serde::{Deserialize, Serialize}; use std::iter::once; #[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)] -pub struct LeafSingleWires -where - [(); MAX_COLUMNS - 0]:, -{ +pub struct LeafSingleWires { /// Full node from the MPT proof node: VectorWire, /// Leaf value @@ -48,28 +44,22 @@ where /// Storage single variable slot slot: SimpleStructSlotWires, /// MPT metadata - metadata: TableMetadataTarget, + metadata: TableMetadataTarget, /// Offset from the base slot, offset: Target, } /// Circuit to prove the correct derivation of the MPT key from a simple slot #[derive(Clone, Debug, Serialize, Deserialize)] -pub struct LeafSingleCircuit -where - [(); MAX_COLUMNS - 0]:, -{ +pub struct LeafSingleCircuit { pub(crate) node: Vec, pub(crate) slot: SimpleSlot, - pub(crate) metadata: TableMetadata, + pub(crate) metadata: TableMetadata, pub(crate) offset: u32, } -impl LeafSingleCircuit -where - [(); 
MAX_COLUMNS - 0]:, -{ - pub fn build(b: &mut CBuilder) -> LeafSingleWires { +impl LeafSingleCircuit { + pub fn build(b: &mut CBuilder) -> LeafSingleWires { let metadata = TableMetadataGadget::build(b); let offset = b.add_virtual_target(); let slot = SimpleSlot::build_struct(b, offset); @@ -82,12 +72,6 @@ where let node = wires.node; let root = wires.root; - let key_input_with_offset = slot.location_bytes.pack(b, Endianness::Big); - - let u256_no_off = UInt256Target::new_from_target_unsafe(b, slot.base.slot); - let u256_loc = - UInt256Target::new_from_be_limbs(key_input_with_offset.arr.as_slice()).unwrap(); - // Left pad the leaf value. let value: Array = left_pad_leaf_value(b, &wires.value); @@ -95,8 +79,7 @@ where let (metadata_digest, value_digest) = metadata.extracted_digests::<32>( b, &value, - &u256_no_off, - &u256_loc, + offset, &[zero, zero, zero, zero, zero, zero, zero, slot.base.slot], ); @@ -137,7 +120,11 @@ where } } - pub fn assign(&self, pw: &mut PartialWitness, wires: &LeafSingleWires) { + pub fn assign( + &self, + pw: &mut PartialWitness, + wires: &LeafSingleWires, + ) { let padded_node = Vector::::from_vec(&self.node).expect("Invalid node"); wires.node.assign(pw, &padded_node); @@ -153,12 +140,11 @@ where } /// Num of children = 0 -impl CircuitLogicWires for LeafSingleWires -where - [(); MAX_COLUMNS - 0]:, +impl CircuitLogicWires + for LeafSingleWires { type CircuitBuilderParams = (); - type Inputs = LeafSingleCircuit; + type Inputs = LeafSingleCircuit; const NUM_PUBLIC_INPUTS: usize = PublicInputs::::TOTAL_LEN; @@ -179,16 +165,15 @@ where #[cfg(test)] mod tests { use super::*; - use crate::tests::TEST_MAX_COLUMNS; + use crate::{tests::TEST_MAX_COLUMNS, values_extraction::storage_value_digest}; use eth_trie::{Nibbles, Trie}; use mp2_common::{ array::Array, eth::{StorageSlot, StorageSlotNode}, mpt_sequential::utils::bytes_to_nibbles, - poseidon::{hash_to_int_value, H}, rlp::MAX_KEY_NIBBLE_LEN, types::MAPPING_LEAF_VALUE_LEN, - utils::{keccak256, 
Endianness, Packer, ToFields}, + utils::{keccak256, Endianness, Packer}, C, D, F, }; use mp2_test::{ @@ -199,9 +184,7 @@ mod tests { use plonky2::{ field::types::Field, iop::{target::Target, witness::PartialWitness}, - plonk::config::Hasher, }; - use plonky2_ecgfp5::curve::scalar_field::Scalar; type LeafCircuit = LeafSingleCircuit; type LeafWires = LeafSingleWires; @@ -247,7 +230,7 @@ mod tests { let slot = storage_slot.slot(); let evm_word = storage_slot.evm_offset(); // Compute the metadata digest. - let table_metadata = TableMetadata::::sample( + let table_metadata = TableMetadata::sample::( true, &[], &[slot], @@ -255,23 +238,13 @@ mod tests { ); let metadata_digest = table_metadata.digest(); - let extracted_val_digest = - table_metadata.extracted_value_digest(&value, &[slot], F::from_canonical_u32(evm_word)); + let values_digest = storage_value_digest( + &table_metadata, + &[], + &value.clone().try_into().unwrap(), + evm_word, + ); - // row_id = H2int(row_unique_data || num_actual_columns) - let inputs = empty_poseidon_hash() - .to_fields() - .into_iter() - .chain(once(F::from_canonical_usize( - table_metadata.num_actual_columns, - ))) - .collect::>(); - let hash = H::hash_no_pad(&inputs); - let row_id = hash_to_int_value(hash); - - // values_digest = values_digest * row_id - let row_id = Scalar::from_noncanonical_biguint(row_id); - let values_digest = extracted_val_digest * row_id; let slot = SimpleSlot::new(slot); let c = LeafCircuit { node: node.clone(), diff --git a/mp2-v1/src/values_extraction/mod.rs b/mp2-v1/src/values_extraction/mod.rs index a6e956315..a201ba3c0 100644 --- a/mp2-v1/src/values_extraction/mod.rs +++ b/mp2-v1/src/values_extraction/mod.rs @@ -1,6 +1,5 @@ use crate::api::SlotInput; -use anyhow::anyhow; use gadgets::{ column_info::{ExtractedColumnInfo, InputColumnInfo}, metadata_gadget::TableMetadata, @@ -9,20 +8,18 @@ use itertools::Itertools; use alloy::primitives::Address; use mp2_common::{ + digest::Digest, eth::{left_pad32, StorageSlot}, 
- poseidon::{empty_poseidon_hash, hash_to_int_value, H}, - types::HashOutput, + poseidon::{empty_poseidon_hash, H}, + types::{HashOutput, MAPPING_LEAF_VALUE_LEN}, utils::{Endianness, Packer, ToFields}, F, }; use plonky2::{ field::types::{Field, PrimeField64}, - hash::hash_types::HashOut, plonk::config::Hasher, }; -use plonky2_ecgfp5::curve::{curve::Point, scalar_field::Scalar}; - use serde::{Deserialize, Serialize}; use std::iter::once; @@ -132,7 +129,7 @@ impl StorageSlotInfo { contract_address: &Address, chain_id: u64, extra: Vec, - ) -> ColumnMetadata { + ) -> TableMetadata { let slot = self.slot().slot(); let num_mapping_keys = self.slot().mapping_keys().len(); @@ -172,178 +169,7 @@ impl StorageSlotInfo { _ => vec![], }; - ColumnMetadata::new(input_columns, self.table_info().to_vec()) - } -} - -/// Struct that mirrors [`TableMetadata`] but without having to specify generic constants. -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct ColumnMetadata { - pub input_columns: Vec, - pub extracted_columns: Vec, -} - -impl ColumnMetadata { - /// Create a new instance of [`ColumnMetadata`] - pub fn new( - input_columns: Vec, - extracted_columns: Vec, - ) -> ColumnMetadata { - ColumnMetadata { - input_columns, - extracted_columns, - } - } - - /// Getter for the [`InputColumnInfo`] - pub fn input_columns(&self) -> &[InputColumnInfo] { - &self.input_columns - } - - /// Getter for the [`ExtractedColumnInfo`] - pub fn extracted_columns(&self) -> &[ExtractedColumnInfo] { - &self.extracted_columns - } - - /// Computes storage values digest - pub fn storage_values_digest( - &self, - input_vals: &[&[u8; 32]], - value: &[u8], - extraction_id: &[u8], - location_offset: F, - ) -> Point { - let (input_vd, row_unique) = self.input_value_digest(input_vals); - - let extract_vd = self.extracted_value_digest(value, extraction_id, location_offset); - - let inputs = if self.input_columns().is_empty() { - empty_poseidon_hash() - .to_fields() - .into_iter() - 
.chain(once(F::from_canonical_usize( - self.input_columns().len() + self.extracted_columns().len(), - ))) - .collect_vec() - } else { - HashOut::from(row_unique) - .to_fields() - .into_iter() - .chain(once(F::from_canonical_usize( - self.input_columns().len() + self.extracted_columns().len(), - ))) - .collect_vec() - }; - let hash = H::hash_no_pad(&inputs); - let row_id = hash_to_int_value(hash); - - // values_digest = values_digest * row_id - let row_id = Scalar::from_noncanonical_biguint(row_id); - if location_offset.0 == 0 { - (extract_vd + input_vd) * row_id - } else { - extract_vd * row_id - } - } - - /// Computes the value digest for a provided value array and the unique row_id - pub fn input_value_digest(&self, input_vals: &[&[u8; 32]]) -> (Point, HashOutput) { - let point = self - .input_columns() - .iter() - .zip(input_vals.iter()) - .fold(Point::NEUTRAL, |acc, (column, value)| { - acc + column.value_digest(value.as_slice()) - }); - - let row_id_input = input_vals - .iter() - .flat_map(|key| { - key.pack(Endianness::Big) - .into_iter() - .map(F::from_canonical_u32) - }) - .collect::>(); - - (point, H::hash_no_pad(&row_id_input).into()) - } - - /// Compute the metadata digest. 
- pub fn digest(&self) -> Point { - let input_iter = self - .input_columns() - .iter() - .map(|column| column.digest()) - .collect::>(); - - let extracted_iter = self - .extracted_columns() - .iter() - .map(|column| column.digest()) - .collect::>(); - - input_iter - .into_iter() - .chain(extracted_iter) - .fold(Point::NEUTRAL, |acc, b| acc + b) - } - - pub fn extracted_value_digest( - &self, - value: &[u8], - extraction_id: &[u8], - location_offset: F, - ) -> Point { - let mut extraction_vec = extraction_id.pack(Endianness::Little); - extraction_vec.resize(8, 0u32); - extraction_vec.reverse(); - let extraction_id: [F; 8] = extraction_vec - .into_iter() - .map(F::from_canonical_u32) - .collect::>() - .try_into() - .expect("This should never fail"); - - self.extracted_columns() - .iter() - .fold(Point::NEUTRAL, |acc, column| { - let correct_id = extraction_id == column.extraction_id(); - let correct_offset = location_offset == column.location_offset(); - let correct_location = correct_id && correct_offset; - - if correct_location { - acc + column.value_digest(value) - } else { - acc - } - }) - } -} - -impl TryFrom - for TableMetadata -where - [(); MAX_COLUMNS - INPUT_COLUMNS]:, -{ - type Error = anyhow::Error; - - fn try_from(value: ColumnMetadata) -> Result { - let ColumnMetadata { - input_columns, - extracted_columns, - } = value; - let input_array: [InputColumnInfo; INPUT_COLUMNS] = - input_columns.try_into().map_err(|e| { - anyhow!( - "Could not convert input columns to fixed length array: {:?}", - e - ) - })?; - - Ok(TableMetadata::::new( - &input_array, - &extracted_columns, - )) + TableMetadata::new(&input_columns, self.table_info()) } } @@ -540,3 +366,25 @@ pub fn row_unique_data_for_mapping_of_mappings_leaf( let inputs = packed_outer_key.chain(packed_inner_key).collect_vec(); H::hash_no_pad(&inputs).into() } + +/// Function to compute a storage value digest +pub fn storage_value_digest( + table_metadata: &TableMetadata, + keys: &[&[u8]], + value: &[u8; 
MAPPING_LEAF_VALUE_LEN], + evm_word: u32, +) -> Digest { + let padded_keys = keys + .iter() + .map(|slice| left_pad32(slice)) + .collect::>(); + // Panic if the number of keys provided is not equal to the number of input columns + assert_eq!( + keys.len(), + table_metadata.input_columns.len(), + "Number of keys: {}, does not equal the number of input columns: {}", + keys.len(), + table_metadata.input_columns.len() + ); + table_metadata.storage_values_digest(padded_keys.as_slice(), value.as_slice(), evm_word) +} diff --git a/mp2-v1/src/values_extraction/planner.rs b/mp2-v1/src/values_extraction/planner.rs index 47f6b7b73..ba91ddcba 100644 --- a/mp2-v1/src/values_extraction/planner.rs +++ b/mp2-v1/src/values_extraction/planner.rs @@ -1,5 +1,4 @@ //! This code returns an [`UpdateTree`] used to plan how we prove a series of values was extracted from a Merkle Patricia Trie. -#![allow(clippy::identity_op)] use alloy::{ network::Ethereum, primitives::{keccak256, Address, B256}, @@ -7,13 +6,67 @@ use alloy::{ transports::Transport, }; use anyhow::Result; -use mp2_common::eth::{node_type, EventLogInfo, NodeType, ReceiptQuery}; +use mp2_common::eth::{node_type, EventLogInfo, MP2EthError, NodeType, ReceiptQuery}; use ryhope::storage::updatetree::{Next, UpdateTree}; use std::future::Future; -use std::collections::HashMap; +use std::{ + collections::HashMap, + error::Error, + fmt::{Display, Formatter}, + write, +}; use super::{generate_proof, CircuitInput, PublicParameters}; + +#[derive(Debug)] +/// Error enum used for Extractable data +pub enum MP2PlannerError { + /// An error that occurs when trying to fetch data from an RPC node, used so that we can know we should retry the call in this case. + FetchError, + /// An error that occurs when the [`UpdateTree`] returns an unexpected output from one of its methods. + UpdateTreeError(String), + /// A conversion from the error type defined in [`mp2_common::eth`] that is not a [`MP2EthError::FetchError`]. 
+ EthError(MP2EthError), + /// An error that occurs from a method in the proving API. + ProvingError(String), +} + +impl Error for MP2PlannerError {} + +impl Display for MP2PlannerError { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + match self { + MP2PlannerError::FetchError => write!( + f, + "Error occured when trying to fetch data from an RPC node" + ), + MP2PlannerError::UpdateTreeError(s) => write!( + f, + "Error occured when working with the update Tree: {{ inner: {} }}", + s + ), + MP2PlannerError::EthError(e) => write!( + f, + "Error occured in call from mp2_common::eth function {{ inner: {:?} }}", + e + ), + MP2PlannerError::ProvingError(s) => { + write!(f, "Error while proving, extra message: {}", s) + } + } + } +} + +impl From for MP2PlannerError { + fn from(value: MP2EthError) -> Self { + match value { + MP2EthError::FetchError => MP2PlannerError::FetchError, + _ => MP2PlannerError::EthError(value), + } + } +} + /// Trait that is implemented for all data that we can provably extract. 
pub trait Extractable { fn create_update_tree( @@ -21,7 +74,7 @@ pub trait Extractable { contract: Address, epoch: u64, provider: &RootProvider, - ) -> impl Future>>; + ) -> impl Future, MP2PlannerError>>; fn prove_value_extraction( &self, @@ -29,11 +82,7 @@ pub trait Extractable { epoch: u64, pp: &PublicParameters<512, MAX_COLUMNS>, provider: &RootProvider, - ) -> impl Future>> - where - [(); MAX_COLUMNS - 2]:, - [(); MAX_COLUMNS - 1]:, - [(); MAX_COLUMNS - 0]:; + ) -> impl Future, MP2PlannerError>>; } #[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord)] @@ -57,15 +106,13 @@ impl ProofData { impl Extractable for EventLogInfo -where - [(); 7 - 2 - NO_TOPICS - MAX_DATA_WORDS]:, { async fn create_update_tree( &self, contract: Address, epoch: u64, provider: &RootProvider, - ) -> Result> { + ) -> Result, MP2PlannerError> { let query = ReceiptQuery:: { contract, event: *self, @@ -83,18 +130,13 @@ where Ok(UpdateTree::::from_paths(key_paths, epoch as i64)) } - async fn prove_value_extraction( + async fn prove_value_extraction( &self, contract: Address, epoch: u64, - pp: &PublicParameters<512, MAX_COLUMNS>, + pp: &PublicParameters<512, MAX_EXTRACTED_COLUMNS>, provider: &RootProvider, - ) -> Result> - where - [(); MAX_COLUMNS - 2]:, - [(); MAX_COLUMNS - 1]:, - [(); MAX_COLUMNS - 0]:, - { + ) -> Result, MP2PlannerError> { let query = ReceiptQuery:: { contract, event: *self, @@ -124,9 +166,9 @@ where Ok(node_key) }) - .collect::>>() + .collect::, MP2PlannerError>>() }) - .collect::>>>()?; + .collect::>, MP2PlannerError>>()?; let update_tree = UpdateTree::::from_paths(key_paths, epoch as i64); @@ -135,66 +177,82 @@ where while let Some(Next::Ready(work_plan_item)) = update_plan.next() { let node_type = data_store .get(work_plan_item.k()) - .ok_or(anyhow::anyhow!( + .ok_or(MP2PlannerError::UpdateTreeError(format!( "No ProofData found for key: {:?}", work_plan_item.k() - ))? + )))? 
.node_type; - let update_tree_node = - update_tree - .get_node(work_plan_item.k()) - .ok_or(anyhow::anyhow!( - "No UpdateTreeNode found for key: {:?}", - work_plan_item.k() - ))?; + let update_tree_node = update_tree.get_node(work_plan_item.k()).ok_or( + MP2PlannerError::UpdateTreeError(format!( + "No UpdateTreeNode found for key: {:?}", + work_plan_item.k(), + )), + )?; match node_type { NodeType::Leaf => { - let proof_data = - data_store - .get_mut(work_plan_item.k()) - .ok_or(anyhow::anyhow!( - "No ProofData found for key: {:?}", - work_plan_item.k() - ))?; + let proof_data = data_store.get_mut(work_plan_item.k()).ok_or( + MP2PlannerError::UpdateTreeError(format!( + "No ProofData found for key: {:?}", + work_plan_item.k() + )), + )?; let input = CircuitInput::new_receipt_leaf( &proof_data.node, proof_data.tx_index.unwrap(), self, ); - let proof = generate_proof(pp, input)?; + let proof = generate_proof(pp, input).map_err(|_| { + MP2PlannerError::ProvingError( + "Error calling generate proof API".to_string(), + ) + })?; proof_data.proof = Some(proof); - update_plan.done(&work_plan_item)?; + update_plan.done(&work_plan_item).map_err(|_| { + MP2PlannerError::UpdateTreeError( + "Could not mark work plan item as done".to_string(), + ) + })?; } NodeType::Extension => { let child_key = update_tree.get_child_keys(update_tree_node); if child_key.len() != 1 { - return Err(anyhow::anyhow!("When proving extension node had {} many child keys when we should only have 1", child_key.len())); + return Err(MP2PlannerError::ProvingError(format!( + "Expected nodes child keys to have length 1, actual length: {}", + child_key.len() + ))); } let child_proof = data_store .get(&child_key[0]) - .ok_or(anyhow::anyhow!( + .ok_or(MP2PlannerError::UpdateTreeError(format!( "Extension node child had no proof data for key: {:?}", child_key[0] - ))? + )))? 
.clone(); - let proof_data = - data_store - .get_mut(work_plan_item.k()) - .ok_or(anyhow::anyhow!( - "No ProofData found for key: {:?}", - work_plan_item.k() - ))?; + let proof_data = data_store.get_mut(work_plan_item.k()).ok_or( + MP2PlannerError::UpdateTreeError(format!( + "No ProofData found for key: {:?}", + work_plan_item.k() + )), + )?; let input = CircuitInput::new_extension( proof_data.node.clone(), - child_proof.proof.ok_or(anyhow::anyhow!( - "Extension node child proof was a None value" + child_proof.proof.ok_or(MP2PlannerError::UpdateTreeError( + "Extension node child proof was a None value".to_string(), ))?, ); - let proof = generate_proof(pp, input)?; + let proof = generate_proof(pp, input).map_err(|_| { + MP2PlannerError::ProvingError( + "Error calling generate proof API".to_string(), + ) + })?; proof_data.proof = Some(proof); - update_plan.done(&work_plan_item)?; + update_plan.done(&work_plan_item).map_err(|_| { + MP2PlannerError::UpdateTreeError( + "Could not mark work plan item as done".to_string(), + ) + })?; } NodeType::Branch => { let child_keys = update_tree.get_child_keys(update_tree_node); @@ -203,38 +261,49 @@ where .map(|key| { data_store .get(key) - .ok_or(anyhow::anyhow!( + .ok_or(MP2PlannerError::UpdateTreeError(format!( "Branch child data could not be found for key: {:?}", key - ))? + )))? 
.clone() .proof - .ok_or(anyhow::anyhow!("No proof found in brnach node child")) + .ok_or(MP2PlannerError::UpdateTreeError( + "No proof found in brnach node child".to_string(), + )) }) - .collect::>>>()?; - let proof_data = - data_store - .get_mut(work_plan_item.k()) - .ok_or(anyhow::anyhow!( - "No ProofData found for key: {:?}", - work_plan_item.k() - ))?; + .collect::>, MP2PlannerError>>()?; + let proof_data = data_store.get_mut(work_plan_item.k()).ok_or( + MP2PlannerError::UpdateTreeError(format!( + "No ProofData found for key: {:?}", + work_plan_item.k() + )), + )?; let input = CircuitInput::new_branch(proof_data.node.clone(), child_proofs); - let proof = generate_proof(pp, input)?; + let proof = generate_proof(pp, input).map_err(|_| { + MP2PlannerError::ProvingError( + "Error calling generate proof API".to_string(), + ) + })?; proof_data.proof = Some(proof); - update_plan.done(&work_plan_item)?; + update_plan.done(&work_plan_item).map_err(|_| { + MP2PlannerError::UpdateTreeError( + "Could not mark work plan item as done".to_string(), + ) + })?; } } } let final_data = data_store .get(update_tree.root()) - .ok_or(anyhow::anyhow!("No data for root of update tree found"))? + .ok_or(MP2PlannerError::UpdateTreeError( + "No data for root of update tree found".to_string(), + ))? 
.clone(); - final_data - .proof - .ok_or(anyhow::anyhow!("No proof stored for final data")) + final_data.proof.ok_or(MP2PlannerError::UpdateTreeError( + "No proof stored for final data".to_string(), + )) } } @@ -308,7 +377,7 @@ pub mod tests { event: event_info, }; - let metadata = TableMetadata::<7, 2>::from(event_info); + let metadata = TableMetadata::from(event_info); let metadata_digest = metadata.digest(); diff --git a/mp2-v1/tests/common/cases/indexing.rs b/mp2-v1/tests/common/cases/indexing.rs index a89dff2a7..82166dad8 100644 --- a/mp2-v1/tests/common/cases/indexing.rs +++ b/mp2-v1/tests/common/cases/indexing.rs @@ -26,7 +26,13 @@ use plonky2::{ plonk::config::Hasher, }; use rand::{thread_rng, Rng}; -use ryhope::storage::RoEpochKvStorage; +use ryhope::{ + storage::{ + pgsql::{SqlServerConnection, SqlStorageSettings}, + RoEpochKvStorage, + }, + InitSettings, +}; use crate::common::{ bindings::eventemitter::EventEmitter::{self, EventEmitterInstance}, @@ -43,10 +49,10 @@ use crate::common::{ TableIndexing, }, proof_storage::{ProofKey, ProofStorage}, - rowtree::SecondaryIndexCell, + rowtree::{MerkleRowTree, SecondaryIndexCell}, table::{ - CellsUpdate, IndexType, IndexUpdate, Table, TableColumn, TableColumns, TableRowUniqueID, - TreeRowUpdate, TreeUpdateType, + row_table_name, CellsUpdate, IndexType, IndexUpdate, Table, TableColumn, TableColumns, + TableRowUniqueID, TreeRowUpdate, TreeUpdateType, }, TableInfo, TestContext, }; @@ -736,20 +742,19 @@ impl TableIndexing { // If we are dealing with receipts we need to remove everything already in the row tree let bn = ctx.block_number().await as BlockPrimaryIndex; - let table_row_updates = if let ChangeType::Receipt(..) 
= ut { - let current_row_epoch = self.table.row.current_epoch(); - let current_row_keys = self - .table - .row - .keys_at(current_row_epoch) - .await - .into_iter() - .map(TableRowUpdate::::Deletion) - .collect::>(); - [current_row_keys, table_row_updates].concat() - } else { - table_row_updates - }; + if let ChangeType::Receipt(..) = ut { + let db_url = + std::env::var("DB_URL").unwrap_or("host=localhost dbname=storage".to_string()); + self.table.row = MerkleRowTree::new( + InitSettings::MustExist, + SqlStorageSettings { + table: row_table_name(&self.table.public_name), + source: SqlServerConnection::NewConnection(db_url.clone()), + }, + ) + .await + .unwrap(); + } log::info!("Applying follow up updates to contract done - now at block {bn}",); // we first run the initial preprocessing and db creation. diff --git a/mp2-v1/tests/common/cases/table_source.rs b/mp2-v1/tests/common/cases/table_source.rs index e6e1eeb2c..c5f1b15c2 100644 --- a/mp2-v1/tests/common/cases/table_source.rs +++ b/mp2-v1/tests/common/cases/table_source.rs @@ -685,11 +685,8 @@ pub trait ReceiptExtractionArgs: proof_infos: &[ReceiptProofInfo], event: &EventLogInfo<{ Self::NO_TOPICS }, { Self::MAX_DATA_WORDS }>, block: PrimaryIndex, - ) -> Vec> - where - [(); 7 - 2 - Self::NO_TOPICS - Self::MAX_DATA_WORDS]:, - { - let metadata = TableMetadata::<7, 2>::from(*event); + ) -> Vec> { + let metadata = TableMetadata::from(*event); let (_, row_id) = metadata.input_value_digest(&[&[0u8; 32]; 2]); let input_columns_ids = metadata @@ -963,7 +960,7 @@ where } fn metadata_hash(&self, _contract_address: Address, _chain_id: u64) -> MetadataHash { - let table_metadata = TableMetadata::<7, 2>::from(self.get_event()); + let table_metadata = TableMetadata::from(self.get_event()); let digest = table_metadata.digest(); combine_digest_and_block(digest) } diff --git a/mp2-v1/tests/common/table.rs b/mp2-v1/tests/common/table.rs index d6edf9e82..ac60547a6 100644 --- a/mp2-v1/tests/common/table.rs +++ 
b/mp2-v1/tests/common/table.rs @@ -194,7 +194,7 @@ pub struct Table { pub(crate) db_pool: DBPool, } -fn row_table_name(name: &str) -> String { +pub(crate) fn row_table_name(name: &str) -> String { format!("row_{}", name) } fn index_table_name(name: &str) -> String { From 3783c4f8a175258d2a2e789c1b9e905e6d9ee8af Mon Sep 17 00:00:00 2001 From: Zack Youell Date: Wed, 29 Jan 2025 17:44:05 +0100 Subject: [PATCH 35/47] Added empty block tree proof --- mp2-common/src/u256.rs | 20 +- mp2-v1/tests/common/cases/indexing.rs | 46 ++- mp2-v1/tests/common/cases/slot_info.rs | 3 +- mp2-v1/tests/common/cases/table_source.rs | 6 +- mp2-v1/tests/integrated_tests.rs | 12 +- verifiable-db/src/block_tree/api.rs | 51 +++- verifiable-db/src/block_tree/empty.rs | 283 ++++++++++++++++++ verifiable-db/src/block_tree/mod.rs | 1 + .../universal_circuit/basic_operation.rs | 2 +- .../src/query/universal_circuit/cells.rs | 2 +- .../output_with_aggregation.rs | 2 +- .../src/revelation/placeholders_check.rs | 2 +- 12 files changed, 382 insertions(+), 48 deletions(-) create mode 100644 verifiable-db/src/block_tree/empty.rs diff --git a/mp2-common/src/u256.rs b/mp2-common/src/u256.rs index 677388588..685693908 100644 --- a/mp2-common/src/u256.rs +++ b/mp2-common/src/u256.rs @@ -60,7 +60,7 @@ pub fn is_less_than_or_equal_to_u256_arr(left: &[U256], right: &[U256]) -> (bool } /// Circuit representation of u256 -#[derive(Clone, Debug, Default, Serialize, Deserialize)] +#[derive(Clone, Debug, Default, Serialize, Deserialize, Copy)] pub struct UInt256Target([U32Target; NUM_LIMBS]); impl PartialEq for UInt256Target { @@ -231,7 +231,7 @@ impl, const D: usize> CircuitBuilderU256 } fn add_virtual_u256(&mut self) -> UInt256Target { - self.add_virtual_u256_arr::<1>()[0].clone() + self.add_virtual_u256_arr::<1>()[0] } fn add_virtual_u256_arr(&mut self) -> [UInt256Target; N] { @@ -530,8 +530,8 @@ impl, const D: usize> CircuitBuilderU256 ) -> UInt256Target { // first check if `cond` is a constant match 
self.target_as_constant(cond.target) { - Some(val) if val == F::ZERO => return right.clone(), - Some(val) if val == F::ONE => return left.clone(), + Some(val) if val == F::ZERO => return *right, + Some(val) if val == F::ONE => return *left, _ => (), }; let limbs = create_array(|i| { @@ -739,10 +739,10 @@ impl UInt256Target { let quotient = b.add_virtual_u256(); let remainder = b.add_virtual_u256(); b.add_simple_generator(UInt256DivGenerator { - dividend: self.clone(), - divisor: other.clone(), - quotient: quotient.clone(), - remainder: remainder.clone(), + dividend: *self, + divisor: *other, + quotient, + remainder, is_div, }); // enforce that remainder < other, if other != 0 and is_div == true; @@ -771,9 +771,9 @@ impl UInt256Target { // otherwise, prod = quotient*other, as we need to later check that quotient*other + remainder == self let mul_input = if let Some(val) = b.target_as_constant(is_div.target) { if val == F::ONE { - quotient.clone() + quotient } else { - self.clone() + *self } } else { b.select_u256(is_div, "ient, self) diff --git a/mp2-v1/tests/common/cases/indexing.rs b/mp2-v1/tests/common/cases/indexing.rs index 82166dad8..62fc455b1 100644 --- a/mp2-v1/tests/common/cases/indexing.rs +++ b/mp2-v1/tests/common/cases/indexing.rs @@ -26,13 +26,7 @@ use plonky2::{ plonk::config::Hasher, }; use rand::{thread_rng, Rng}; -use ryhope::{ - storage::{ - pgsql::{SqlServerConnection, SqlStorageSettings}, - RoEpochKvStorage, - }, - InitSettings, -}; +use ryhope::storage::RoEpochKvStorage; use crate::common::{ bindings::eventemitter::EventEmitter::{self, EventEmitterInstance}, @@ -49,10 +43,10 @@ use crate::common::{ TableIndexing, }, proof_storage::{ProofKey, ProofStorage}, - rowtree::{MerkleRowTree, SecondaryIndexCell}, + rowtree::SecondaryIndexCell, table::{ - row_table_name, CellsUpdate, IndexType, IndexUpdate, Table, TableColumn, TableColumns, - TableRowUniqueID, TreeRowUpdate, TreeUpdateType, + CellsUpdate, IndexType, IndexUpdate, Table, TableColumn, 
TableColumns, TableRowUniqueID, + TreeRowUpdate, TreeUpdateType, }, TableInfo, TestContext, }; @@ -125,7 +119,7 @@ fn single_value_slot_inputs() -> Vec { slot_inputs } -pub(crate) const TX_INDEX_COLUMN: &str = "tx index"; +pub(crate) const TX_INDEX_COLUMN: &str = "tx_index"; impl TableIndexing { pub(crate) async fn merge_table_test_case( @@ -703,7 +697,7 @@ impl TableIndexing { .await; Ok(( TableIndexing:: { - value_column: "".to_string(), + value_column: table.columns.rest[0].name.clone(), source, table, contract, @@ -739,22 +733,24 @@ impl TableIndexing { if table_row_updates.is_empty() { continue; } + // If we are dealing with receipts we need to remove everything already in the row tree let bn = ctx.block_number().await as BlockPrimaryIndex; - if let ChangeType::Receipt(..) = ut { - let db_url = - std::env::var("DB_URL").unwrap_or("host=localhost dbname=storage".to_string()); - self.table.row = MerkleRowTree::new( - InitSettings::MustExist, - SqlStorageSettings { - table: row_table_name(&self.table.public_name), - source: SqlServerConnection::NewConnection(db_url.clone()), - }, - ) - .await - .unwrap(); - } + let table_row_updates = if let ChangeType::Receipt(..) = ut { + let current_row_epoch = self.table.row.current_epoch(); + let current_row_keys = self + .table + .row + .keys_at(current_row_epoch) + .await + .into_iter() + .map(TableRowUpdate::::Deletion) + .collect::>(); + [current_row_keys, table_row_updates].concat() + } else { + table_row_updates + }; log::info!("Applying follow up updates to contract done - now at block {bn}",); // we first run the initial preprocessing and db creation. 
diff --git a/mp2-v1/tests/common/cases/slot_info.rs b/mp2-v1/tests/common/cases/slot_info.rs index 43eab11ba..1e099df05 100644 --- a/mp2-v1/tests/common/cases/slot_info.rs +++ b/mp2-v1/tests/common/cases/slot_info.rs @@ -497,7 +497,8 @@ impl StorageSlotValue for U256 { fn sample_u256() -> U256 { let rng = &mut thread_rng(); - U256::from_limbs(rng.gen()) + let sampled: u64 = rng.gen(); + U256::from(sampled) } fn sample_u128() -> u128 { diff --git a/mp2-v1/tests/common/cases/table_source.rs b/mp2-v1/tests/common/cases/table_source.rs index c5f1b15c2..53b139df3 100644 --- a/mp2-v1/tests/common/cases/table_source.rs +++ b/mp2-v1/tests/common/cases/table_source.rs @@ -828,15 +828,11 @@ impl TableSource for R where [(); ::NO_TOPICS]:, [(); ::MAX_DATA_WORDS]:, - [(); 7 - - 2 - - ::NO_TOPICS - - ::MAX_DATA_WORDS]:, { type Metadata = EventLogInfo<{ R::NO_TOPICS }, { R::MAX_DATA_WORDS }>; fn can_query(&self) -> bool { - false + true } fn get_data(&self) -> Self::Metadata { diff --git a/mp2-v1/tests/integrated_tests.rs b/mp2-v1/tests/integrated_tests.rs index 022b2edf1..43db34501 100644 --- a/mp2-v1/tests/integrated_tests.rs +++ b/mp2-v1/tests/integrated_tests.rs @@ -75,6 +75,7 @@ const PROOF_STORE_FILE: &str = "test_proofs.store"; const MAPPING_TABLE_INFO_FILE: &str = "mapping_column_info.json"; const MAPPING_OF_MAPPING_TABLE_INFO_FILE: &str = "mapping_of_mapping_column_info.json"; const MERGE_TABLE_INFO_FILE: &str = "merge_column_info.json"; +const RECEIPT_TABLE_INFO_FILE: &str = "receipt_column_info.json"; #[test(tokio::test)] #[ignore] @@ -97,7 +98,7 @@ async fn integrated_indexing() -> Result<()> { let changes = vec![ ChangeType::Receipt(1, 10), ChangeType::Receipt(10, 1), - ChangeType::Receipt(0, 10), + ChangeType::Receipt(5, 5), ]; receipt.run(&mut ctx, genesis, changes.clone()).await?; @@ -176,6 +177,7 @@ async fn integrated_indexing() -> Result<()> { MAPPING_OF_MAPPING_TABLE_INFO_FILE, mapping_of_struct_mappings.table_info(), )?; + 
write_table_info(RECEIPT_TABLE_INFO_FILE, receipt.table_info())?; Ok(()) } @@ -225,6 +227,14 @@ async fn integrated_querying_mapping_of_mappings_table() -> Result<()> { let table_info = read_table_info(MAPPING_OF_MAPPING_TABLE_INFO_FILE)?; integrated_querying(table_info).await } +#[test(tokio::test)] +#[ignore] +async fn integrated_querying_receipt_table() -> Result<()> { + let _ = env_logger::try_init(); + info!("Running QUERY test for merged table"); + let table_info: TableInfo> = read_table_info(RECEIPT_TABLE_INFO_FILE)?; + integrated_querying(table_info).await +} fn table_info_path(f: &str) -> PathBuf { let cfg = TestContextConfig::init_from_env() diff --git a/verifiable-db/src/block_tree/api.rs b/verifiable-db/src/block_tree/api.rs index e8c7d33dd..642426912 100644 --- a/verifiable-db/src/block_tree/api.rs +++ b/verifiable-db/src/block_tree/api.rs @@ -3,6 +3,7 @@ use crate::extraction::{ExtractionPI, ExtractionPIWrap}; use super::{ + empty::{EmptyCircuit, RecursiveEmptyInput, RecursiveEmptyWires}, leaf::{LeafCircuit, RecursiveLeafInput, RecursiveLeafWires}, membership::{MembershipCircuit, MembershipWires}, parent::{ParentCircuit, RecursiveParentInput, RecursiveParentWires}, @@ -40,6 +41,10 @@ pub enum CircuitInput { witness: MembershipCircuit, right_child_proof: Vec, }, + Empty { + witness: EmptyCircuit, + extraction_proof: Vec, + }, } impl CircuitInput { @@ -104,6 +109,22 @@ impl CircuitInput { right_child_proof, } } + + /// Create a circuit input for proving an "empty" node. So we prove a node that contained no values + /// so it isn't added to the tree. Used to not break the IVC proof. 
+ pub fn new_empty( + index_identifier: u64, + tree_hash: &HashOutput, + extraction_proof: Vec, + ) -> Self { + CircuitInput::Empty { + witness: EmptyCircuit { + index_identifier: F::from_canonical_u64(index_identifier), + h_old: HashOut::::from_bytes(tree_hash.into()), + }, + extraction_proof, + } + } } /// Main struct holding the different circuit parameters for each of the circuits defined here. @@ -116,14 +137,15 @@ where leaf: CircuitWithUniversalVerifier>, parent: CircuitWithUniversalVerifier>, membership: CircuitWithUniversalVerifier, + empty: CircuitWithUniversalVerifier>, set: RecursiveCircuits, } const BLOCK_INDEX_IO_LEN: usize = PublicInputs::::TOTAL_LEN; /// Number of circuits in the set -/// 1 leaf + 1 parent + 1 membership -const CIRCUIT_SET_SIZE: usize = 3; +/// 1 leaf + 1 parent + 1 membership + 1 empty +const CIRCUIT_SET_SIZE: usize = 4; impl PublicParameters where @@ -146,12 +168,14 @@ where let leaf = builder.build_circuit((extraction_set.clone(), rows_tree_set.clone())); let parent = builder.build_circuit((extraction_set.clone(), rows_tree_set.clone())); let membership = builder.build_circuit(()); + let empty = builder.build_circuit(extraction_set.clone()); // Build the circuit set. 
let circuits = vec![ prepare_recursive_circuit_for_circuit_set(&leaf), prepare_recursive_circuit_for_circuit_set(&parent), prepare_recursive_circuit_for_circuit_set(&membership), + prepare_recursive_circuit_for_circuit_set(&empty), ]; let set = RecursiveCircuits::::new(circuits); @@ -159,6 +183,7 @@ where leaf, parent, membership, + empty, set, } } @@ -204,6 +229,10 @@ where witness, right_child_proof, } => self.generate_membership_proof(witness, right_child_proof), + CircuitInput::Empty { + witness, + extraction_proof, + } => self.genererate_empty_proof(witness, extraction_proof, extraction_set), } } @@ -262,6 +291,24 @@ where .generate_proof(&self.membership, [child_proof], [&child_vk], witness)?; ProofWithVK::from((proof, self.membership.circuit_data().verifier_only.clone())).serialize() } + + fn genererate_empty_proof( + &self, + witness: EmptyCircuit, + extraction_proof: Vec, + extraction_set: &RecursiveCircuits, + ) -> Result> { + let extraction_proof = ProofWithVK::deserialize(&extraction_proof)?; + + let empty = RecursiveEmptyInput { + witness, + extraction_proof, + extraction_set: extraction_set.clone(), + }; + + let proof = self.set.generate_proof(&self.empty, [], [], empty)?; + ProofWithVK::from((proof, self.empty.circuit_data().verifier_only.clone())).serialize() + } } #[cfg(test)] diff --git a/verifiable-db/src/block_tree/empty.rs b/verifiable-db/src/block_tree/empty.rs new file mode 100644 index 000000000..56dc2f133 --- /dev/null +++ b/verifiable-db/src/block_tree/empty.rs @@ -0,0 +1,283 @@ +//! Module with the circuit used when we don't update the Block tree. For instance in the case of Receipts +//! 
if there are no relevent event logs in a block we still have to advance the IVC proof + +use super::public_inputs::PublicInputs; +use crate::extraction::{ExtractionPI, ExtractionPIWrap}; + +use anyhow::Result; +use mp2_common::{ + default_config, + poseidon::H, + proof::ProofWithVK, + public_inputs::PublicInputCommon, + serialization::{deserialize, serialize}, + types::CBuilder, + u256::CircuitBuilderU256, + utils::ToTargets, + CHasher, C, D, F, +}; +use plonky2::{ + hash::hash_types::{HashOut, HashOutTarget}, + iop::{ + target::Target, + witness::{PartialWitness, WitnessWrite}, + }, + plonk::{circuit_builder::CircuitBuilder, config::Hasher, proof::ProofWithPublicInputsTarget}, +}; + +use plonky2_ecgfp5::gadgets::curve::CircuitBuilderEcGFp5; +use recursion_framework::{ + circuit_builder::CircuitLogicWires, + framework::{ + RecursiveCircuits, RecursiveCircuitsVerifierGagdet, RecursiveCircuitsVerifierTarget, + }, +}; +use serde::{Deserialize, Serialize}; +use std::{iter, marker::PhantomData}; + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct EmptyWires { + /// Identifier of the block number column + pub(crate) index_identifier: Target, + /// The old root of the tree, + #[serde(serialize_with = "serialize", deserialize_with = "deserialize")] + pub(crate) h_old: HashOutTarget, +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct EmptyCircuit { + /// Identifier of the block number column + pub(crate) index_identifier: F, + /// The old root of the tree, + pub(crate) h_old: HashOut, +} + +impl EmptyCircuit { + fn build(b: &mut CBuilder, extraction_pi: &[Target]) -> EmptyWires { + let zero_256 = b.zero_u256(); + let curve_zero = b.curve_zero(); + let index_identifier = b.add_virtual_target(); + + let extraction_pi = E::PI::from_slice(extraction_pi); + + let block_number = extraction_pi.primary_index_value(); + + // Compute the hash of table metadata, to be exposed as public input to prove to + // the verifier that we extracted the correct storage 
slots and we place the data + // in the expected columns of the constructed tree; we add also the identifier + // of the block number column to the table metadata. + // metadata_hash = H(extraction_proof.DM || block_id) + let inputs = extraction_pi + .metadata_set_digest() + .to_targets() + .iter() + .cloned() + .chain(iter::once(index_identifier)) + .collect(); + let metadata_hash = b.hash_n_to_hash_no_pad::(inputs).elements; + + let h_old = b.add_virtual_hash(); + + // Register the public inputs. + PublicInputs::new( + &h_old.to_targets(), + &h_old.to_targets(), + &zero_256.to_targets(), + &zero_256.to_targets(), + &block_number, + &extraction_pi.commitment(), + &extraction_pi.prev_commitment(), + &metadata_hash, + &curve_zero.to_targets(), + ) + .register(b); + + EmptyWires { + index_identifier, + h_old, + } + } + + /// Assign the wires. + fn assign(&self, pw: &mut PartialWitness, wires: &EmptyWires) { + pw.set_target(wires.index_identifier, self.index_identifier); + pw.set_hash_target(wires.h_old, self.h_old); + } +} + +#[derive(Clone, Serialize, Deserialize)] +pub(crate) struct RecursiveEmptyWires { + empty_wires: EmptyWires, + extraction_verifier: RecursiveCircuitsVerifierTarget, + _e: PhantomData, +} + +#[derive(Clone, Debug)] +pub(crate) struct RecursiveEmptyInput { + pub(crate) witness: EmptyCircuit, + pub(crate) extraction_proof: ProofWithVK, + pub(crate) extraction_set: RecursiveCircuits, +} + +impl CircuitLogicWires for RecursiveEmptyWires +where + [(); E::PI::TOTAL_LEN]:, + [(); >::HASH_SIZE]:, +{ + // Final extraction circuit set + rows tree circuit set + type CircuitBuilderParams = RecursiveCircuits; + + type Inputs = RecursiveEmptyInput; + + const NUM_PUBLIC_INPUTS: usize = PublicInputs::::TOTAL_LEN; + + fn circuit_logic( + builder: &mut CircuitBuilder, + _verified_proofs: [&ProofWithPublicInputsTarget; 0], + builder_parameters: Self::CircuitBuilderParams, + ) -> Self { + let extraction_verifier = + RecursiveCircuitsVerifierGagdet::::new( + 
default_config(), + &builder_parameters, + ); + let extraction_verifier = extraction_verifier.verify_proof_in_circuit_set(builder); + let extraction_pi = + extraction_verifier.get_public_input_targets::(); + + let empty_wires = EmptyCircuit::build::(builder, extraction_pi); + + RecursiveEmptyWires { + empty_wires, + extraction_verifier, + _e: PhantomData, + } + } + + fn assign_input(&self, inputs: Self::Inputs, pw: &mut PartialWitness) -> Result<()> { + inputs.witness.assign(pw, &self.empty_wires); + + let (proof, vd) = inputs.extraction_proof.into(); + self.extraction_verifier + .set_target(pw, &inputs.extraction_set, &proof, &vd) + } +} + +#[cfg(test)] +mod tests { + use crate::block_tree::{ + leaf::tests::compute_expected_hash, + tests::{TestPIField, TestPITargets}, + }; + + use super::{super::tests::random_extraction_pi, *}; + use alloy::primitives::U256; + use mp2_common::{ + digest::Digest, + utils::{Fieldable, ToFields}, + }; + use mp2_test::{ + circuit::{run_circuit, UserCircuit}, + utils::random_vector, + }; + use plonky2::{field::types::Field, hash::hash_types::NUM_HASH_OUT_ELTS}; + use rand::{thread_rng, Rng}; + + #[derive(Clone, Debug)] + struct TestEmptyCircuit<'a> { + c: EmptyCircuit, + extraction_pi: &'a [F], + } + + impl UserCircuit for TestEmptyCircuit<'_> { + // Parent node wires + extraction public inputs + type Wires = (EmptyWires, Vec); + + fn build(b: &mut CBuilder) -> Self::Wires { + let extraction_pi = b.add_virtual_targets(TestPITargets::TOTAL_LEN); + + let empty_wires = EmptyCircuit::build::(b, &extraction_pi); + + (empty_wires, extraction_pi) + } + + fn prove(&self, pw: &mut PartialWitness, wires: &Self::Wires) { + self.c.assign(pw, &wires.0); + + assert_eq!(wires.1.len(), TestPITargets::TOTAL_LEN); + pw.set_target_arr(&wires.1, self.extraction_pi); + } + } + + #[test] + fn test_block_index_parent_circuit() { + test_empty_circuit(); + } + + fn test_empty_circuit() { + let mut rng = thread_rng(); + + let index_identifier = 
rng.gen::().to_field(); + + let h_old = HashOut::from_vec(random_vector::(NUM_HASH_OUT_ELTS).to_fields()); + + let extraction_pi = + &random_extraction_pi(&mut rng, U256::from(1), &Digest::NEUTRAL.to_fields(), false); + + let test_circuit = TestEmptyCircuit { + c: EmptyCircuit { + index_identifier, + h_old, + }, + extraction_pi, + }; + + let proof = run_circuit::(test_circuit); + let pi = PublicInputs::from_slice(&proof.public_inputs); + let extraction_pi = TestPIField::from_slice(extraction_pi); + + let block_number = extraction_pi.block_number_raw(); + + // Check old hash + { + assert_eq!(pi.h_old, h_old.to_fields()); + } + // Check new hash + { + assert_eq!(pi.h_new, h_old.to_fields()); + } + // Check minimum block number + { + assert_eq!(pi.min, [F::ZERO; 8]); + } + // Check maximum block number + { + assert_eq!(pi.max, [F::ZERO; 8]); + } + // Check block number + { + assert_eq!(pi.block_number, block_number); + } + // Check block hash + { + assert_eq!(pi.block_hash, extraction_pi.block_hash_raw()); + } + // Check previous block hash + { + assert_eq!(pi.prev_block_hash, extraction_pi.prev_block_hash_raw()); + } + // Check metadata hash + { + let exp_hash = compute_expected_hash(&extraction_pi, index_identifier); + + assert_eq!(pi.metadata_hash, exp_hash.elements); + } + // Check new node digest + { + assert_eq!( + pi.new_value_set_digest_point(), + Digest::NEUTRAL.to_weierstrass() + ); + } + } +} diff --git a/verifiable-db/src/block_tree/mod.rs b/verifiable-db/src/block_tree/mod.rs index 4c07d6462..6070aa4a4 100644 --- a/verifiable-db/src/block_tree/mod.rs +++ b/verifiable-db/src/block_tree/mod.rs @@ -1,4 +1,5 @@ mod api; +mod empty; mod leaf; mod membership; mod parent; diff --git a/verifiable-db/src/query/universal_circuit/basic_operation.rs b/verifiable-db/src/query/universal_circuit/basic_operation.rs index 793a062cd..cd3a38afa 100644 --- a/verifiable-db/src/query/universal_circuit/basic_operation.rs +++ 
b/verifiable-db/src/query/universal_circuit/basic_operation.rs @@ -234,7 +234,7 @@ impl BasicOperationInputs { let op_selector = b.add_virtual_target(); let input_wires = BasicOperationInputWires { - constant_operand: constant_operand.clone(), + constant_operand: *constant_operand, placeholder_values: placeholder_values.to_vec().try_into().unwrap(), placeholder_ids, first_input_selector, diff --git a/verifiable-db/src/query/universal_circuit/cells.rs b/verifiable-db/src/query/universal_circuit/cells.rs index f57e5b04f..69886c3c9 100644 --- a/verifiable-db/src/query/universal_circuit/cells.rs +++ b/verifiable-db/src/query/universal_circuit/cells.rs @@ -140,7 +140,7 @@ mod tests { // Compute the root hash of cells tree. let (input_ids, input_values): (Vec<_>, Vec<_>) = - input_cells.iter().map(|c| (c.id, c.value.clone())).unzip(); + input_cells.iter().map(|c| (c.id, c.value)).unzip(); let real_root_hash = build_cells_tree(b, &input_values, &input_ids, &is_real_cell); // Check the output root hash. 
diff --git a/verifiable-db/src/query/universal_circuit/output_with_aggregation.rs b/verifiable-db/src/query/universal_circuit/output_with_aggregation.rs index cee481838..3b99a1069 100644 --- a/verifiable-db/src/query/universal_circuit/output_with_aggregation.rs +++ b/verifiable-db/src/query/universal_circuit/output_with_aggregation.rs @@ -87,7 +87,7 @@ impl OutputComponentValueWires for ValueWires Self::FirstT { - self.output_values[0].clone() + self.output_values[0] } fn other_output_values(&self) -> &[UInt256Target] { diff --git a/verifiable-db/src/revelation/placeholders_check.rs b/verifiable-db/src/revelation/placeholders_check.rs index cd53ce875..49b0bdc7f 100644 --- a/verifiable-db/src/revelation/placeholders_check.rs +++ b/verifiable-db/src/revelation/placeholders_check.rs @@ -380,7 +380,7 @@ pub(crate) fn check_placeholders( "random_access function cannot handle more than 64 elements" ); padded_placeholder_ids.resize(pad_len, placeholder_ids[0]); - padded_placeholder_values.resize(pad_len, placeholder_values[0].clone()); + padded_placeholder_values.resize(pad_len, placeholder_values[0]); let mut check_placeholder_pair = |id: &Target, value, pos| { // Check that the pair (id, value) is same as: From d7c6135a662f7e1e8468b1f03a7d006ba88b5009 Mon Sep 17 00:00:00 2001 From: Zack Youell Date: Thu, 30 Jan 2025 10:46:21 +0000 Subject: [PATCH 36/47] Integrated querying encodes 0 to hex string correctly now --- mp2-common/src/eth.rs | 2 +- .../common/cases/query/aggregated_queries.rs | 19 ++++++++++++++----- mp2-v1/tests/integrated_tests.rs | 2 +- 3 files changed, 16 insertions(+), 7 deletions(-) diff --git a/mp2-common/src/eth.rs b/mp2-common/src/eth.rs index 0fac6de3f..4b5f278ae 100644 --- a/mp2-common/src/eth.rs +++ b/mp2-common/src/eth.rs @@ -768,7 +768,7 @@ impl ReceiptQuery, block_util: &mut BlockUtil, diff --git a/mp2-v1/tests/common/cases/query/aggregated_queries.rs b/mp2-v1/tests/common/cases/query/aggregated_queries.rs index e0cf75396..9841b4ae1 100644 --- 
a/mp2-v1/tests/common/cases/query/aggregated_queries.rs +++ b/mp2-v1/tests/common/cases/query/aggregated_queries.rs @@ -459,7 +459,7 @@ pub(crate) async fn cook_query_secondary_index_nonexisting_placeholder, ) -> Result { let (longest_key, (min_block, max_block)) = find_longest_lived_key(table, false).await?; - let key_value = hex::encode(longest_key.value.to_be_bytes_trimmed_vec()); + let key_value = encode_hex(longest_key.value); info!( "Longest sequence is for key {longest_key:?} -> from block {:?} to {:?}, hex -> {}", min_block, max_block, key_value @@ -507,7 +507,7 @@ pub(crate) async fn cook_query_secondary_index_placeholder( info: &TableInfo, ) -> Result { let (longest_key, (min_block, max_block)) = find_longest_lived_key(table, false).await?; - let key_value = hex::encode(longest_key.value.to_be_bytes_trimmed_vec()); + let key_value = encode_hex(longest_key.value); info!( "Longest sequence is for key {longest_key:?} -> from block {:?} to {:?}, hex -> {}", min_block, max_block, key_value @@ -552,7 +552,7 @@ pub(crate) async fn cook_query_unique_secondary_index( info: &TableInfo, ) -> Result { let (longest_key, (min_block, max_block)) = find_longest_lived_key(table, false).await?; - let key_value = hex::encode(longest_key.value.to_be_bytes_trimmed_vec()); + let key_value = encode_hex(longest_key.value); info!( "Longest sequence is for key {longest_key:?} -> from block {:?} to {:?}, hex -> {}", min_block, max_block, key_value @@ -628,7 +628,7 @@ pub(crate) async fn cook_query_partial_block_range( info: &TableInfo, ) -> Result { let (longest_key, (min_block, max_block)) = find_longest_lived_key(table, false).await?; - let key_value = hex::encode(longest_key.value.to_be_bytes_trimmed_vec()); + let key_value = encode_hex(longest_key.value); info!( "Longest sequence is for key {longest_key:?} -> from block {:?} to {:?}, hex -> {}", min_block, max_block, key_value @@ -696,7 +696,7 @@ pub(crate) async fn cook_query_non_matching_entries_some_blocks( info: &TableInfo, 
) -> Result { let (longest_key, (min_block, max_block)) = find_longest_lived_key(table, true).await?; - let key_value = hex::encode(longest_key.value.to_be_bytes_trimmed_vec()); + let key_value = encode_hex(longest_key.value); info!( "Longest sequence is for key {longest_key:?} -> from block {:?} to {:?}, hex -> {}", min_block, max_block, key_value @@ -885,3 +885,12 @@ async fn check_correct_cells_tree( ); Ok(()) } + +/// Function for encoding [`U256`] values as hex strings which accounts for the value being zero +fn encode_hex(value: U256) -> String { + if value != U256::ZERO { + hex::encode(value.to_be_bytes_trimmed_vec()) + } else { + hex::encode([0]) + } +} diff --git a/mp2-v1/tests/integrated_tests.rs b/mp2-v1/tests/integrated_tests.rs index 43db34501..09956e67f 100644 --- a/mp2-v1/tests/integrated_tests.rs +++ b/mp2-v1/tests/integrated_tests.rs @@ -231,7 +231,7 @@ async fn integrated_querying_mapping_of_mappings_table() -> Result<()> { #[ignore] async fn integrated_querying_receipt_table() -> Result<()> { let _ = env_logger::try_init(); - info!("Running QUERY test for merged table"); + info!("Running QUERY test for receipt table"); let table_info: TableInfo> = read_table_info(RECEIPT_TABLE_INFO_FILE)?; integrated_querying(table_info).await } From 85dd6ed942e64f29b32e04593abbbef53bdeaa6c Mon Sep 17 00:00:00 2001 From: Zack Youell Date: Thu, 30 Jan 2025 14:03:10 +0000 Subject: [PATCH 37/47] IQ debugging --- mp2-common/src/eth.rs | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/mp2-common/src/eth.rs b/mp2-common/src/eth.rs index 4b5f278ae..d7d2590c2 100644 --- a/mp2-common/src/eth.rs +++ b/mp2-common/src/eth.rs @@ -789,6 +789,15 @@ impl ReceiptQuery, MP2EthError>>()?; + // // In the case when proofs is empty we just need to provide a proof for the root node + // if proofs.is_empty() { + // // Transaction index 0 should always be present + // let key = 0u64.rlp_bytes(); + + // // Get the proof but just use the first node of the proof + // let proof = 
block_util.receipts_trie.get_proof(&key[..])?; + + // } Ok(proofs) } } From da9660444725a2c68bce189d92a7f36f8910e8c5 Mon Sep 17 00:00:00 2001 From: Zack Youell Date: Fri, 31 Jan 2025 10:20:19 +0000 Subject: [PATCH 38/47] Rebased back on top of Generic Extraction --- mp2-v1/src/query/planner.rs | 4 ++-- mp2-v1/src/values_extraction/api.rs | 24 ++++++++----------- mp2-v1/src/values_extraction/mod.rs | 5 ++++ mp2-v1/tests/integrated_tests.rs | 3 ++- ryhope/src/tree/scapegoat.rs | 2 +- .../src/query/circuits/non_existence.rs | 2 +- verifiable-db/src/query/merkle_path.rs | 10 ++++---- verifiable-db/src/query/output_computation.rs | 4 ++-- .../row_chunk_gadgets/aggregate_chunks.rs | 8 +++---- .../row_chunk_gadgets/consecutive_rows.rs | 4 ++-- .../row_chunk_gadgets/row_process_gadget.rs | 2 +- .../universal_query_gadget.rs | 6 ++--- verifiable-db/src/query/utils.rs | 2 +- 13 files changed, 39 insertions(+), 37 deletions(-) diff --git a/mp2-v1/src/query/planner.rs b/mp2-v1/src/query/planner.rs index 7426da087..a01fe91e7 100644 --- a/mp2-v1/src/query/planner.rs +++ b/mp2-v1/src/query/planner.rs @@ -217,7 +217,7 @@ pub trait TreeFetcher: Sized } else { // we don't found the right child node in the tree, which means that the // successor might be out of range, so we return None - return None; + None } } else { // find successor among the ancestors of current node: we go up in the path @@ -298,7 +298,7 @@ pub trait TreeFetcher: Sized } else { // we don't found the left child node in the tree, which means that the // predecessor might be out of range, so we return None - return None; + None } } else { // find predecessor among the ancestors of current node: we go up in the path diff --git a/mp2-v1/src/values_extraction/api.rs b/mp2-v1/src/values_extraction/api.rs index d2607e165..d7411f84b 100644 --- a/mp2-v1/src/values_extraction/api.rs +++ b/mp2-v1/src/values_extraction/api.rs @@ -12,7 +12,7 @@ use super::{ leaf_receipt::{ReceiptLeafCircuit, ReceiptLeafWires}, 
leaf_single::{LeafSingleCircuit, LeafSingleWires}, public_inputs::PublicInputs, - INNER_KEY_ID_PREFIX, KEY_ID_PREFIX, OUTER_KEY_ID_PREFIX, + ColumnId, MappingKey, INNER_KEY_ID_PREFIX, KEY_ID_PREFIX, OUTER_KEY_ID_PREFIX, }; use crate::{api::InputNode, MAX_BRANCH_NODE_LEN, MAX_RECEIPT_COLUMNS}; use anyhow::{bail, ensure, Result}; @@ -86,7 +86,7 @@ where pub fn new_mapping_variable_leaf( node: Vec, slot: u8, - mapping_key: Vec, + mapping_key: MappingKey, key_id: u64, evm_word: u32, table_info: Vec, @@ -119,18 +119,18 @@ where // but are used in proving we are looking at the correct node. For instance mapping keys are used to calculate the position of a leaf node // that we need to extract from, but only the output of a keccak hash of some combination of them is included in the node, hence we feed them in as witness. let outer_input_column = - InputColumnInfo::new(&[slot], outer_key_id, OUTER_KEY_ID_PREFIX, 32); + InputColumnInfo::new(&[slot], outer_key_data.1, OUTER_KEY_ID_PREFIX, 32); let inner_input_column = - InputColumnInfo::new(&[slot], inner_key_id, INNER_KEY_ID_PREFIX, 32); + InputColumnInfo::new(&[slot], inner_key_data.1, INNER_KEY_ID_PREFIX, 32); let metadata = TableMetadata::new(&[outer_input_column, inner_input_column], &table_info); - let slot = MappingSlot::new(slot, outer_key); + let slot = MappingSlot::new(slot, outer_key_data.0); CircuitInput::LeafMappingOfMappings(LeafMappingOfMappingsCircuit { node, slot, - inner_key, + inner_key: inner_key_data.0, metadata, evm_word: evm_word as u8, }) @@ -863,10 +863,8 @@ mod tests { let encoded = test_circuit_input(CircuitInput::new_mapping_of_mappings_leaf( proof.last().unwrap().to_vec(), TEST_SLOT, - TEST_OUTER_KEY.to_vec(), - TEST_INNER_KEY.to_vec(), - outer_key_id, - inner_key_id, + (TEST_OUTER_KEY.to_vec(), outer_key_id), + (TEST_INNER_KEY.to_vec(), inner_key_id), TEST_EVM_WORD, table_info, )); @@ -1130,10 +1128,8 @@ mod tests { let circuit_input = CircuitInput::new_mapping_of_mappings_leaf( node, slot as u8, 
- outer_mapping_key, - inner_mapping_key, - key_ids[0], - key_ids[1], + (outer_mapping_key, key_ids[0]), + (inner_mapping_key, key_ids[1]), evm_word, table_info.to_vec(), ); diff --git a/mp2-v1/src/values_extraction/mod.rs b/mp2-v1/src/values_extraction/mod.rs index a201ba3c0..2c06dc88a 100644 --- a/mp2-v1/src/values_extraction/mod.rs +++ b/mp2-v1/src/values_extraction/mod.rs @@ -47,6 +47,11 @@ pub(crate) const OUTER_KEY_ID_PREFIX: &[u8] = b"\0OUT_KEY"; pub(crate) const BLOCK_ID_DST: &[u8] = b"BLOCK_NUMBER"; +/// Type for mapping keys +pub type MappingKey = Vec; +/// Type for column ID +pub type ColumnId = u64; + /// Storage slot information for generating the extraction proof #[derive(Clone, Debug, Eq, PartialEq, Hash, Serialize, Deserialize)] pub struct StorageSlotInfo { diff --git a/mp2-v1/tests/integrated_tests.rs b/mp2-v1/tests/integrated_tests.rs index 09956e67f..b98b2a6ef 100644 --- a/mp2-v1/tests/integrated_tests.rs +++ b/mp2-v1/tests/integrated_tests.rs @@ -224,7 +224,8 @@ async fn integrated_querying_merged_table() -> Result<()> { async fn integrated_querying_mapping_of_mappings_table() -> Result<()> { let _ = env_logger::try_init(); info!("Running QUERY test for merged table"); - let table_info = read_table_info(MAPPING_OF_MAPPING_TABLE_INFO_FILE)?; + let table_info: TableInfo> = + read_table_info(MAPPING_OF_MAPPING_TABLE_INFO_FILE)?; integrated_querying(table_info).await } #[test(tokio::test)] diff --git a/ryhope/src/tree/scapegoat.rs b/ryhope/src/tree/scapegoat.rs index b97165277..4ff0cea9c 100644 --- a/ryhope/src/tree/scapegoat.rs +++ b/ryhope/src/tree/scapegoat.rs @@ -312,7 +312,7 @@ impl Deserialize } } } else { - return Err(RyhopeError::fatal("the tree is empty")); + Err(RyhopeError::fatal("the tree is empty")) } } diff --git a/verifiable-db/src/query/circuits/non_existence.rs b/verifiable-db/src/query/circuits/non_existence.rs index 18cc90303..ebdd876b1 100644 --- a/verifiable-db/src/query/circuits/non_existence.rs +++ 
b/verifiable-db/src/query/circuits/non_existence.rs @@ -139,7 +139,7 @@ where // since they are exposed as public inputs let index_path = MerklePathWithNeighborsGadget::build( b, - index_node_value.clone(), + index_node_value, index_node_subtree_hash, primary_index, ); diff --git a/verifiable-db/src/query/merkle_path.rs b/verifiable-db/src/query/merkle_path.rs index 571cc8bb2..693088692 100644 --- a/verifiable-db/src/query/merkle_path.rs +++ b/verifiable-db/src/query/merkle_path.rs @@ -540,8 +540,8 @@ where embedded_tree_hash: end_node_tree_hash, child_hashes: [left_child_hash, right_child_hash], value: end_node_value, - min: end_node_info.node_min.clone(), - max: end_node_info.node_max.clone(), + min: end_node_info.node_min, + max: end_node_info.node_max, }; let end_node_hash = end_node.compute_node_hash(b, index_id); let (inputs, path) = MerklePathGadget::build_path(b, end_node_hash, index_id); @@ -665,8 +665,8 @@ where self.path_gadget.assign(pw, &wires.path_inputs); pw.set_u256_target_arr( &[ - wires.end_node_inputs.node_min.clone(), - wires.end_node_inputs.node_max.clone(), + wires.end_node_inputs.node_min, + wires.end_node_inputs.node_max, ], &[self.end_node_min, self.end_node_max], ); @@ -869,7 +869,7 @@ pub(crate) mod tests { let end_node_value = c.add_virtual_u256_unsafe(); let merkle_path_wires = MerklePathWithNeighborsGadget::build( c, - end_node_value.clone(), + end_node_value, end_node_tree_hash, index_id, ); diff --git a/verifiable-db/src/query/output_computation.rs b/verifiable-db/src/query/output_computation.rs index 70b5f232c..a58d7506b 100644 --- a/verifiable-db/src/query/output_computation.rs +++ b/verifiable-db/src/query/output_computation.rs @@ -96,8 +96,8 @@ where // which each field may be out of range of an Uint32 (to combine an Uint256). 
sum_value = b.select_u256(is_op_id, &u256_zero, &sum_value); } - let mut min_value = sum_value.clone(); - let mut max_value = sum_value.clone(); + let mut min_value = sum_value; + let mut max_value = sum_value; for p in outputs[1..].iter() { // Get the current proof value. let mut value = p.value_target_at_index(i); diff --git a/verifiable-db/src/query/row_chunk_gadgets/aggregate_chunks.rs b/verifiable-db/src/query/row_chunk_gadgets/aggregate_chunks.rs index 8cd1d1ec4..c38913687 100644 --- a/verifiable-db/src/query/row_chunk_gadgets/aggregate_chunks.rs +++ b/verifiable-db/src/query/row_chunk_gadgets/aggregate_chunks.rs @@ -196,25 +196,25 @@ mod tests { array::from_fn(|_| b.add_virtual_hash()); let left_boundary_row_path = MerklePathWithNeighborsGadget::build( b, - left_boundary_row_value.clone(), + left_boundary_row_value, left_boundary_row_subtree_hash, secondary_index_id, ); let left_boundary_index_path = MerklePathWithNeighborsGadget::build( b, - left_boundary_index_value.clone(), + left_boundary_index_value, left_boundary_row_path.root, primary_index_id, ); let right_boundary_row_path = MerklePathWithNeighborsGadget::build( b, - right_boundary_row_value.clone(), + right_boundary_row_value, right_boundary_row_subtree_hash, secondary_index_id, ); let right_boundary_index_path = MerklePathWithNeighborsGadget::build( b, - right_boundary_index_value.clone(), + right_boundary_index_value, right_boundary_row_path.root, primary_index_id, ); diff --git a/verifiable-db/src/query/row_chunk_gadgets/consecutive_rows.rs b/verifiable-db/src/query/row_chunk_gadgets/consecutive_rows.rs index 984f6828d..b1e545036 100644 --- a/verifiable-db/src/query/row_chunk_gadgets/consecutive_rows.rs +++ b/verifiable-db/src/query/row_chunk_gadgets/consecutive_rows.rs @@ -294,13 +294,13 @@ mod tests { let index_id = c.add_virtual_target(); let first_node_path = MerklePathWithNeighborsGadget::build( c, - first_node_value.clone(), + first_node_value, first_node_tree_hash, index_id, ); let 
second_node_path = MerklePathWithNeighborsGadget::build( c, - second_node_value.clone(), + second_node_value, second_node_tree_hash, index_id, ); diff --git a/verifiable-db/src/query/row_chunk_gadgets/row_process_gadget.rs b/verifiable-db/src/query/row_chunk_gadgets/row_process_gadget.rs index 30b9c84b6..d9d9bcb80 100644 --- a/verifiable-db/src/query/row_chunk_gadgets/row_process_gadget.rs +++ b/verifiable-db/src/query/row_chunk_gadgets/row_process_gadget.rs @@ -236,7 +236,7 @@ where let [primary_index_id, secondary_index_id] = array::from_fn(|i| hash_input_wires.column_extraction_wires.column_ids[i]); let [primary_index_value, secondary_index_value] = - array::from_fn(|i| value_wires.input_wires.column_values[i].clone()); + array::from_fn(|i| value_wires.input_wires.column_values[i]); let row_path = MerklePathWithNeighborsGadget::build( b, secondary_index_value, diff --git a/verifiable-db/src/query/universal_circuit/universal_query_gadget.rs b/verifiable-db/src/query/universal_circuit/universal_query_gadget.rs index 77991622b..08df6a889 100644 --- a/verifiable-db/src/query/universal_circuit/universal_query_gadget.rs +++ b/verifiable-db/src/query/universal_circuit/universal_query_gadget.rs @@ -763,8 +763,8 @@ where &output_component_wires.computational_hash(), ); - let min_secondary = min_query_secondary.get_bound_value().clone(); - let max_secondary = max_query_secondary.get_bound_value().clone(); + let min_secondary = *min_query_secondary.get_bound_value(); + let max_secondary = *max_query_secondary.get_bound_value(); let num_bound_overflows = QueryBoundTarget::num_overflows_for_query_bound_operations( b, &min_query_secondary, @@ -1043,7 +1043,7 @@ where if i == 0 { self.first_output.as_u256_target() } else { - self.other_outputs[i - 1].clone() + self.other_outputs[i - 1] } } diff --git a/verifiable-db/src/query/utils.rs b/verifiable-db/src/query/utils.rs index 1ee5e70db..873e9fd58 100644 --- a/verifiable-db/src/query/utils.rs +++ 
b/verifiable-db/src/query/utils.rs @@ -321,7 +321,7 @@ impl NodeInfoTarget { .into_iter() .for_each(|(target, value)| pw.set_hash_target(target, value)); pw.set_u256_target_arr( - &[self.min.clone(), self.max.clone(), self.value.clone()], + &[self.min, self.max, self.value], &[inputs.min, inputs.max, inputs.value], ); } From 1ccc21299de42dc330e9057120beacb5d7755e03 Mon Sep 17 00:00:00 2001 From: Zack Youell Date: Fri, 31 Jan 2025 13:26:00 +0000 Subject: [PATCH 39/47] Added dummy value extraction circuit --- mp2-common/src/eth.rs | 2 +- .../circuit_data_serialization.rs | 43 +++- mp2-v1/src/values_extraction/api.rs | 52 +++- mp2-v1/src/values_extraction/dummy.rs | 222 ++++++++++++++++++ mp2-v1/src/values_extraction/mod.rs | 1 + mp2-v1/src/values_extraction/planner.rs | 3 +- 6 files changed, 316 insertions(+), 7 deletions(-) create mode 100644 mp2-v1/src/values_extraction/dummy.rs diff --git a/mp2-common/src/eth.rs b/mp2-common/src/eth.rs index d7d2590c2..a3e03388d 100644 --- a/mp2-common/src/eth.rs +++ b/mp2-common/src/eth.rs @@ -141,7 +141,7 @@ pub fn extract_child_hashes(rlp_data: &[u8]) -> Vec> { } /// Enum used to distinguish between different types of node in an MPT. 
-#[derive(Clone, Debug, Copy, PartialEq, Eq, PartialOrd, Ord)] +#[derive(Clone, Debug, Copy, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)] pub enum NodeType { Branch, Extension, diff --git a/mp2-common/src/serialization/circuit_data_serialization.rs b/mp2-common/src/serialization/circuit_data_serialization.rs index 07895189a..44b28202f 100644 --- a/mp2-common/src/serialization/circuit_data_serialization.rs +++ b/mp2-common/src/serialization/circuit_data_serialization.rs @@ -1,5 +1,7 @@ use std::marker::PhantomData; +use plonky2::field::extension::quintic::QuinticExtension; +use plonky2::field::goldilocks_field::GoldilocksField; use plonky2::plonk::circuit_data::VerifierCircuitData; use plonky2::{ field::extension::Extendable, @@ -61,7 +63,7 @@ use plonky2_crypto::{ }, }; use plonky2_ecgfp5::{ - curve::base_field::InverseOrZero, + curve::{base_field::InverseOrZero, curve::Point}, gadgets::base_field::{QuinticQuotientGenerator, QuinticSqrtGenerator}, }; use poseidon2_plonky2::poseidon2_gate::{Poseidon2Gate, Poseidon2Generator}; @@ -71,6 +73,29 @@ use crate::u256::UInt256DivGenerator; use super::{FromBytes, SerializationError, ToBytes}; +impl ToBytes for Point { + fn to_bytes(&self) -> Vec { + let mut buffer = Vec::new(); + let encoded = self.encode(); + buffer + .write_field_ext::(encoded) + .expect("Writing to a byte-vector cannot fail."); + buffer + } +} + +impl FromBytes for Point { + fn from_bytes(bytes: &[u8]) -> Result { + let mut buffer = Buffer::new(bytes); + let compact: QuinticExtension = + buffer.read_field_ext::()?; + + Point::decode(compact).ok_or(SerializationError( + "Could not decode quintic extension to point".to_string(), + )) + } +} + impl> ToBytes for MerkleTree { fn to_bytes(&self) -> Vec { let mut buffer = Vec::new(); @@ -367,4 +392,20 @@ pub(super) mod tests { assert_eq!(decoded_mt.0, mt.0); } + + #[test] + fn test_point_serialization() { + let point = Point::rand(); + + #[derive(Serialize, Deserialize)] + struct 
TestPointSerialization( + #[serde(serialize_with = "serialize", deserialize_with = "deserialize")] Point, + ); + + let p = TestPointSerialization(point); + let encoded = bincode::serialize(&p).unwrap(); + let decoded_p: TestPointSerialization = bincode::deserialize(&encoded).unwrap(); + + assert_eq!(decoded_p.0, point); + } } diff --git a/mp2-v1/src/values_extraction/api.rs b/mp2-v1/src/values_extraction/api.rs index d7411f84b..1c83a3ad8 100644 --- a/mp2-v1/src/values_extraction/api.rs +++ b/mp2-v1/src/values_extraction/api.rs @@ -2,6 +2,7 @@ use super::{ branch::{BranchCircuit, BranchWires}, + dummy::{DummyNodeCircuit, DummyNodeWires}, extension::{ExtensionNodeCircuit, ExtensionNodeWires}, gadgets::{ column_info::{ExtractedColumnInfo, InputColumnInfo}, @@ -15,6 +16,7 @@ use super::{ ColumnId, MappingKey, INNER_KEY_ID_PREFIX, KEY_ID_PREFIX, OUTER_KEY_ID_PREFIX, }; use crate::{api::InputNode, MAX_BRANCH_NODE_LEN, MAX_RECEIPT_COLUMNS}; +use alloy::primitives::B256; use anyhow::{bail, ensure, Result}; use log::debug; use mp2_common::{ @@ -28,6 +30,7 @@ use mp2_common::{ }; use paste::paste; use plonky2::{field::types::PrimeField64, hash::hash_types::HashOut, plonk::config::Hasher}; +use plonky2_ecgfp5::curve::curve::Point; #[cfg(test)] use recursion_framework::framework_testing::{ new_universal_circuit_builder_for_testing, TestingRecursiveCircuits, @@ -56,6 +59,7 @@ where LeafReceipt(ReceiptLeafCircuit), Extension(ExtensionInput), Branch(BranchInput), + Dummy(DummyNodeCircuit), } impl @@ -165,6 +169,14 @@ where serialized_child_proofs: child_proofs, }) } + + /// Create a circuit input for proving a dummy node. 
+ pub fn new_dummy(trie_root: B256, metadata_digest: Point) -> Self { + CircuitInput::Dummy(DummyNodeCircuit { + root_hash: trie_root, + metadata_digest, + }) + } } /// Main struct holding the different circuit parameters for each of the MPT @@ -183,6 +195,7 @@ where leaf_receipt: CircuitWithUniversalVerifier>, extension: CircuitWithUniversalVerifier, + dummy: CircuitWithUniversalVerifier, #[cfg(not(test))] branches: BranchCircuits, #[cfg(test)] @@ -364,8 +377,8 @@ impl_branch_circuits!(BranchCircuits, 2, 9, 16); impl_branch_circuits!(TestBranchCircuits, 1, 4, 9); /// Number of circuits in the set -/// 3 branch circuits + 1 extension + 1 leaf single + 1 leaf mapping + 1 leaf mapping of mappings + 1 leaf receipt -const MAPPING_CIRCUIT_SET_SIZE: usize = 8; +/// 3 branch circuits + 1 extension + 1 leaf single + 1 leaf mapping + 1 leaf mapping of mappings + 1 leaf receipt + 1 dummy circuit +const MAPPING_CIRCUIT_SET_SIZE: usize = 9; impl PublicParameters @@ -406,6 +419,9 @@ where debug!("Building extension circuit"); let extension = circuit_builder.build_circuit::(()); + debug!("Building dummy circuit"); + let dummy = circuit_builder.build_circuit::(()); + debug!("Building branch circuits"); #[cfg(not(test))] let branches = BranchCircuits::new(&circuit_builder); @@ -418,6 +434,7 @@ where leaf_mapping_of_mappings.get_verifier_data().circuit_digest, leaf_receipt.get_verifier_data().circuit_digest, extension.get_verifier_data().circuit_digest, + dummy.get_verifier_data().circuit_digest, ]; circuits_set.extend(branches.circuit_set()); assert_eq!(circuits_set.len(), MAPPING_CIRCUIT_SET_SIZE); @@ -428,6 +445,7 @@ where leaf_mapping_of_mappings, leaf_receipt, extension, + dummy, branches, #[cfg(not(test))] set: RecursiveCircuits::new_from_circuit_digests(circuits_set), @@ -454,7 +472,9 @@ where CircuitInput::LeafReceipt(leaf) => set .generate_proof(&self.leaf_receipt, [], [], leaf) .map(|p| (p, self.leaf_receipt.get_verifier_data().clone()).into()), - + 
CircuitInput::Dummy(dummy) => set + .generate_proof(&self.dummy, [], [], dummy) + .map(|p| (p, self.leaf_receipt.get_verifier_data().clone()).into()), CircuitInput::Extension(ext) => { let mut child_proofs = ext.get_child_proofs()?; let (child_proof, child_vk) = child_proofs @@ -513,7 +533,7 @@ mod tests { mpt_sequential::{generate_random_storage_mpt, generate_receipt_test_info}, utils::random_vector, }; - use plonky2::field::types::Field; + use plonky2::field::types::{Field, Sample}; use plonky2_ecgfp5::curve::curve::Point; use rand::{thread_rng, Rng}; use std::{str::FromStr, sync::Arc}; @@ -877,6 +897,11 @@ mod tests { }, serialized_child_proofs: vec![encoded], })); + + // Test for dummy + let dummy_hash = B256::random(); + let dummy_md = Point::rand(); + test_circuit_input(CircuitInput::new_dummy(dummy_hash, dummy_md)); } fn test_api(test_slots: [StorageSlotInfo; 2]) { @@ -1015,6 +1040,25 @@ mod tests { ); } + #[test] + fn test_dummy_api() { + println!("Generating params..."); + let params = build_circuits_params(); + + let dummy_hash = B256::random(); + let dummy_md = Point::rand(); + + println!("Proving dummy circuit"); + let dummy_input = CircuitInput::new_dummy(dummy_hash, dummy_md); + + let now = std::time::Instant::now(); + generate_proof(¶ms, dummy_input).unwrap(); + println!( + "Proof for dummy node generated in {} ms", + now.elapsed().as_millis() + ); + } + /// Generate a leaf proof. fn prove_leaf(params: &PublicParameters, node: Vec, test_slot: StorageSlotInfo) -> Vec { // RLP(RLP(compact(partial_key_in_nibble)), RLP(value)) diff --git a/mp2-v1/src/values_extraction/dummy.rs b/mp2-v1/src/values_extraction/dummy.rs new file mode 100644 index 000000000..c05208ae6 --- /dev/null +++ b/mp2-v1/src/values_extraction/dummy.rs @@ -0,0 +1,222 @@ +//! Module containing circuit code for a dummy value extraction circuit when an MPT contains no relevant data +//! for the object we are indexing. 
+ +use super::public_inputs::{PublicInputs, PublicInputsArgs}; +use alloy::primitives::B256; +use anyhow::Result; +use mp2_common::{ + array::Array, + keccak::{OutputHash, PACKED_HASH_LEN}, + mpt_sequential::MPTKeyWire, + public_inputs::PublicInputCommon, + rlp::MAX_KEY_NIBBLE_LEN, + serialization::{deserialize, serialize}, + types::{CBuilder, GFp}, + utils::{Endianness, Packer, ToFields, ToTargets}, + D, +}; +use plonky2::{ + field::types::{Field, PrimeField64}, + iop::{ + target::Target, + witness::{PartialWitness, WitnessWrite}, + }, + plonk::proof::ProofWithPublicInputsTarget, +}; +use plonky2_ecgfp5::{curve::curve::Point, gadgets::curve::CircuitBuilderEcGFp5}; +use recursion_framework::circuit_builder::CircuitLogicWires; +use serde::{Deserialize, Serialize}; + +#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] +pub struct DummyNodeWires { + root: Array, + metadata_digest: Vec, + key: MPTKeyWire, +} + +/// Circuit to proving the processing of an extension node +#[derive(Clone, Debug, Copy, Serialize, Deserialize)] +pub struct DummyNodeCircuit { + pub(crate) root_hash: B256, + #[serde(serialize_with = "serialize", deserialize_with = "deserialize")] + pub(crate) metadata_digest: Point, +} + +impl DummyNodeCircuit { + pub fn build(b: &mut CBuilder) -> DummyNodeWires { + // Build the key wire which will have all zeroes for nibbles and the pointer set to F::NEG_ONE + let key = MPTKeyWire::new(b); + + // Build the output hash array + let root = OutputHash::new(b); + + // Build the metadata target + let dm = b.add_virtual_curve_target(); + + // Expose the public inputs. 
+ PublicInputsArgs { + h: &root, + k: &key, + dv: b.curve_zero(), + dm, + n: b.zero(), + } + .register(b); + + DummyNodeWires { + root: root.downcast_to_targets(), + metadata_digest: dm.to_targets(), + key, + } + } + + pub fn assign(&self, pw: &mut PartialWitness, wires: &DummyNodeWires) { + // Set the root + let packed_root = self + .root_hash + .0 + .pack(Endianness::Little) + .into_iter() + .map(GFp::from_canonical_u32) + .collect::>(); + pw.set_target_arr(&wires.root.arr, &packed_root); + + pw.set_target_arr( + &wires.metadata_digest, + &self.metadata_digest.to_weierstrass().to_fields(), + ); + + // First get field negative one in usize form + let ptr = GFp::NEG_ONE.to_canonical_u64() as usize; + wires.key.assign(pw, &[0; MAX_KEY_NIBBLE_LEN], ptr); + } +} + +/// Num of children = 1 +impl CircuitLogicWires for DummyNodeWires { + type CircuitBuilderParams = (); + + type Inputs = DummyNodeCircuit; + + const NUM_PUBLIC_INPUTS: usize = PublicInputs::::TOTAL_LEN; + + fn circuit_logic( + builder: &mut CBuilder, + _verified_proofs: [&ProofWithPublicInputsTarget; 0], + _builder_parameters: Self::CircuitBuilderParams, + ) -> Self { + DummyNodeCircuit::build(builder) + } + + fn assign_input(&self, inputs: Self::Inputs, pw: &mut PartialWitness) -> Result<()> { + inputs.assign(pw, self); + Ok(()) + } +} + +#[cfg(test)] +mod tests { + use super::{super::public_inputs::tests::new_extraction_public_inputs, *}; + + use mp2_common::{ + rlp::MAX_KEY_NIBBLE_LEN, + utils::{Endianness, Packer}, + C, D, F, + }; + use mp2_test::circuit::{run_circuit, UserCircuit}; + use plonky2::{ + field::types::{Field, Sample}, + iop::{target::Target, witness::WitnessWrite}, + plonk::circuit_builder::CircuitBuilder, + }; + + #[derive(Clone, Debug)] + struct TestDummyNodeCircuit<'a> { + c: DummyNodeCircuit, + exp_pi: PublicInputs<'a, F>, + } + + impl UserCircuit for TestDummyNodeCircuit<'_> { + // Extension node wires + child public inputs + type Wires = (DummyNodeWires, Vec); + + fn build(b: &mut 
CircuitBuilder) -> Self::Wires { + let exp_pi = b.add_virtual_targets(PublicInputs::::TOTAL_LEN); + let ext_wires = DummyNodeCircuit::build(b); + + (ext_wires, exp_pi) + } + + fn prove(&self, pw: &mut PartialWitness, wires: &Self::Wires) { + self.c.assign(pw, &wires.0); + + assert_eq!(wires.1.len(), PublicInputs::::TOTAL_LEN); + assert_eq!( + self.exp_pi.proof_inputs.len(), + PublicInputs::::TOTAL_LEN + ); + pw.set_target_arr(&wires.1, self.exp_pi.proof_inputs) + } + } + + #[test] + fn test_values_extraction_dummy_node_circuit() { + // Prepare the public inputs + let random_hash = B256::random(); + let md = Point::rand(); + let random_md = md.to_weierstrass(); + let key = vec![0u8; MAX_KEY_NIBBLE_LEN]; + let ptr = GFp::NEG_ONE.to_canonical_u64() as usize; + let values_digest = Point::NEUTRAL.to_weierstrass(); + + let exp_pi = new_extraction_public_inputs( + &random_hash.0.pack(Endianness::Little), + &key, + ptr, + &values_digest, + &random_md, + 0, + ); + + let exp_pi = PublicInputs::new(&exp_pi); + + // Quick test to see if we can convert back to public inputs. 
+ assert_eq!(random_hash.0.pack(Endianness::Little), exp_pi.root_hash()); + let (exp_key, exp_ptr) = exp_pi.mpt_key_info(); + assert_eq!( + key.iter() + .cloned() + .map(F::from_canonical_u8) + .collect::>(), + exp_key, + ); + assert_eq!(exp_ptr, GFp::NEG_ONE); + assert_eq!(Point::NEUTRAL.to_weierstrass(), exp_pi.values_digest()); + assert_eq!(random_md, exp_pi.metadata_digest()); + assert_eq!(GFp::ZERO, exp_pi.n()); + + let circuit = TestDummyNodeCircuit { + c: DummyNodeCircuit { + root_hash: random_hash, + metadata_digest: md, + }, + exp_pi: exp_pi.clone(), + }; + let proof = run_circuit::(circuit); + let pi = PublicInputs::new(&proof.public_inputs); + + { + let exp_hash = random_hash.0.pack(Endianness::Little); + assert_eq!(pi.root_hash(), exp_hash); + } + { + let (key, ptr) = pi.mpt_key_info(); + assert_eq!(key, exp_key); + + assert_eq!(ptr, exp_ptr); + } + assert_eq!(pi.values_digest(), exp_pi.values_digest()); + assert_eq!(pi.metadata_digest(), exp_pi.metadata_digest()); + assert_eq!(pi.n(), exp_pi.n()); + } +} diff --git a/mp2-v1/src/values_extraction/mod.rs b/mp2-v1/src/values_extraction/mod.rs index 2c06dc88a..cff827d38 100644 --- a/mp2-v1/src/values_extraction/mod.rs +++ b/mp2-v1/src/values_extraction/mod.rs @@ -25,6 +25,7 @@ use std::iter::once; pub mod api; mod branch; +mod dummy; mod extension; pub mod gadgets; mod leaf_mapping; diff --git a/mp2-v1/src/values_extraction/planner.rs b/mp2-v1/src/values_extraction/planner.rs index ba91ddcba..ee7e213d0 100644 --- a/mp2-v1/src/values_extraction/planner.rs +++ b/mp2-v1/src/values_extraction/planner.rs @@ -8,6 +8,7 @@ use alloy::{ use anyhow::Result; use mp2_common::eth::{node_type, EventLogInfo, MP2EthError, NodeType, ReceiptQuery}; use ryhope::storage::updatetree::{Next, UpdateTree}; +use serde::{Deserialize, Serialize}; use std::future::Future; use std::{ @@ -85,7 +86,7 @@ pub trait Extractable { ) -> impl Future, MP2PlannerError>>; } -#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord)] 
+#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)] struct ProofData { node: Vec, node_type: NodeType, From 9ae7296f9fa4d2c5d04a05fae1aa727254251830 Mon Sep 17 00:00:00 2001 From: Zack Youell Date: Mon, 3 Feb 2025 13:09:08 +0000 Subject: [PATCH 40/47] Values extraction API working --- mp2-common/src/array.rs | 25 +- mp2-common/src/keccak.rs | 40 +- mp2-common/src/mpt_sequential/utils.rs | 66 ++- mp2-common/src/rlp.rs | 49 +- mp2-v1/src/values_extraction/api.rs | 163 +++--- mp2-v1/src/values_extraction/branch.rs | 12 +- .../gadgets/column_gadget.rs | 521 ------------------ .../gadgets/metadata_gadget.rs | 19 +- mp2-v1/src/values_extraction/leaf_mapping.rs | 9 +- .../leaf_mapping_of_mappings.rs | 10 +- mp2-v1/src/values_extraction/leaf_single.rs | 13 +- mp2-v1/src/values_extraction/mod.rs | 4 +- 12 files changed, 227 insertions(+), 704 deletions(-) delete mode 100644 mp2-v1/src/values_extraction/gadgets/column_gadget.rs diff --git a/mp2-common/src/array.rs b/mp2-common/src/array.rs index 120235f62..bd3a32aac 100644 --- a/mp2-common/src/array.rs +++ b/mp2-common/src/array.rs @@ -488,12 +488,13 @@ where slice_len: Target, ) { let tru = b._true(); + let mut take = b._false(); for (i, (our, other)) in self.arr.iter().zip(other.arr.iter()).enumerate() { let it = b.constant(F::from_canonical_usize(i)); - // TODO: fixed to 6 becaues max nibble len = 64 - TO CHANGE - let before_end = less_than(b, it, slice_len, 6); + let reached_end = b.is_equal(slice_len, it); + take = b.or(take, reached_end); let eq = b.is_equal(our.to_target(), other.to_target()); - let res = b.select(before_end, eq.target, tru.target); + let res = b.select(take, tru.target, eq.target); b.connect(res, tru.target); } } @@ -526,25 +527,19 @@ where b: &mut CircuitBuilder, at: Target, ) -> Array { + let tru = b._true(); let m = b.constant(F::from_canonical_usize(SUB_SIZE)); + let orig_size = b.constant(F::from_canonical_usize(SIZE)); let upper_bound = b.add(at, m); let 
num_bits_size = SIZE.ilog2() + 1; + // By enforcing that upper_bound is less than or equal to total size we don't need to check at each step + let lt = less_than_or_equal_to_unsafe(b, upper_bound, orig_size, num_bits_size as usize); + b.connect(lt.target, tru.target); Array:: { arr: core::array::from_fn(|i| { let i_target = b.constant(F::from_canonical_usize(i)); let i_plus_n_target = b.add(at, i_target); - // ((i + offset) <= n + M) - // unsafe should be ok since the function assumes that `at + SUB_SIZE <= SIZE` - let lt = less_than_or_equal_to_unsafe( - b, - i_plus_n_target, - upper_bound, - num_bits_size as usize, - ); - // ((i+n) <= n+M) * (i+n) - let j = b.mul(lt.target, i_plus_n_target); - // out_val = arr[((i+n)<=n+M) * (i+n)] - self.value_at(b, j) + self.value_at(b, i_plus_n_target) }), } } diff --git a/mp2-common/src/keccak.rs b/mp2-common/src/keccak.rs index 18ad2f01c..7a38c135e 100644 --- a/mp2-common/src/keccak.rs +++ b/mp2-common/src/keccak.rs @@ -126,14 +126,14 @@ impl KeccakCircuit { a: &VectorWire, ) -> KeccakWires { let diff_target = b.add_virtual_target(); - let end_padding = b.add(a.real_len, diff_target); + let end_padding_offset = b.add(a.real_len, diff_target); let one = b.one(); - let end_padding = b.sub(end_padding, one); // inclusive range - // little endian so we start padding from the end of the byte - let single_pad = b.constant(F::from_canonical_usize(0x81)); // 1000 0001 + let end_padding = b.sub(end_padding_offset, one); // inclusive range + // little endian so we start padding from the end of the byte + let begin_pad = b.constant(F::from_canonical_usize(0x01)); // 0000 0001 let end_pad = b.constant(F::from_canonical_usize(0x80)); // 1000 0000 - // TODO : make that const generic + let padded_node = a .arr .arr @@ -141,31 +141,17 @@ impl KeccakCircuit { .enumerate() .map(|(i, byte)| { let i_target = b.constant(F::from_canonical_usize(i)); - // condition if we are within the data range ==> i < length - let is_data = less_than(b, 
i_target, a.real_len, 32); + // vector wires are filled with zeroes beyond a.real_len + // so we can just take the value no matter what and add padding if appropriate. + // condition if we start the padding ==> i == length let is_start_padding = b.is_equal(i_target, a.real_len); // condition if we are done with the padding ==> i == length + diff - 1 let is_end_padding = b.is_equal(i_target, end_padding); - // condition if we only need to add one byte 1000 0001 to pad - // because we work on u8 data, we know we're at least adding 1 byte and in - // this case it's 0x81 = 1000 0001 - // i == length == diff - 1 - let is_start_and_end = b.and(is_start_padding, is_end_padding); - - // nikko XXX: Is this sound ? I think so but not 100% sure. - // I think it's ok to not use `quin_selector` or `b.random_acess` because - // if the prover gives another byte target, then the resulting hash would be invalid, - let item_data = b.mul(is_data.target, *byte); - let item_start_padding = b.mul(is_start_padding.target, begin_pad); - let item_end_padding = b.mul(is_end_padding.target, end_pad); - let item_start_and_end = b.mul(is_start_and_end.target, single_pad); - // if all of these conditions are false, then item will be 0x00,i.e. the padding - let mut item = item_data; - item = b.add(item, item_start_padding); - item = b.add(item, item_end_padding); - item = b.add(item, item_start_and_end); - item + + // Adds the padding to the byte based on the calculated conditions. + let item = b.mul_add(is_start_padding.target, begin_pad, *byte); + b.mul_add(is_end_padding.target, end_pad, item) }) .collect::>(); @@ -176,7 +162,7 @@ impl KeccakCircuit { // to only look at a certain portion of our data, each bool says if the hash function // will update its state for this block or not. 
let rate_bytes = b.constant(F::from_canonical_usize(KECCAK256_R / 8)); - let end_padding_offset = b.add(end_padding, one); + let nb_blocks = b.div(end_padding_offset, rate_bytes); // - 1 because keccak always take first block so we don't count it let nb_actual_blocks = b.sub(nb_blocks, one); diff --git a/mp2-common/src/mpt_sequential/utils.rs b/mp2-common/src/mpt_sequential/utils.rs index a7c0be269..7ba11d1ea 100644 --- a/mp2-common/src/mpt_sequential/utils.rs +++ b/mp2-common/src/mpt_sequential/utils.rs @@ -57,13 +57,65 @@ pub fn left_pad_leaf_value< let value_len_80 = b.sub(value[0], byte_80); let value_len = b.select(is_single_byte, one, value_len_80); let offset = b.select(is_single_byte, zero, one); - value - // WARNING: this is a hack to avoid another const generic but - // what we should really do here is extract RLP_VALUE_LEN-1 because we - // consider 1 extra byte for the RLP header always (which may or may not exist) - .extract_array::(b, offset) - .into_vec(value_len) - .normalize_left::<_, _, PADDED_LEN>(b) + + // So the value is just in the first byte if is_single_byte is true + // Hence the first index we take is offset + value_len - 1 and then we continue until we hit + // offset + let tmp = b.add(offset, value_len); + let start = b.sub(tmp, one); + + let mut last_byte_found = b._false(); + + let mut result_bytes = [zero; PADDED_LEN]; + + // Need the length to be a power of two + let ram_value = if !value.arr.len().is_power_of_two() { + let new_size = value.arr.len().next_power_of_two(); + let mut value_vec = value.arr.to_vec(); + value_vec.resize(new_size, zero); + value_vec + } else { + value.arr.to_vec() + }; + + result_bytes + .iter_mut() + .rev() + .enumerate() + .for_each(|(i, out_byte)| { + // offset = info.byte_offset + i + let index = b.constant(F::from_canonical_usize(i)); + let inner_offset = b.sub(start, index); + // Set to 0 if found the last byte. 
+ let inner_offset = b.select(last_byte_found, zero, inner_offset); + + // Since VALUE_LEN is a constant that is determined at compile time this conditional won't + // cause any issues with the circuit. + let byte = if RLP_VALUE_LEN <= 64 { + b.random_access(inner_offset, ram_value.clone()) + } else { + value.random_access_large_array(b, inner_offset) + }; + + // Now if `last_byte_found` is true we add zero, otherwise add `byte` + let to_add = b.select(last_byte_found, zero, byte); + + *out_byte = b.add(*out_byte, to_add); + // is_last_byte = offset == last_byte_offset + let is_last_byte = b.is_equal(inner_offset, offset); + // last_byte_found |= is_last_byte + last_byte_found = b.or(last_byte_found, is_last_byte); + }); + + Array::::from_array(result_bytes) + + // value + // // WARNING: this is a hack to avoid another const generic but + // // what we should really do here is extract RLP_VALUE_LEN-1 because we + // // consider 1 extra byte for the RLP header always (which may or may not exist) + // .extract_array::(b, offset) + // .into_vec(value_len) + // .normalize_left::<_, _, PADDED_LEN>(b) } pub fn visit_proof(proof: &[Vec]) { diff --git a/mp2-common/src/rlp.rs b/mp2-common/src/rlp.rs index 01d6824ab..e695cd2da 100644 --- a/mp2-common/src/rlp.rs +++ b/mp2-common/src/rlp.rs @@ -1,6 +1,6 @@ use crate::array::{Array, VectorWire}; -use crate::utils::{greater_than_or_equal_to_unsafe, less_than, less_than_unsafe, num_to_bits}; +use crate::utils::{less_than, num_to_bits}; use plonky2::field::extension::Extendable; use plonky2::hash::hash_types::RichField; use plonky2::iop::target::{BoolTarget, Target}; @@ -146,6 +146,7 @@ pub fn decode_compact_encoding< cond, ) } + // Returns the length from the RLP prefix in case of long string or long list // data is the full data starting from the "type" byte of RLP encoding // data length needs to be a power of 2 @@ -157,33 +158,31 @@ pub fn data_len, const D: usize>( offset: Target, ) -> Target { let mut res = b.zero(); - let 
one = b.one(); - let const_256 = b.constant(F::from_canonical_u64(256)); + let const_256 = b.constant(F::from_canonical_u64(256)); + let mut last_byte_found = b._false(); + let lol_add_one = b.add_const(len_of_len, F::ONE); for i in 0..MAX_LEN_BYTES { - let i_tgt = b.constant(F::from_canonical_u8(i as u8)); + // We shift by one because the first byte is the rlp target. + let i_tgt = b.constant(F::from_canonical_u8(i as u8 + 1)); // make sure we don't read out more than the actual len - let len_of_len_pred = less_than_unsafe(b, i_tgt, len_of_len, 8); + let equal = b.is_equal(i_tgt, lol_add_one); + last_byte_found = b.or(equal, last_byte_found); + // this part offset i to read from the array - let i_offset = b.add(i_tgt, offset); - // i+1 because first byte is the RLP type - let i_plus_1 = b.add(i_offset, one); + let i_plus_1 = b.add(i_tgt, offset); + let item = quin_selector(b, data, i_plus_1); // shift result by one byte - let multiplicand = b.mul(const_256, res); // res += 2^i * arr[i+1] only if we're in right range - let sum = b.add(multiplicand, item); - let multiplicand_2 = b.mul(sum, len_of_len_pred.target); - - let not_len_of_len_pred_target = b.not(len_of_len_pred); - let multiplicand_3 = b.mul(not_len_of_len_pred_target.target, res); - // res = (2^i * arr[i+1]) * (i < len_len) + res * (i >= len_len) - res = b.add(multiplicand_2, multiplicand_3); + let sum = b.mul_add(const_256, res, item); + res = b.select(last_byte_found, res, sum); } res } + // We read the RLP header but knowing it is a value that is always <55bytes long // we can hardcode the type of RLP header it is and directly get the real number len // in this case, the header marker is 0x80 that we can directly take out from first byte @@ -249,7 +248,7 @@ pub fn decode_header, const D: usize>( let select_3 = b._if(prefix_less_0xb8, short_str_len, select_2); let len = b._if(prefix_less_0x80, one, select_3); - let data_type = greater_than_or_equal_to_unsafe(b, prefix, byte_c0, 8).target; + let 
data_type = b.not(prefix_less_0xc0).target; let final_offset = b.add(offset, offset_data); RlpHeader { @@ -323,25 +322,15 @@ pub fn quin_selector, const D: usize>( arr: &[Target], n: Target, ) -> Target { - let mut nums: Vec = vec![]; - + let mut sum = b.zero(); for (i, el) in arr.iter().enumerate() { let i_target = b.constant(F::from_canonical_usize(i)); let is_eq = b.is_equal(i_target, n); // (i == n (idx) ) * element - let product = b.mul(is_eq.target, *el); - nums.push(product); + sum = b.mul_add(is_eq.target, *el, sum); } - // SUM_i (i == n (idx) ) * element - // -> sum = element - calculate_total(b, &nums) -} -fn calculate_total, const D: usize>( - b: &mut CircuitBuilder, - arr: &[Target], -) -> Target { - b.add_many(arr) + sum } #[cfg(test)] diff --git a/mp2-v1/src/values_extraction/api.rs b/mp2-v1/src/values_extraction/api.rs index 1c83a3ad8..24bf67d47 100644 --- a/mp2-v1/src/values_extraction/api.rs +++ b/mp2-v1/src/values_extraction/api.rs @@ -474,7 +474,7 @@ where .map(|p| (p, self.leaf_receipt.get_verifier_data().clone()).into()), CircuitInput::Dummy(dummy) => set .generate_proof(&self.dummy, [], [], dummy) - .map(|p| (p, self.leaf_receipt.get_verifier_data().clone()).into()), + .map(|p| (p, self.dummy.get_verifier_data().clone()).into()), CircuitInput::Extension(ext) => { let mut child_proofs = ext.get_child_proofs()?; let (child_proof, child_vk) = child_proofs @@ -1078,35 +1078,71 @@ mod tests { let metadata = test_slot.table_columns(&contract_address, TEST_CHAIN_ID, vec![]); - let (expected_metadata_digest, expected_values_digest, circuit_input) = - match &test_slot.slot { - // Simple variable slot + let (expected_metadata_digest, expected_values_digest, circuit_input) = match &test_slot + .slot + { + // Simple variable slot + StorageSlot::Simple(slot) => { + let metadata_digest = metadata.digest(); + let values_digest = storage_value_digest(&metadata, &[], &value, &test_slot); + + let circuit_input = CircuitInput::new_single_variable_leaf( + node, + 
*slot as u8, + evm_word, + table_info.to_vec(), + ); + + (metadata_digest, values_digest, circuit_input) + } + // Mapping variable + StorageSlot::Mapping(mapping_key, slot) => { + let padded_key = left_pad32(mapping_key); + let metadata_digest = metadata.digest(); + let values_digest = + storage_value_digest(&metadata, &[&padded_key], &value, &test_slot); + + let outer_key_id = metadata.input_columns()[0].identifier().0; + + let circuit_input = CircuitInput::new_mapping_variable_leaf( + node, + *slot as u8, + mapping_key.clone(), + outer_key_id, + evm_word, + table_info.to_vec(), + ); + + (metadata_digest, values_digest, circuit_input) + } + StorageSlot::Node(StorageSlotNode::Struct(parent, _)) => match *parent.clone() { + // Simple Struct StorageSlot::Simple(slot) => { let metadata_digest = metadata.digest(); - let values_digest = storage_value_digest(&metadata, &[], &value, evm_word); + let values_digest = storage_value_digest(&metadata, &[], &value, &test_slot); let circuit_input = CircuitInput::new_single_variable_leaf( node, - *slot as u8, + slot as u8, evm_word, table_info.to_vec(), ); (metadata_digest, values_digest, circuit_input) } - // Mapping variable + // Mapping Struct StorageSlot::Mapping(mapping_key, slot) => { - let padded_key = left_pad32(mapping_key); + let padded_key = left_pad32(&mapping_key); let metadata_digest = metadata.digest(); let values_digest = - storage_value_digest(&metadata, &[&padded_key], &value, evm_word); + storage_value_digest(&metadata, &[&padded_key], &value, &test_slot); let outer_key_id = metadata.input_columns()[0].identifier().0; let circuit_input = CircuitInput::new_mapping_variable_leaf( node, - *slot as u8, - mapping_key.clone(), + slot as u8, + mapping_key, outer_key_id, evm_word, table_info.to_vec(), @@ -1114,79 +1150,44 @@ mod tests { (metadata_digest, values_digest, circuit_input) } - StorageSlot::Node(StorageSlotNode::Struct(parent, _)) => match *parent.clone() { - // Simple Struct - StorageSlot::Simple(slot) => { 
- let metadata_digest = metadata.digest(); - let values_digest = storage_value_digest(&metadata, &[], &value, evm_word); - - let circuit_input = CircuitInput::new_single_variable_leaf( - node, - slot as u8, - evm_word, - table_info.to_vec(), - ); - - (metadata_digest, values_digest, circuit_input) - } - // Mapping Struct - StorageSlot::Mapping(mapping_key, slot) => { - let padded_key = left_pad32(&mapping_key); - let metadata_digest = metadata.digest(); - let values_digest = - storage_value_digest(&metadata, &[&padded_key], &value, evm_word); - - let outer_key_id = metadata.input_columns()[0].identifier().0; - - let circuit_input = CircuitInput::new_mapping_variable_leaf( - node, - slot as u8, - mapping_key, - outer_key_id, - evm_word, - table_info.to_vec(), - ); - - (metadata_digest, values_digest, circuit_input) - } - // Mapping of mappings Struct - StorageSlot::Node(StorageSlotNode::Mapping(grand, inner_mapping_key)) => { - match *grand { - StorageSlot::Mapping(outer_mapping_key, slot) => { - let padded_outer_key = left_pad32(&outer_mapping_key); - let padded_inner_key = left_pad32(&inner_mapping_key); - let metadata_digest = metadata.digest(); - let values_digest = storage_value_digest( - &metadata, - &[&padded_outer_key, &padded_inner_key], - &value, - evm_word, - ); - - let key_ids = metadata - .input_columns() - .iter() - .map(|col| col.identifier().0) - .collect::>(); - - let circuit_input = CircuitInput::new_mapping_of_mappings_leaf( - node, - slot as u8, - (outer_mapping_key, key_ids[0]), - (inner_mapping_key, key_ids[1]), - evm_word, - table_info.to_vec(), - ); - - (metadata_digest, values_digest, circuit_input) - } - _ => unreachable!(), + // Mapping of mappings Struct + StorageSlot::Node(StorageSlotNode::Mapping(grand, inner_mapping_key)) => { + match *grand { + StorageSlot::Mapping(outer_mapping_key, slot) => { + let padded_outer_key = left_pad32(&outer_mapping_key); + let padded_inner_key = left_pad32(&inner_mapping_key); + let metadata_digest = 
metadata.digest(); + let values_digest = storage_value_digest( + &metadata, + &[&padded_outer_key, &padded_inner_key], + &value, + &test_slot, + ); + + let key_ids = metadata + .input_columns() + .iter() + .map(|col| col.identifier().0) + .collect::>(); + + let circuit_input = CircuitInput::new_mapping_of_mappings_leaf( + node, + slot as u8, + (outer_mapping_key, key_ids[0]), + (inner_mapping_key, key_ids[1]), + evm_word, + table_info.to_vec(), + ); + + (metadata_digest, values_digest, circuit_input) } + _ => unreachable!(), } - _ => unreachable!(), - }, + } _ => unreachable!(), - }; + }, + _ => unreachable!(), + }; let proof = generate_proof(params, circuit_input).unwrap(); diff --git a/mp2-v1/src/values_extraction/branch.rs b/mp2-v1/src/values_extraction/branch.rs index 2b2aedbb5..0498144f4 100644 --- a/mp2-v1/src/values_extraction/branch.rs +++ b/mp2-v1/src/values_extraction/branch.rs @@ -10,7 +10,7 @@ use mp2_common::{ public_inputs::PublicInputCommon, rlp::{decode_fixed_list, MAX_ITEMS_IN_LIST, MAX_KEY_NIBBLE_LEN}, types::{CBuilder, GFp}, - utils::{less_than, Endianness, PackerTarget}, + utils::{Endianness, PackerTarget}, D, }; use plonky2::{ @@ -93,14 +93,16 @@ where let headers = decode_fixed_list::<_, D, MAX_ITEMS_IN_LIST>(b, &node.arr.arr, zero); let zero_point = b.curve_zero(); + let mut should_process = b._false(); let mut seen_nibbles = vec![]; for (i, proof_inputs) in inputs.iter().enumerate() { let it = b.constant(GFp::from_canonical_usize(i)); - let should_process = less_than(b, it, n_proof_valid, 5); + let proof_limit = b.is_equal(it, n_proof_valid); + should_process = b.or(should_process, proof_limit); // Accumulate the values digest. 
let child_digest = proof_inputs.values_digest_target(); - let child_digest = b.curve_select(should_process, child_digest, zero_point); + let child_digest = b.curve_select(should_process, zero_point, child_digest); values_digest = b.curve_add(values_digest, child_digest); let child_digest = proof_inputs.metadata_digest_target(); @@ -112,7 +114,7 @@ where } // Add the number of leaves this proof has processed. - let maybe_n = b.select(should_process, proof_inputs.n(), zero); + let maybe_n = b.select(should_process, zero, proof_inputs.n()); n = b.add(n, maybe_n); let child_key = proof_inputs.mpt_key(); @@ -125,7 +127,7 @@ where // Make sure we don't process twice the same proof for same nibble. seen_nibbles.iter().for_each(|sn| { let is_equal = b.is_equal(*sn, nibble); - let should_be_false = b.select(should_process, is_equal.target, ffalse.target); + let should_be_false = b.select(should_process, ffalse.target, is_equal.target); b.connect(should_be_false, ffalse.target); }); seen_nibbles.push(nibble); diff --git a/mp2-v1/src/values_extraction/gadgets/column_gadget.rs b/mp2-v1/src/values_extraction/gadgets/column_gadget.rs deleted file mode 100644 index 291b8a56c..000000000 --- a/mp2-v1/src/values_extraction/gadgets/column_gadget.rs +++ /dev/null @@ -1,521 +0,0 @@ -//! The column gadget is used to extract either a single column when it’s a simple value or -//! multiple columns for struct. 
- -use super::column_info::{ColumnInfo, ColumnInfoTarget}; -use itertools::Itertools; -use mp2_common::{ - array::{Array, VectorWire}, - eth::left_pad32, - group_hashing::{map_to_curve_point, CircuitBuilderGroupHashing}, - types::{CBuilder, MAPPING_LEAF_VALUE_LEN}, - utils::{Endianness, Packer, PackerTarget}, - F, -}; -use plonky2::{ - field::types::{Field, PrimeField64}, - iop::target::{BoolTarget, Target}, -}; -use plonky2_ecgfp5::{ - curve::curve::Point, - gadgets::curve::{CircuitBuilderEcGFp5, CurveTarget}, -}; -use std::{array, iter::once}; - -/// Number of lookup tables for getting the first bits or last bits of a byte as a big-endian integer -const NUM_BITS_LOOKUP_TABLES: usize = 7; - -#[derive(Debug)] -pub(crate) struct ColumnGadget<'a, const MAX_FIELD_PER_EVM: usize> { - /// Value bytes to extract the struct - value: &'a [Target; MAPPING_LEAF_VALUE_LEN], - /// Information about all columns of the table to be extracted - table_info: &'a [ColumnInfoTarget], - /// Boolean flags specifying whether the i-th field being processed has to be extracted into a column or not - is_extracted_columns: &'a [BoolTarget], -} - -impl<'a, const MAX_FIELD_PER_EVM: usize> ColumnGadget<'a, MAX_FIELD_PER_EVM> { - pub(crate) fn new( - value: &'a [Target; MAPPING_LEAF_VALUE_LEN], - table_info: &'a [ColumnInfoTarget], - is_extracted_columns: &'a [BoolTarget], - ) -> Self { - assert_eq!(table_info.len(), MAX_FIELD_PER_EVM); - assert_eq!(is_extracted_columns.len(), MAX_FIELD_PER_EVM); - - Self { - value, - table_info, - is_extracted_columns, - } - } - - pub(crate) fn build(&self, b: &mut CBuilder) -> CurveTarget { - // Initialize the lookup tables for getting the first bits and last bits of a byte - // as a big-endian integer. - let bytes = &(0..=u8::MAX as u16).collect_vec(); - let mut lookup_inputs = [bytes; NUM_BITS_LOOKUP_TABLES]; - // This maxiumn lookup value is `u8::MAX + 8`, since the maxiumn `info.length` is 256, - // and we need to compute `first_bits_5(info.length + 7)`. 
- let first_bits_5_input = (0..=u8::MAX as u16 + 8).collect_vec(); - lookup_inputs[4] = &first_bits_5_input; - let first_bits_lookup_indexes = add_first_bits_lookup_tables(b, lookup_inputs); - lookup_inputs[4] = bytes; - // This maxiumn lookup value is `256`, since the maxiumn `info.length` is 256, - // and we need to compute `last_bits_3(info.length)`. - let last_bits_3_input = (0..=u8::MAX as u16 + 1).collect_vec(); - lookup_inputs[2] = &last_bits_3_input; - let last_bits_lookup_indexes = add_last_bits_lookup_tables(b, lookup_inputs); - - // Accumulate to compute the value digest. - let mut value_digest = b.curve_zero(); - (0..MAX_FIELD_PER_EVM).for_each(|i| { - // Get the column info to extract. - let info = &self.table_info[i]; - // Get the flag if the field has to be extracted. - let is_extracted = self.is_extracted_columns[i]; - - // Extract the value by column info. - let extracted_value = extract_value_target( - b, - info, - self.value, - &first_bits_lookup_indexes, - &last_bits_lookup_indexes, - ); - - // Compute and accumulate to the value digest only if the current field has to be - // extracted in a column. - // digest = D(info.identifier || pack(extracted_value)) - let inputs = once(info.identifier) - .chain(extracted_value.pack(b, Endianness::Big)) - .collect_vec(); - let digest = b.map_to_curve_point(&inputs); - // new_value_digest = value_digest + digest - let new_value_digest = b.add_curve_point(&[value_digest, digest]); - // value_digest = is_extracted ? new_value_digest : value_digest - value_digest = b.curve_select(is_extracted, new_value_digest, value_digest); - }); - - value_digest - } -} - -/// Get the first bits of a byte as a big-endian integer. -const fn first_bits(byte: u16, n: u8) -> u16 { - byte >> (8 - n) -} - -/// Get the last bits of a byte as a big-endian integer. 
-const fn last_bits(byte: u16, n: u8) -> u16 { - byte & ((1 << n) - 1) -} - -/// Macro to generate the lookup functions for getting first bits of a byte -/// as a big-endian integer -macro_rules! first_bits_lookup_funs { - ($($n:expr),*) => { - [ - $(|byte: u16| first_bits(byte, $n)),* - ] - }; -} - -/// Macro to generate the lookup functions for getting last bits of a byte -/// as a big-endian integer -macro_rules! last_bits_lookup_funs { - ($($n:expr),*) => { - [ - $(|byte: u16| last_bits(byte, $n)),* - ] - }; -} - -/// Add the lookup tables for getting the first bits of a byte -/// as a big-endian integer. And return the indexes of lookup tables. -fn add_first_bits_lookup_tables( - b: &mut CBuilder, - inputs: [&Vec; NUM_BITS_LOOKUP_TABLES], -) -> [usize; NUM_BITS_LOOKUP_TABLES] { - let lookup_funs = first_bits_lookup_funs!(1, 2, 3, 4, 5, 6, 7); - - array::from_fn(|i| b.add_lookup_table_from_fn(lookup_funs[i], inputs[i])) -} - -/// Add the lookup tables for getting the last bits of a byte -/// as a big-endian integer. And return the indexes of lookup tables. -fn add_last_bits_lookup_tables( - b: &mut CBuilder, - inputs: [&Vec; NUM_BITS_LOOKUP_TABLES], -) -> [usize; NUM_BITS_LOOKUP_TABLES] { - let lookup_funs = last_bits_lookup_funs!(1, 2, 3, 4, 5, 6, 7); - - array::from_fn(|i| b.add_lookup_table_from_fn(lookup_funs[i], inputs[i])) -} - -/// Extract the value by the column info. -fn extract_value_target( - b: &mut CBuilder, - info: &ColumnInfoTarget, - value_bytes: &[Target; MAPPING_LEAF_VALUE_LEN], - first_bits_lookup_indexes: &[usize; NUM_BITS_LOOKUP_TABLES], - last_bits_lookup_indexes: &[usize; NUM_BITS_LOOKUP_TABLES], -) -> [Target; MAPPING_LEAF_VALUE_LEN] { - let zero = b.zero(); - - // Extract all the bits of the field aligined with bytes. - let mut aligned_bytes = Vec::with_capacity(MAPPING_LEAF_VALUE_LEN); - for i in 0..MAPPING_LEAF_VALUE_LEN { - // Get the current and next bytes. 
- let current_byte = value_bytes[i]; - let next_byte = if i < MAPPING_LEAF_VALUE_LEN - 1 { - value_bytes[i + 1] - } else { - zero - }; - - // Compute the possible bytes. - let mut possible_bytes = Vec::with_capacity(8); - // byte0 = last_bits_8(current_byte) * 2^0 + first_bits_0(next_byte) = current_byte - possible_bytes.push(current_byte); - // byte1 = last_bits_7(current_byte) * 2^1 + first_bits_1(next_byte) - // byte2 = last_bits_6(current_byte) * 2^2 + first_bits_2(next_byte) - // ... - // byte7 = last_bits_1(current_byte) * 2^7 + first_bits_7(next_byte) - for j in 0..7 { - let first_part = if i < MAPPING_LEAF_VALUE_LEN - 1 { - b.add_lookup_from_index(next_byte, first_bits_lookup_indexes[j]) - } else { - zero - }; - let last_part = b.add_lookup_from_index( - current_byte, - last_bits_lookup_indexes[NUM_BITS_LOOKUP_TABLES - 1 - j], - ); - let last_part = b.mul_const(F::from_canonical_u8(1 << (j + 1)), last_part); - let byte = b.add(first_part, last_part); - possible_bytes.push(byte); - } - - // Get the actual byte. - let actual_byte = b.random_access(info.bit_offset, possible_bytes); - aligned_bytes.push(actual_byte); - } - - // Next we need to extract in a vector from aligned_bytes[info.byte_offset] to aligned_bytes[last_byte_offset]. - // last_byte_offset = info.byte_offset + ceil(info.length / 8) - 1 - // => length_bytes = ceil(info.length / 8) = first_bits_5(info.length + 7) - // => last_byte_offset = info.byte_offset + length_bytes - 1 - let length = b.add_const(info.length, F::from_canonical_u8(7)); - let length_bytes = b.add_lookup_from_index(length, first_bits_lookup_indexes[4]); - let last_byte_offset = b.add(info.byte_offset, length_bytes); - let last_byte_offset = b.add_const(last_byte_offset, F::NEG_ONE); - - // Extract from aligned_bytes[info.byte_offset] to aligned_bytes[last_byte_offset]. 
- let mut last_byte_found = b._false(); - let mut result_bytes = Vec::with_capacity(MAPPING_LEAF_VALUE_LEN); - for i in 0..MAPPING_LEAF_VALUE_LEN { - // offset = info.byte_offset + i - let offset = b.add_const(info.byte_offset, F::from_canonical_usize(i)); - // Set to 0 if found the last byte. - let offset = b.select(last_byte_found, zero, offset); - let byte = b.random_access(offset, aligned_bytes.clone()); - result_bytes.push(byte); - // is_last_byte = offset == last_byte_offset - let is_last_byte = b.is_equal(offset, last_byte_offset); - // last_byte_found |= is_last_byte - last_byte_found = b.or(last_byte_found, is_last_byte); - } - - // real_len = last_byte_offset - byte_offset + 1 = length_bytes - // result_vec = {result_bytes, real_len} - // result = result_vec.normalize_left() - let arr: Array = result_bytes.try_into().unwrap(); - let result_vec = VectorWire { - arr, - real_len: length_bytes, - }; - let result: Array = result_vec.normalize_left(b); - let mut result = result.arr; - - // At last we need to retain only the first `info.length % 8` bits for - // the last byte of result. - // length_mod_8 = last_bits_3(info.length) - let length_mod_8 = b.add_lookup_from_index(info.length, last_bits_lookup_indexes[2]); - let last_byte = result[31]; - // We need to compute `first_bits_{length_mod_8}(last_byte)`. - let mut possible_bytes = Vec::with_capacity(8); - // If length_mod_8 == 0, we don't need to cut any bit. - // byte0 = last_byte - possible_bytes.push(last_byte); - first_bits_lookup_indexes.iter().for_each(|lookup_index| { - // byte1 = first_bits_1(last_byte) - // byte2 = first_bits_2(last_byte) - // ... 
- // byte7 = first_bits_7(last_byte) - let byte = b.add_lookup_from_index(last_byte, *lookup_index); - possible_bytes.push(byte); - }); - result[31] = b.random_access(length_mod_8, possible_bytes); - - result -} - -#[derive(Clone, Debug)] -pub struct ColumnGadgetData { - pub(crate) value: [F; MAPPING_LEAF_VALUE_LEN], - pub(crate) table_info: [ColumnInfo; MAX_FIELD_PER_EVM], - pub(crate) num_extracted_columns: usize, -} - -impl ColumnGadgetData { - /// Create a new data. - pub fn new( - mut table_info: Vec, - extracted_column_identifiers: &[u64], - value: [u8; MAPPING_LEAF_VALUE_LEN], - ) -> Self { - let num_extracted_columns = extracted_column_identifiers.len(); - assert!(num_extracted_columns <= MAX_FIELD_PER_EVM); - - // Move the extracted columns to the front the vector of column information. - table_info.sort_by_key(|column_info| { - !extracted_column_identifiers.contains(&column_info.identifier.to_canonical_u64()) - }); - - // Extend the column information vector with the last element. - let last_column_info = table_info.last().cloned().unwrap_or(ColumnInfo::default()); - table_info.resize(MAX_FIELD_PER_EVM, last_column_info); - let table_info = table_info.try_into().unwrap(); - - let value = value.map(F::from_canonical_u8); - - Self { - value, - table_info, - num_extracted_columns, - } - } - - /// Compute the values digest. 
- pub fn digest(&self) -> Point { - let value = self - .value - .map(|f| u8::try_from(f.to_canonical_u64()).unwrap()); - self.table_info[..self.num_extracted_columns] - .iter() - .fold(Point::NEUTRAL, |acc, info| { - let extracted_value = extract_value(&value, info); - - // digest = D(info.identifier || pack(extracted_value)) - let inputs = once(info.identifier) - .chain( - extracted_value - .pack(Endianness::Big) - .into_iter() - .map(F::from_canonical_u32), - ) - .collect_vec(); - let digest = map_to_curve_point(&inputs); - - acc + digest - }) - } -} - -pub fn extract_value( - value_bytes: &[u8; MAPPING_LEAF_VALUE_LEN], - info: &ColumnInfo, -) -> [u8; MAPPING_LEAF_VALUE_LEN] { - let bit_offset = u8::try_from(info.bit_offset.to_canonical_u64()).unwrap(); - assert!(bit_offset <= 8); - let [byte_offset, length] = - [info.byte_offset, info.length].map(|f| usize::try_from(f.to_canonical_u64()).unwrap()); - - // last_byte_offset = info.byte_offset + ceil(info.length / 8) - 1 - let last_byte_offset = byte_offset + length.div_ceil(8) - 1; - - // Extract all the bits of the field aligined with bytes. - let mut result_bytes = Vec::with_capacity(last_byte_offset - byte_offset + 1); - for i in byte_offset..=last_byte_offset { - // Get the current and next bytes. - let current_byte = u16::from(value_bytes[i]); - let next_byte = if i < MAPPING_LEAF_VALUE_LEN - 1 { - u16::from(value_bytes[i + 1]) - } else { - 0 - }; - - // actual_byte = last_bits(current_byte, 8 - bit_offset) * 2^bit_offset + first_bits(next_byte, bit_offset) - let actual_byte = (last_bits(current_byte, 8 - bit_offset) << bit_offset) - + first_bits(next_byte, bit_offset); - - result_bytes.push(u8::try_from(actual_byte).unwrap()); - } - - // At last we need to retain only the first `info.length % 8` bits for - // the last byte of result. - let mut last_byte = u16::from(*result_bytes.last().unwrap()); - let length_mod_8 = length % 8; - if length_mod_8 > 0 { - // If length_mod_8 == 0, we don't need to cut any bit. 
- last_byte = first_bits(last_byte, u8::try_from(length_mod_8).unwrap()); - } - *result_bytes.last_mut().unwrap() = u8::try_from(last_byte).unwrap(); - - // Normalize left. - left_pad32(&result_bytes) -} - -/// Filter to get the column identifiers of one table by the slot and EVM word. -/// We save multiple simple slots in one table, and only one mapping slot in one table. -pub fn filter_table_column_identifiers( - table_info: &[ColumnInfo], - slot: u8, - evm_word: u32, -) -> Vec { - table_info - .iter() - .filter_map(|col_info| { - if col_info.slot() == F::from_canonical_u8(slot) - && col_info.evm_word() == F::from_canonical_u32(evm_word) - { - Some(col_info.identifier().to_canonical_u64()) - } else { - None - } - }) - .collect() -} - -#[cfg(test)] -pub(crate) mod tests { - use super::{super::column_info::ColumnInfoTarget, *}; - use crate::{ - tests::TEST_MAX_FIELD_PER_EVM, - values_extraction::gadgets::column_info::{ - CircuitBuilderColumnInfo, WitnessWriteColumnInfo, - }, - }; - use mp2_common::{C, D}; - use mp2_test::circuit::{run_circuit, UserCircuit}; - use plonky2::iop::witness::{PartialWitness, WitnessWrite}; - use plonky2_ecgfp5::gadgets::curve::PartialWitnessCurve; - use rand::{thread_rng, Rng}; - - #[derive(Clone, Debug)] - pub(crate) struct ColumnGadgetTarget { - value: [Target; MAPPING_LEAF_VALUE_LEN], - table_info: [ColumnInfoTarget; MAX_FIELD_PER_EVM], - is_extracted_columns: [BoolTarget; MAX_FIELD_PER_EVM], - } - - impl ColumnGadgetTarget { - fn column_gadget(&self) -> ColumnGadget { - ColumnGadget::new(&self.value, &self.table_info, &self.is_extracted_columns) - } - } - - pub(crate) trait CircuitBuilderColumnGadget { - /// Add a virtual column gadget target. 
- fn add_virtual_column_gadget_target( - &mut self, - ) -> ColumnGadgetTarget; - } - - impl CircuitBuilderColumnGadget for CBuilder { - fn add_virtual_column_gadget_target( - &mut self, - ) -> ColumnGadgetTarget { - let value = self.add_virtual_target_arr(); - let table_info = array::from_fn(|_| self.add_virtual_column_info()); - let is_extracted_columns = array::from_fn(|_| self.add_virtual_bool_target_safe()); - - ColumnGadgetTarget { - value, - table_info, - is_extracted_columns, - } - } - } - - pub(crate) trait WitnessWriteColumnGadget { - fn set_column_gadget_target( - &mut self, - target: &ColumnGadgetTarget, - value: &ColumnGadgetData, - ); - } - - impl> WitnessWriteColumnGadget for T { - fn set_column_gadget_target( - &mut self, - target: &ColumnGadgetTarget, - data: &ColumnGadgetData, - ) { - self.set_target_arr(&target.value, &data.value); - self.set_column_info_target_arr(&target.table_info, &data.table_info); - target - .is_extracted_columns - .iter() - .enumerate() - .for_each(|(i, t)| self.set_bool_target(*t, i < data.num_extracted_columns)); - } - } - - impl ColumnGadgetData { - /// Create a sample data. It could be used in integration tests. 
- pub(crate) fn sample() -> Self { - let rng = &mut thread_rng(); - - let value = array::from_fn(|_| F::from_canonical_u8(rng.gen())); - let table_info = array::from_fn(|_| ColumnInfo::sample()); - let num_extracted_columns = rng.gen_range(1..=MAX_FIELD_PER_EVM); - - Self { - value, - table_info, - num_extracted_columns, - } - } - } - - #[derive(Clone, Debug)] - struct TestColumnGadgetCircuit { - column_gadget_data: ColumnGadgetData, - expected_column_digest: Point, - } - - impl UserCircuit for TestColumnGadgetCircuit { - // Column gadget target + expected column digest - type Wires = (ColumnGadgetTarget, CurveTarget); - - fn build(b: &mut CBuilder) -> Self::Wires { - let column_gadget_target = b.add_virtual_column_gadget_target(); - let expected_column_digest = b.add_virtual_curve_target(); - - let column_digest = column_gadget_target.column_gadget().build(b); - b.connect_curve_points(column_digest, expected_column_digest); - - (column_gadget_target, expected_column_digest) - } - - fn prove(&self, pw: &mut PartialWitness, wires: &Self::Wires) { - pw.set_column_gadget_target(&wires.0, &self.column_gadget_data); - pw.set_curve_target(wires.1, self.expected_column_digest.to_weierstrass()); - } - } - - #[test] - fn test_values_extraction_column_gadget() { - let column_gadget_data = ColumnGadgetData::sample(); - let expected_column_digest = column_gadget_data.digest(); - - let test_circuit = TestColumnGadgetCircuit { - column_gadget_data, - expected_column_digest, - }; - - let _ = run_circuit::(test_circuit); - } -} diff --git a/mp2-v1/src/values_extraction/gadgets/metadata_gadget.rs b/mp2-v1/src/values_extraction/gadgets/metadata_gadget.rs index a952bb84c..761be2bb0 100644 --- a/mp2-v1/src/values_extraction/gadgets/metadata_gadget.rs +++ b/mp2-v1/src/values_extraction/gadgets/metadata_gadget.rs @@ -10,7 +10,7 @@ use super::column_info::{ use itertools::Itertools; use mp2_common::{ array::{Array, Targetable}, - eth::{left_pad32, EventLogInfo}, + eth::{left_pad32, 
EventLogInfo, StorageSlot}, group_hashing::CircuitBuilderGroupHashing, poseidon::{empty_poseidon_hash, hash_to_int_value, H}, serialization::{ @@ -159,12 +159,16 @@ impl TableMetadata { (point, H::hash_no_pad(&row_id_input).into()) } - pub fn extracted_value_digest(&self, value: &[u8], location_offset: F) -> Point { + pub fn extracted_value_digest(&self, value: &[u8], slot: &StorageSlot) -> Point { + let mut slot_extraction_id = [F::ZERO; 8]; + slot_extraction_id[7] = F::from_canonical_u8(slot.slot()); + let location_offset = F::from_canonical_u32(slot.evm_offset()); self.extracted_columns() .iter() .fold(Point::NEUTRAL, |acc, column| { + let correct_extraction_id = slot_extraction_id == column.extraction_id(); let correct_location = location_offset == column.location_offset(); - if correct_location { + if correct_location && correct_extraction_id { acc + column.value_digest(value) } else { acc @@ -251,12 +255,11 @@ impl TableMetadata { &self, input_vals: &[[u8; 32]], value: &[u8], - location_offset: u32, + slot: &StorageSlot, ) -> Point { - let location_offset = F::from_canonical_u32(location_offset); let (input_vd, row_unique) = self.input_value_digest(input_vals); - let extract_vd = self.extracted_value_digest(value, location_offset); + let extract_vd = self.extracted_value_digest(value, slot); let inputs = if self.input_columns().is_empty() { empty_poseidon_hash() @@ -280,7 +283,7 @@ impl TableMetadata { // values_digest = values_digest * row_id let row_id = Scalar::from_noncanonical_biguint(row_id); - if location_offset.0 == 0 { + if slot.evm_offset() == 0 { (extract_vd + input_vd) * row_id } else { extract_vd * row_id @@ -327,7 +330,7 @@ impl .extracted_columns .iter() .copied() - .chain(std::iter::repeat(ExtractedColumnInfo::default())) + .chain(std::iter::repeat(columns_metadata.extracted_columns[0])) .take(MAX_EXTRACTED_COLUMNS) .collect::>(); pw.set_extracted_column_info_target_arr( diff --git a/mp2-v1/src/values_extraction/leaf_mapping.rs 
b/mp2-v1/src/values_extraction/leaf_mapping.rs index 3502e9112..d0987fc3b 100644 --- a/mp2-v1/src/values_extraction/leaf_mapping.rs +++ b/mp2-v1/src/values_extraction/leaf_mapping.rs @@ -191,7 +191,7 @@ mod tests { use super::*; use crate::{ tests::TEST_MAX_COLUMNS, - values_extraction::{storage_value_digest, KEY_ID_PREFIX}, + values_extraction::{storage_value_digest, StorageSlotInfo, KEY_ID_PREFIX}, }; use eth_trie::{Nibbles, Trie}; use mp2_common::{ @@ -267,12 +267,15 @@ mod tests { ); let metadata_digest = table_metadata.digest(); - + let slot_info = StorageSlotInfo::new( + storage_slot.clone(), + table_metadata.extracted_columns.clone(), + ); let values_digest = storage_value_digest( &table_metadata, &[mapping_key], &value.clone().try_into().unwrap(), - evm_word, + &slot_info, ); let slot = MappingSlot::new(slot, mapping_key.to_vec()); diff --git a/mp2-v1/src/values_extraction/leaf_mapping_of_mappings.rs b/mp2-v1/src/values_extraction/leaf_mapping_of_mappings.rs index 148b0617a..3b783e312 100644 --- a/mp2-v1/src/values_extraction/leaf_mapping_of_mappings.rs +++ b/mp2-v1/src/values_extraction/leaf_mapping_of_mappings.rs @@ -207,7 +207,9 @@ mod tests { use super::*; use crate::{ tests::TEST_MAX_COLUMNS, - values_extraction::{storage_value_digest, INNER_KEY_ID_PREFIX, OUTER_KEY_ID_PREFIX}, + values_extraction::{ + storage_value_digest, StorageSlotInfo, INNER_KEY_ID_PREFIX, OUTER_KEY_ID_PREFIX, + }, }; use eth_trie::{Nibbles, Trie}; use mp2_common::{ @@ -288,11 +290,15 @@ mod tests { ); let metadata_digest = table_metadata.digest(); + let slot_info = StorageSlotInfo::new( + storage_slot.clone(), + table_metadata.extracted_columns.clone(), + ); let values_digest = storage_value_digest( &table_metadata, &[outer_key, inner_key], &value.clone().try_into().unwrap(), - evm_word, + &slot_info, ); let slot = MappingSlot::new(slot, outer_key.to_vec()); diff --git a/mp2-v1/src/values_extraction/leaf_single.rs b/mp2-v1/src/values_extraction/leaf_single.rs index 
5695aff25..94a045107 100644 --- a/mp2-v1/src/values_extraction/leaf_single.rs +++ b/mp2-v1/src/values_extraction/leaf_single.rs @@ -165,7 +165,10 @@ impl CircuitLogicWires #[cfg(test)] mod tests { use super::*; - use crate::{tests::TEST_MAX_COLUMNS, values_extraction::storage_value_digest}; + use crate::{ + tests::TEST_MAX_COLUMNS, + values_extraction::{storage_value_digest, StorageSlotInfo}, + }; use eth_trie::{Nibbles, Trie}; use mp2_common::{ array::Array, @@ -238,13 +241,17 @@ mod tests { ); let metadata_digest = table_metadata.digest(); + + let slot_info = StorageSlotInfo::new( + storage_slot.clone(), + table_metadata.extracted_columns.clone(), + ); let values_digest = storage_value_digest( &table_metadata, &[], &value.clone().try_into().unwrap(), - evm_word, + &slot_info, ); - let slot = SimpleSlot::new(slot); let c = LeafCircuit { node: node.clone(), diff --git a/mp2-v1/src/values_extraction/mod.rs b/mp2-v1/src/values_extraction/mod.rs index cff827d38..7edcf7119 100644 --- a/mp2-v1/src/values_extraction/mod.rs +++ b/mp2-v1/src/values_extraction/mod.rs @@ -378,7 +378,7 @@ pub fn storage_value_digest( table_metadata: &TableMetadata, keys: &[&[u8]], value: &[u8; MAPPING_LEAF_VALUE_LEN], - evm_word: u32, + slot: &StorageSlotInfo, ) -> Digest { let padded_keys = keys .iter() @@ -392,5 +392,5 @@ pub fn storage_value_digest( keys.len(), table_metadata.input_columns.len() ); - table_metadata.storage_values_digest(padded_keys.as_slice(), value.as_slice(), evm_word) + table_metadata.storage_values_digest(padded_keys.as_slice(), value.as_slice(), slot.slot()) } From 8c963fa1b25c2fbf0709e09d40da2a9d083e787e Mon Sep 17 00:00:00 2001 From: Zack Youell Date: Mon, 3 Feb 2025 17:19:15 +0000 Subject: [PATCH 41/47] Updated Extraction Planner --- mp2-common/src/eth.rs | 17 +- mp2-common/src/proof.rs | 2 +- mp2-v1/src/values_extraction/api.rs | 2 +- mp2-v1/src/values_extraction/planner.rs | 565 ++++++++++++++-------- mp2-v1/tests/common/cases/table_source.rs | 2 +- 
ryhope/src/storage/updatetree.rs | 20 + 6 files changed, 385 insertions(+), 223 deletions(-) diff --git a/mp2-common/src/eth.rs b/mp2-common/src/eth.rs index a3e03388d..b57385c12 100644 --- a/mp2-common/src/eth.rs +++ b/mp2-common/src/eth.rs @@ -141,7 +141,7 @@ pub fn extract_child_hashes(rlp_data: &[u8]) -> Vec> { } /// Enum used to distinguish between different types of node in an MPT. -#[derive(Clone, Debug, Copy, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)] +#[derive(Clone, Debug, Copy, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize, Hash)] pub enum NodeType { Branch, Extension, @@ -774,7 +774,7 @@ impl ReceiptQuery Result, MP2EthError> { let mpt_root = block_util.receipts_trie.root_hash()?; - let proofs = tx_indices + tx_indices .iter() .map(|&tx_index| { let key = tx_index.rlp_bytes(); @@ -787,18 +787,7 @@ impl ReceiptQuery, MP2EthError>>()?; - - // // In the case when proofs is empty we just need to provide a proof for the root node - // if proofs.is_empty() { - // // Transaction index 0 should always be present - // let key = 0u64.rlp_bytes(); - - // // Get the proof but just use the first node of the proof - // let proof = block_util.receipts_trie.get_proof(&key[..])?; - - // } - Ok(proofs) + .collect::, MP2EthError>>() } } diff --git a/mp2-common/src/proof.rs b/mp2-common/src/proof.rs index 4f9154f05..f9705f2f8 100644 --- a/mp2-common/src/proof.rs +++ b/mp2-common/src/proof.rs @@ -21,7 +21,7 @@ use serde::{Deserialize, Serialize}; /// The generic type `T` allows to specify the /// specific inputs of each circuits besides the proofs that need to be /// recursively verified, while the proofs are serialized in byte format. 
-#[derive(Serialize, Deserialize)] +#[derive(Debug, Clone, Serialize, Deserialize)] pub struct ProofInputSerialized { pub input: T, pub serialized_child_proofs: Vec>, diff --git a/mp2-v1/src/values_extraction/api.rs b/mp2-v1/src/values_extraction/api.rs index 24bf67d47..0ae58131c 100644 --- a/mp2-v1/src/values_extraction/api.rs +++ b/mp2-v1/src/values_extraction/api.rs @@ -48,7 +48,7 @@ const NUM_IO: usize = PublicInputs::::TOTAL_LEN; /// CircuitInput is a wrapper around the different specialized circuits that can /// be used to prove a MPT node recursively. -#[derive(Serialize, Deserialize)] +#[derive(Debug, Clone, Serialize, Deserialize)] pub enum CircuitInput where [(); PAD_LEN(LEAF_LEN)]:, diff --git a/mp2-v1/src/values_extraction/planner.rs b/mp2-v1/src/values_extraction/planner.rs index ee7e213d0..cab1cc586 100644 --- a/mp2-v1/src/values_extraction/planner.rs +++ b/mp2-v1/src/values_extraction/planner.rs @@ -1,15 +1,23 @@ //! This code returns an [`UpdateTree`] used to plan how we prove a series of values was extracted from a Merkle Patricia Trie. 
use alloy::{ + eips::BlockNumberOrTag, network::Ethereum, primitives::{keccak256, Address, B256}, - providers::RootProvider, + providers::{Provider, RootProvider}, transports::Transport, }; use anyhow::Result; -use mp2_common::eth::{node_type, EventLogInfo, MP2EthError, NodeType, ReceiptQuery}; -use ryhope::storage::updatetree::{Next, UpdateTree}; -use serde::{Deserialize, Serialize}; -use std::future::Future; +use mp2_common::{ + eth::{node_type, EventLogInfo, MP2EthError, NodeType, ReceiptQuery}, + mpt_sequential::PAD_LEN, +}; + +use ryhope::{ + error::RyhopeError, + storage::updatetree::{Next, UpdateTree}, +}; +use serde::{de::DeserializeOwned, Deserialize, Serialize}; +use std::{fmt::Debug, future::Future, hash::Hash}; use std::{ collections::HashMap, @@ -18,7 +26,9 @@ use std::{ write, }; -use super::{generate_proof, CircuitInput, PublicParameters}; +use super::{ + gadgets::metadata_gadget::TableMetadata, generate_proof, CircuitInput, PublicParameters, +}; #[derive(Debug)] /// Error enum used for Extractable data @@ -31,6 +41,8 @@ pub enum MP2PlannerError { EthError(MP2EthError), /// An error that occurs from a method in the proving API. 
ProvingError(String), + /// Error from within Ryhope + RyhopeError(RyhopeError), } impl Error for MP2PlannerError {} @@ -55,6 +67,9 @@ impl Display for MP2PlannerError { MP2PlannerError::ProvingError(s) => { write!(f, "Error while proving, extra message: {}", s) } + MP2PlannerError::RyhopeError(e) => { + write!(f, "Error in Ryhope method {{ inner: {:?} }}", e) + } } } } @@ -68,76 +83,204 @@ impl From for MP2PlannerError { } } +impl From for MP2PlannerError { + fn from(value: RyhopeError) -> Self { + MP2PlannerError::RyhopeError(value) + } +} + +/// Trait used to mark types that are needed as extra circuit inputs +pub trait ExtraInput {} + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub enum InputEnum { + Leaf(E::ExtraLeafInput), + Extension(Vec), + Branch(Vec>), + Dummy(B256), +} + +impl InputEnum { + /// Create a new Branch or extension node with empty input + pub fn empty_non_leaf(node: &[u8]) -> Result { + let node_type = node_type(node)?; + match node_type { + NodeType::Branch => Ok(InputEnum::Branch(vec![])), + NodeType::Extension => Ok(InputEnum::Extension(vec![])), + _ => Err(MP2PlannerError::UpdateTreeError("Tried to make an empty non leaf node from a node that wasn't a Branch or Extension".to_string())) + } + } +} + /// Trait that is implemented for all data that we can provably extract. -pub trait Extractable { +pub trait Extractable: Debug { + /// The extra info needed to make a leaf proof for this extraction type. 
+ type ExtraLeafInput: Clone + + Debug + + Serialize + + DeserializeOwned + + PartialEq + + Eq + + Ord + + PartialOrd + + Hash; + fn create_update_tree( &self, contract: Address, epoch: u64, provider: &RootProvider, - ) -> impl Future, MP2PlannerError>>; + ) -> impl Future, MP2PlannerError>> + where + Self: Sized; - fn prove_value_extraction( + fn to_circuit_input( + &self, + proof_data: &ProofData, + ) -> CircuitInput + where + [(); PAD_LEN(LEAF_LEN)]:, + Self: Sized; + + fn prove_value_extraction< + const MAX_EXTRACTED_COLUMNS: usize, + const LEAF_LEN: usize, + T: Transport + Clone, + >( &self, contract: Address, epoch: u64, - pp: &PublicParameters<512, MAX_COLUMNS>, + pp: &PublicParameters, provider: &RootProvider, - ) -> impl Future, MP2PlannerError>>; + ) -> impl Future, MP2PlannerError>> + where + [(); PAD_LEN(LEAF_LEN)]:; } -#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)] -struct ProofData { +#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize, Hash)] +pub struct ProofData { node: Vec, - node_type: NodeType, - tx_index: Option, - proof: Option>, + extra_inputs: InputEnum, } -impl ProofData { - pub fn new(node: Vec, node_type: NodeType, tx_index: Option) -> ProofData { - ProofData { - node, - node_type, - tx_index, - proof: None, +impl ProofData { + pub fn new(node: Vec, extra_inputs: InputEnum) -> ProofData { + ProofData:: { node, extra_inputs } + } + + /// Create a new instance of [`ProofData`] from a slice of [`u8`] + pub fn from_slice( + node: &[u8], + extra_inputs: InputEnum, + ) -> Result, MP2PlannerError> { + let node_type = node_type(node)?; + + // Check that the node type matches the extra input type we expect. 
+ if !matches!( + (node_type, &extra_inputs), + (NodeType::Branch, InputEnum::Branch(..)) + | (NodeType::Extension, InputEnum::Extension(..)) + | (NodeType::Leaf, InputEnum::Leaf(..)) + ) { + return Err(MP2PlannerError::ProvingError(format!( + "The node provided: {:?} did not match the extra input type provided: {:?} ", + node_type, extra_inputs + ))); } + + Ok(ProofData::::new(node.to_vec(), extra_inputs)) } -} -impl Extractable - for EventLogInfo -{ - async fn create_update_tree( - &self, - contract: Address, - epoch: u64, - provider: &RootProvider, - ) -> Result, MP2PlannerError> { - let query = ReceiptQuery:: { - contract, - event: *self, + /// Update a [`ProofData`] with a proof represented as a [`Vec`] + pub fn update(&mut self, proof: Vec) -> Result<(), MP2PlannerError> { + match self.extra_inputs { + InputEnum::Branch(ref mut proofs) => proofs.push(proof), + + InputEnum::Extension(ref mut inner_proof) => { + if !proof.is_empty() { + return Err(MP2PlannerError::UpdateTreeError( + "Can't update Extension ProofData if its child proof isn't empty" + .to_string(), + )); + } + *inner_proof = proof; + } + _ => { + return Err(MP2PlannerError::UpdateTreeError( + "Can't update a Proof Data that isn't an Extension or Branch".to_string(), + )) + } }; - let proofs = query.query_receipt_proofs(provider, epoch.into()).await?; + Ok(()) + } +} - // Convert the paths into their keys using keccak - let key_paths = proofs - .iter() - .map(|input| input.mpt_proof.iter().map(keccak256).collect::>()) - .collect::>>(); +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ExtractionUpdatePlan { + pub(crate) update_tree: UpdateTree, + pub(crate) proof_cache: HashMap>, +} + +impl ExtractionUpdatePlan { + pub fn new(update_tree: UpdateTree, proof_cache: HashMap>) -> Self { + Self { + update_tree, + proof_cache, + } + } + + pub fn process_locally( + &mut self, + params: &PublicParameters, + extractable: &E, + ) -> Result, MP2PlannerError> + where + [(); PAD_LEN(LEAF_LEN)]:, + { 
+ let mut update_plan = self.update_tree.clone().into_workplan(); + let mut final_proof = Vec::::new(); + while let Some(Next::Ready(work_plan_item)) = update_plan.next() { + let proof_data = self.proof_cache.get(work_plan_item.k()).ok_or( + MP2PlannerError::UpdateTreeError("Key not present in the proof cache".to_string()), + )?; + let circuit_type = extractable.to_circuit_input(proof_data); + + let proof = generate_proof(params, circuit_type).map_err(|e| { + MP2PlannerError::ProvingError(format!( + "Error while generating proof for node {{ inner: {:?} }}", + e + )) + })?; + + let parent = self.update_tree.get_parent_key(work_plan_item.k()); + + match parent { + Some(parent_key) => { + let proof_data_ref = self.proof_cache.get_mut(&parent_key).unwrap(); + proof_data_ref.update(proof)? + } + None => { + final_proof = proof; + } + } - // Now we make the UpdateTree - Ok(UpdateTree::::from_paths(key_paths, epoch as i64)) + update_plan.done(&work_plan_item)?; + } + Ok(final_proof) } +} - async fn prove_value_extraction( +impl Extractable + for EventLogInfo +{ + type ExtraLeafInput = u64; + async fn create_update_tree( &self, contract: Address, epoch: u64, - pp: &PublicParameters<512, MAX_EXTRACTED_COLUMNS>, provider: &RootProvider, - ) -> Result, MP2PlannerError> { + ) -> Result, MP2PlannerError> { let query = ReceiptQuery:: { contract, event: *self, @@ -145,166 +288,115 @@ impl Extractable let proofs = query.query_receipt_proofs(provider, epoch.into()).await?; - let mut data_store = HashMap::::new(); + let mut proof_cache = HashMap::>::new(); // Convert the paths into their keys using keccak - let key_paths = proofs - .iter() - .map(|input| { - let tx_index = input.tx_index; - input - .mpt_proof - .iter() - .map(|node| { - let node_key = keccak256(node); - let node_type = node_type(node)?; - let tx = if let NodeType::Leaf = node_type { - Some(tx_index) - } else { - None - }; - data_store.insert(node_key, ProofData::new(node.clone(), node_type, tx)); - - Ok(node_key) - 
}) - .collect::, MP2PlannerError>>() - }) - .collect::>, MP2PlannerError>>()?; - - let update_tree = UpdateTree::::from_paths(key_paths, epoch as i64); - - let mut update_plan = update_tree.clone().into_workplan(); - - while let Some(Next::Ready(work_plan_item)) = update_plan.next() { - let node_type = data_store - .get(work_plan_item.k()) - .ok_or(MP2PlannerError::UpdateTreeError(format!( - "No ProofData found for key: {:?}", - work_plan_item.k() - )))? - .node_type; - - let update_tree_node = update_tree.get_node(work_plan_item.k()).ok_or( - MP2PlannerError::UpdateTreeError(format!( - "No UpdateTreeNode found for key: {:?}", - work_plan_item.k(), - )), - )?; - - match node_type { - NodeType::Leaf => { - let proof_data = data_store.get_mut(work_plan_item.k()).ok_or( - MP2PlannerError::UpdateTreeError(format!( - "No ProofData found for key: {:?}", - work_plan_item.k() - )), - )?; - let input = CircuitInput::new_receipt_leaf( - &proof_data.node, - proof_data.tx_index.unwrap(), - self, - ); - let proof = generate_proof(pp, input).map_err(|_| { - MP2PlannerError::ProvingError( - "Error calling generate proof API".to_string(), - ) - })?; - proof_data.proof = Some(proof); - update_plan.done(&work_plan_item).map_err(|_| { - MP2PlannerError::UpdateTreeError( - "Could not mark work plan item as done".to_string(), - ) - })?; - } - NodeType::Extension => { - let child_key = update_tree.get_child_keys(update_tree_node); - if child_key.len() != 1 { - return Err(MP2PlannerError::ProvingError(format!( - "Expected nodes child keys to have length 1, actual length: {}", - child_key.len() - ))); - } - let child_proof = data_store - .get(&child_key[0]) - .ok_or(MP2PlannerError::UpdateTreeError(format!( - "Extension node child had no proof data for key: {:?}", - child_key[0] - )))? 
- .clone(); - let proof_data = data_store.get_mut(work_plan_item.k()).ok_or( - MP2PlannerError::UpdateTreeError(format!( - "No ProofData found for key: {:?}", - work_plan_item.k() - )), - )?; - let input = CircuitInput::new_extension( - proof_data.node.clone(), - child_proof.proof.ok_or(MP2PlannerError::UpdateTreeError( - "Extension node child proof was a None value".to_string(), - ))?, - ); - let proof = generate_proof(pp, input).map_err(|_| { - MP2PlannerError::ProvingError( - "Error calling generate proof API".to_string(), - ) - })?; - proof_data.proof = Some(proof); - update_plan.done(&work_plan_item).map_err(|_| { - MP2PlannerError::UpdateTreeError( - "Could not mark work plan item as done".to_string(), - ) - })?; - } - NodeType::Branch => { - let child_keys = update_tree.get_child_keys(update_tree_node); - let child_proofs = child_keys + if proofs.is_empty() { + let block = provider + .get_block_by_number(BlockNumberOrTag::Number(epoch), false.into()) + .await + .map_err(|_| MP2PlannerError::FetchError)? 
+ .ok_or(MP2PlannerError::UpdateTreeError( + "Fetched Block with no relevant events but the result was None".to_string(), + ))?; + let receipt_root = block.header.receipts_root; + + let dummy_input = InputEnum::Dummy(receipt_root); + let proof_data = ProofData:: { + node: vec![], + extra_inputs: dummy_input, + }; + + proof_cache.insert(receipt_root, proof_data); + + let update_tree = UpdateTree::::from_path(vec![receipt_root], epoch as i64); + + Ok(ExtractionUpdatePlan::new(update_tree, proof_cache)) + } else { + let key_paths = proofs + .iter() + .map(|input| { + let proof_len = input.mpt_proof.len(); + + // First we add the leaf and its proving data to the cache + let leaf = input + .mpt_proof + .last() + .ok_or(MP2PlannerError::UpdateTreeError( + "MPT proof had no nodes".to_string(), + ))?; + let leaf_key = keccak256(leaf); + let leaf_proof_data = + ProofData::::from_slice(leaf, InputEnum::Leaf(input.tx_index))?; + + proof_cache.insert(leaf_key, leaf_proof_data); + + input + .mpt_proof .iter() - .map(|key| { - data_store - .get(key) - .ok_or(MP2PlannerError::UpdateTreeError(format!( - "Branch child data could not be found for key: {:?}", - key - )))? 
- .clone() - .proof - .ok_or(MP2PlannerError::UpdateTreeError( - "No proof found in brnach node child".to_string(), - )) + .take(proof_len - 1) + .map(|proof_vec| { + let proof_key = keccak256(proof_vec); + let proof_input = InputEnum::::empty_non_leaf(proof_vec)?; + let proof_data = ProofData::::from_slice(proof_vec, proof_input)?; + proof_cache.insert(proof_key, proof_data); + Ok(proof_key) }) - .collect::>, MP2PlannerError>>()?; - let proof_data = data_store.get_mut(work_plan_item.k()).ok_or( - MP2PlannerError::UpdateTreeError(format!( - "No ProofData found for key: {:?}", - work_plan_item.k() - )), - )?; - let input = CircuitInput::new_branch(proof_data.node.clone(), child_proofs); - let proof = generate_proof(pp, input).map_err(|_| { - MP2PlannerError::ProvingError( - "Error calling generate proof API".to_string(), - ) - })?; - proof_data.proof = Some(proof); - update_plan.done(&work_plan_item).map_err(|_| { - MP2PlannerError::UpdateTreeError( - "Could not mark work plan item as done".to_string(), - ) - })?; - } + .chain(std::iter::once(Ok(leaf_key))) + .collect::, MP2PlannerError>>() + }) + .collect::>, MP2PlannerError>>()?; + + // Now we make the UpdateTree + let update_tree = UpdateTree::::from_paths(key_paths, epoch as i64); + + // Finally make the plan + Ok(ExtractionUpdatePlan::::new(update_tree, proof_cache)) + } + } + + fn to_circuit_input( + &self, + proof_data: &ProofData, + ) -> CircuitInput + where + [(); PAD_LEN(LEAF_LEN)]:, + Self: Sized, + { + let ProofData { node, extra_inputs } = proof_data; + match extra_inputs { + InputEnum::Branch(child_proofs) => { + CircuitInput::new_branch(node.clone(), child_proofs.clone()) + } + InputEnum::Extension(child_proof) => { + CircuitInput::new_extension(node.clone(), child_proof.clone()) + } + InputEnum::Leaf(tx_index) => CircuitInput::new_receipt_leaf(node, *tx_index, self), + InputEnum::Dummy(block_hash) => { + let metadata = TableMetadata::from_event_info(self); + let metadata_digest = metadata.digest(); + 
CircuitInput::new_dummy(*block_hash, metadata_digest) } } + } - let final_data = data_store - .get(update_tree.root()) - .ok_or(MP2PlannerError::UpdateTreeError( - "No data for root of update tree found".to_string(), - ))? - .clone(); + async fn prove_value_extraction< + const MAX_EXTRACTED_COLUMNS: usize, + const LEAF_LEN: usize, + T: Transport + Clone, + >( + &self, + contract: Address, + epoch: u64, + pp: &PublicParameters, + provider: &RootProvider, + ) -> Result, MP2PlannerError> + where + [(); PAD_LEN(LEAF_LEN)]:, + { + let mut extraction_plan = self.create_update_tree(contract, epoch, provider).await?; - final_data.proof.ok_or(MP2PlannerError::UpdateTreeError( - "No proof stored for final data".to_string(), - )) + extraction_plan.process_locally(pp, self) } } @@ -324,7 +416,7 @@ pub mod tests { }; use mp2_test::eth::get_mainnet_url; use plonky2::{field::types::Field, hash::hash_types::HashOut, plonk::config::Hasher}; - use plonky2_ecgfp5::curve::scalar_field::Scalar; + use plonky2_ecgfp5::curve::{curve::Point, scalar_field::Scalar}; use std::str::FromStr; use crate::values_extraction::{ @@ -345,13 +437,16 @@ pub mod tests { // get some tx and receipt let provider = ProviderBuilder::new().on_http(url.parse().unwrap()); - let update_tree = event_info + let extraction_plan = event_info .create_update_tree(contract, epoch, &provider) .await?; - let block_util = build_test_data().await; + let block_util = build_test_data(epoch).await; - assert_eq!(*update_tree.root(), block_util.block.header.receipts_root); + assert_eq!( + *extraction_plan.update_tree.root(), + block_util.block.header.receipts_root + ); Ok(()) } @@ -431,7 +526,7 @@ pub mod tests { let pi = PublicInputs::new(&final_proof.proof.public_inputs); - let mut block_util = build_test_data().await; + let mut block_util = build_test_data(epoch).await; // Check the output hash { assert_eq!( @@ -457,14 +552,72 @@ pub mod tests { Ok(()) } + #[tokio::test] + async fn test_empty_block_receipt_proving() -> 
Result<()> { + // First get the info we will feed in to our function + let event_info = test_receipt_trie_helper().await?; + + let contract = Address::from_str("0xbd3531da5cf5857e7cfaa92426877b022e612cf8")?; + let epoch: u64 = 21767312; + + let url = get_mainnet_url(); + // get some tx and receipt + let provider = ProviderBuilder::new().on_http(url.parse().unwrap()); + + let pp = build_circuits_params::<512, 7>(); + let final_proof_bytes = event_info + .prove_value_extraction(contract, epoch, &pp, &provider) + .await?; + + let final_proof = ProofWithVK::deserialize(&final_proof_bytes)?; + + let metadata = TableMetadata::from(event_info); + + let metadata_digest = metadata.digest(); + + let value_digest = Point::NEUTRAL; + + let pi = PublicInputs::new(&final_proof.proof.public_inputs); + + let mut block_util = build_test_data(epoch).await; + // Check the output hash + { + assert_eq!( + pi.root_hash(), + block_util + .receipts_trie + .root_hash()? + .0 + .to_vec() + .pack(Endianness::Little) + ); + } + + // Check value digest + { + assert_eq!(pi.values_digest(), value_digest.to_weierstrass()); + } + + // Check metadata digest + { + assert_eq!(pi.metadata_digest(), metadata_digest.to_weierstrass()); + } + + // Check that the number of rows is zero + { + assert_eq!(pi.n(), GFp::ZERO); + } + Ok(()) + } + /// Function that fetches a block together with its transaction trie and receipt trie for testing purposes. - async fn build_test_data() -> BlockUtil { + async fn build_test_data(block_number: u64) -> BlockUtil { let url = get_mainnet_url(); // get some tx and receipt let provider = ProviderBuilder::new().on_http(url.parse().unwrap()); // We fetch a specific block which we know includes transactions relating to the PudgyPenguins contract. 
- BlockUtil::fetch(&provider, BlockNumberOrTag::Number(21362445)) + BlockUtil::fetch(&provider, BlockNumberOrTag::Number(block_number)) .await .unwrap() } diff --git a/mp2-v1/tests/common/cases/table_source.rs b/mp2-v1/tests/common/cases/table_source.rs index 53b139df3..565212418 100644 --- a/mp2-v1/tests/common/cases/table_source.rs +++ b/mp2-v1/tests/common/cases/table_source.rs @@ -896,7 +896,7 @@ where .on_http(ctx.rpc_url.parse().unwrap()); let value_proof = event - .prove_value_extraction::<32, _>( + .prove_value_extraction::<32, 512, _>( contract.address(), bn as u64, ctx.params().get_value_extraction_params(), diff --git a/ryhope/src/storage/updatetree.rs b/ryhope/src/storage/updatetree.rs index b736572fc..7b9fd75db 100644 --- a/ryhope/src/storage/updatetree.rs +++ b/ryhope/src/storage/updatetree.rs @@ -72,6 +72,10 @@ impl UpdateTree { pub fn get_node(&self, key: &K) -> Option<&UpdateTreeNode> { self.idx.get(key).map(|idx| self.node(*idx)) } + pub fn get_node_mut(&mut self, key: &K) -> Option<&mut UpdateTreeNode> { + let idx = self.idx.get(key).cloned(); + idx.map(|index| self.node_mut(index)) + } pub fn get_child_keys(&self, node: &UpdateTreeNode) -> Vec { node.children @@ -79,6 +83,17 @@ impl UpdateTree { .map(|idx| self.node(*idx).k()) .collect() } + + pub fn get_parent_key(&self, key: &K) -> Option { + let idx = self.idx.get(key); + if let Some(&idx) = idx { + self.nodes[idx] + .parent + .map(|parent_idx| self.nodes[parent_idx].k()) + } else { + None + } + } } impl UpdateTree { @@ -443,6 +458,11 @@ impl UpdatePlan { &self.t } + /// Return a mutable reference to the tree this plan is built around. + pub fn tree_mut(&mut self) -> &mut UpdateTree { + &mut self.t + } + /// Mark the given item as having been completed. Its dependent will not be /// generated by the iterator until the item has been marked as completed. 
pub fn done(&mut self, item: &WorkplanItem) -> Result<(), RyhopeError> { From 6fae4454dc6b88538bd6ce3a48769af2ed249dfe Mon Sep 17 00:00:00 2001 From: Zack Youell Date: Tue, 4 Feb 2025 11:11:37 +0000 Subject: [PATCH 42/47] Removed ReceiptQuery struct --- mp2-common/src/eth.rs | 185 ++++++------- mp2-test/src/mpt_sequential.rs | 19 +- mp2-v1/src/values_extraction/api.rs | 6 +- .../gadgets/metadata_gadget.rs | 2 +- mp2-v1/src/values_extraction/leaf_receipt.rs | 6 +- mp2-v1/src/values_extraction/planner.rs | 249 +++++++----------- mp2-v1/tests/common/cases/table_source.rs | 17 +- 7 files changed, 192 insertions(+), 292 deletions(-) diff --git a/mp2-common/src/eth.rs b/mp2-common/src/eth.rs index b57385c12..56f8a80a0 100644 --- a/mp2-common/src/eth.rs +++ b/mp2-common/src/eth.rs @@ -292,17 +292,6 @@ pub struct ProofQuery { pub(crate) slot: StorageSlot, } -/// Struct used for storing relevant data to query blocks as they come in. -/// The constant `NO_TOPICS` is the number of indexed items in the event (excluding the event signature) and -/// `MAX_DATA_WORDS` is the number of 32 byte words of data we want to extract in addition to the topics. -#[derive(Debug, Clone)] -pub struct ReceiptQuery { - /// The contract that emits the event we care about - pub contract: Address, - /// The signature of the event we wish to monitor for - pub event: EventLogInfo, -} - /// Struct used to store all the information needed for proving a leaf is in the Receipt Trie. #[derive(Debug, Clone, Serialize, Deserialize)] pub struct ReceiptProofInfo { @@ -432,6 +421,77 @@ impl EventLogInfo( + &self, + provider: &RootProvider, + block: BlockNumberOrTag, + ) -> Result, MP2EthError> { + // Retrieve the transaction indices for the relevant logs + let tx_indices = self.retrieve_tx_indices(provider, block).await?; + + // Construct the Receipt Trie for this block so we can retrieve MPT proofs. 
+ let mut block_util = BlockUtil::fetch(provider, block).await?; + EventLogInfo::::extract_info(&tx_indices, &mut block_util) + } + + /// Function to query for relevant logs at a specific block, it returns a [`BTreeSet`] of the transaction indices that are relevant. + pub async fn retrieve_tx_indices( + &self, + provider: &RootProvider, + block: BlockNumberOrTag, + ) -> Result, MP2EthError> { + let filter = Filter::new() + .select(block) + .address(self.address) + .event_signature(B256::from(self.event_signature)); + for i in 0..RETRY_NUM - 1 { + debug!( + "Querying Receipt logs:\n\tevent signature = {:?}", + self.event_signature, + ); + match provider.get_logs(&filter).await { + // For each of the logs return the transacion its included in, then sort and remove duplicates. + Ok(response) => { + return Ok(BTreeSet::from_iter( + response.iter().map_while(|log| log.transaction_index), + )) + } + Err(e) => println!("Failed to query the Receipt logs at {i} time: {e:?}"), + } + } + match provider.get_logs(&filter).await { + // For each of the logs return the transacion its included in, then sort and remove duplicates. + Ok(response) => Ok(BTreeSet::from_iter( + response.iter().map_while(|log| log.transaction_index), + )), + Err(_) => Err(MP2EthError::FetchError), + } + } + + /// Function that takes a list of transaction indices in the form of a [`BTreeSet`] and a [`BlockUtil`] and returns a list of [`ReceiptProofInfo`]. + pub fn extract_info( + tx_indices: &BTreeSet, + block_util: &mut BlockUtil, + ) -> Result, MP2EthError> { + let mpt_root = block_util.receipts_trie.root_hash()?; + tx_indices + .iter() + .map(|&tx_index| { + let key = tx_index.rlp_bytes(); + + let proof = block_util.receipts_trie.get_proof(&key[..])?; + + Ok(ReceiptProofInfo { + mpt_proof: proof, + mpt_root, + tx_index, + }) + }) + .collect::, MP2EthError>>() + } } /// Represent an intermediate or leaf node of a storage slot in contract. 
@@ -710,87 +770,6 @@ impl ReceiptProofInfo { } } -impl ReceiptQuery { - /// Construct a new [`ReceiptQuery`] from the contract [`Address`] and the event's name as a [`str`]. - pub fn new(contract: Address, event_name: &str) -> Self { - Self { - contract, - event: EventLogInfo::::new(contract, event_name), - } - } - - /// Function that returns the MPT Trie inclusion proofs for all receipts in a block whose logs contain - /// the specified event for the contract. - pub async fn query_receipt_proofs( - &self, - provider: &RootProvider, - block: BlockNumberOrTag, - ) -> Result, MP2EthError> { - // Retrieve the transaction indices for the relevant logs - let tx_indices = self.retrieve_tx_indices(provider, block).await?; - - // Construct the Receipt Trie for this block so we can retrieve MPT proofs. - let mut block_util = BlockUtil::fetch(provider, block).await?; - ReceiptQuery::::extract_info(&tx_indices, &mut block_util) - } - - /// Function to query for relevant logs at a specific block, it returns a [`BTreeSet`] of the transaction indices that are relevant. - pub async fn retrieve_tx_indices( - &self, - provider: &RootProvider, - block: BlockNumberOrTag, - ) -> Result, MP2EthError> { - let filter = Filter::new() - .select(block) - .address(self.contract) - .event_signature(B256::from(self.event.event_signature)); - for i in 0..RETRY_NUM - 1 { - debug!( - "Querying Receipt logs:\n\tevent signature = {:?}", - self.event.event_signature, - ); - match provider.get_logs(&filter).await { - // For each of the logs return the transacion its included in, then sort and remove duplicates. - Ok(response) => { - return Ok(BTreeSet::from_iter( - response.iter().map_while(|log| log.transaction_index), - )) - } - Err(e) => println!("Failed to query the Receipt logs at {i} time: {e:?}"), - } - } - match provider.get_logs(&filter).await { - // For each of the logs return the transacion its included in, then sort and remove duplicates. 
- Ok(response) => Ok(BTreeSet::from_iter( - response.iter().map_while(|log| log.transaction_index), - )), - Err(_) => Err(MP2EthError::FetchError), - } - } - - /// Function that takes a list of transaction indices in the form of a [`BTreeSet`] and a [`BlockUtil`] and returns a list of [`ReceiptProofInfo`]. - pub fn extract_info( - tx_indices: &BTreeSet, - block_util: &mut BlockUtil, - ) -> Result, MP2EthError> { - let mpt_root = block_util.receipts_trie.root_hash()?; - tx_indices - .iter() - .map(|&tx_index| { - let key = tx_index.rlp_bytes(); - - let proof = block_util.receipts_trie.get_proof(&key[..])?; - - Ok(ReceiptProofInfo { - mpt_proof: proof, - mpt_root, - tx_index, - }) - }) - .collect::, MP2EthError>>() - } -} - impl Rlpable for alloy::rpc::types::Block { fn rlp(&self) -> Vec { let mut out = Vec::new(); @@ -1107,7 +1086,7 @@ mod test { // Now for each transaction we fetch the block, then get the MPT Trie proof that the receipt is included and verify it let test_info = generate_receipt_test_info::(); let proofs = test_info.proofs(); - let query = test_info.query(); + let event = test_info.info(); for proof in proofs.iter() { let memdb = Arc::new(MemoryDB::new(true)); let tx_trie = EthTrie::new(Arc::clone(&memdb)); @@ -1122,7 +1101,7 @@ mod test { .mpt_proof .last() .ok_or(anyhow!("Couldn't get first node in proof"))?; - let expected_sig: [u8; 32] = query.event.event_signature; + let expected_sig: [u8; 32] = event.event_signature; // Convert to Rlp form so we can use provided methods. 
let node_rlp = rlp::Rlp::new(last_node); @@ -1148,11 +1127,11 @@ mod test { .filter_map(|(log_rlp, log_off)| { let mut bytes = log_rlp.data().ok()?; let log = Log::decode(&mut bytes).ok()?; - if log.address == query.contract + if log.address == event.address && log .data .topics() - .contains(&B256::from(query.event.event_signature)) + .contains(&B256::from(event.event_signature)) { Some(logs_offset + log_off) } else { @@ -1162,19 +1141,19 @@ mod test { .collect::>(); for log_offset in relevant_logs_offset.iter() { - let mut buf = &last_node[*log_offset..*log_offset + query.event.size]; + let mut buf = &last_node[*log_offset..*log_offset + event.size]; let decoded_log = Log::decode(&mut buf)?; - let raw_bytes: [u8; 20] = last_node[*log_offset + query.event.add_rel_offset - ..*log_offset + query.event.add_rel_offset + 20] + let raw_bytes: [u8; 20] = last_node + [*log_offset + event.add_rel_offset..*log_offset + event.add_rel_offset + 20] .to_vec() .try_into() .unwrap(); - assert_eq!(decoded_log.address, query.contract); - assert_eq!(raw_bytes, query.contract); + assert_eq!(decoded_log.address, event.address); + assert_eq!(raw_bytes, event.address); let topics = decoded_log.topics(); assert_eq!(topics[0].0, expected_sig); - let raw_bytes: [u8; 32] = last_node[*log_offset + query.event.sig_rel_offset - ..*log_offset + query.event.sig_rel_offset + 32] + let raw_bytes: [u8; 32] = last_node + [*log_offset + event.sig_rel_offset..*log_offset + event.sig_rel_offset + 32] .to_vec() .try_into() .unwrap(); diff --git a/mp2-test/src/mpt_sequential.rs b/mp2-test/src/mpt_sequential.rs index 6116712bb..271c4d5d5 100644 --- a/mp2-test/src/mpt_sequential.rs +++ b/mp2-test/src/mpt_sequential.rs @@ -8,7 +8,7 @@ use alloy::{ }; use eth_trie::{EthTrie, MemoryDB, Trie}; -use mp2_common::eth::{ReceiptProofInfo, ReceiptQuery}; +use mp2_common::eth::{EventLogInfo, ReceiptProofInfo}; use rand::{distributions::uniform::SampleRange, thread_rng, Rng}; use std::sync::Arc; @@ -52,8 +52,8 @@ pub 
fn generate_random_storage_mpt( #[derive(Debug, Clone)] pub struct ReceiptTestInfo { - /// The query which we have returned proofs for - pub query: ReceiptQuery, + /// The event which we have returned proofs for + pub event: EventLogInfo, /// The proofs for receipts relating to `self.query` pub proofs: Vec, } @@ -66,8 +66,8 @@ impl self.proofs.clone() } /// Getter for the query - pub fn query(&self) -> &ReceiptQuery { - &self.query + pub fn info(&self) -> &EventLogInfo { + &self.event } } /// This function is used so that we can generate a Receipt Trie for a blog with varying transactions @@ -270,19 +270,16 @@ pub fn generate_receipt_test_info panic!(), }; - let receipt_query = ReceiptQuery::::new( + let event = EventLogInfo::::new( *event_contract.address(), &events[0].signature(), ); - let proofs = receipt_query + let proofs = event .query_receipt_proofs(rpc.root(), BlockNumberOrTag::Number(block_number)) .await .unwrap(); - ReceiptTestInfo { - query: receipt_query, - proofs, - } + ReceiptTestInfo { event, proofs } }) } diff --git a/mp2-v1/src/values_extraction/api.rs b/mp2-v1/src/values_extraction/api.rs index 0ae58131c..9ff6fd6eb 100644 --- a/mp2-v1/src/values_extraction/api.rs +++ b/mp2-v1/src/values_extraction/api.rs @@ -959,7 +959,7 @@ mod tests { fn test_receipt_api() { let receipt_proof_infos = generate_receipt_test_info::<1, 0>(); let receipt_proofs = receipt_proof_infos.proofs(); - let query = receipt_proof_infos.query(); + let event = receipt_proof_infos.info(); // We need two nodes that are children of the same branch so we compare the last but two nodes for each of them until we find a case that works let (info_one, info_two) = if let Some((one, two)) = receipt_proofs .iter() @@ -997,7 +997,7 @@ mod tests { let leaf_input_1 = CircuitInput::new_receipt_leaf( info_one.mpt_proof.last().unwrap(), info_one.tx_index, - &query.event, + event, ); let now = std::time::Instant::now(); let leaf_proof1 = generate_proof(¶ms, leaf_input_1).unwrap(); @@ -1016,7 
+1016,7 @@ mod tests { let leaf_input_2 = CircuitInput::new_receipt_leaf( info_two.mpt_proof.last().unwrap(), info_two.tx_index, - &query.event, + event, ); let now = std::time::Instant::now(); let leaf_proof2 = generate_proof(¶ms, leaf_input_2).unwrap(); diff --git a/mp2-v1/src/values_extraction/gadgets/metadata_gadget.rs b/mp2-v1/src/values_extraction/gadgets/metadata_gadget.rs index 761be2bb0..b5b0c15cb 100644 --- a/mp2-v1/src/values_extraction/gadgets/metadata_gadget.rs +++ b/mp2-v1/src/values_extraction/gadgets/metadata_gadget.rs @@ -176,7 +176,7 @@ impl TableMetadata { }) } - pub fn extracted_receipt_value_digest( + fn extracted_receipt_value_digest( &self, value: &[u8], event: &EventLogInfo, diff --git a/mp2-v1/src/values_extraction/leaf_receipt.rs b/mp2-v1/src/values_extraction/leaf_receipt.rs index 74369da0b..fbafab9b2 100644 --- a/mp2-v1/src/values_extraction/leaf_receipt.rs +++ b/mp2-v1/src/values_extraction/leaf_receipt.rs @@ -464,12 +464,12 @@ mod tests { let receipt_proof_infos = generate_receipt_test_info::(); let proofs = receipt_proof_infos.proofs(); let info = proofs.first().unwrap(); - let query = receipt_proof_infos.query(); + let event = receipt_proof_infos.info(); let c = ReceiptLeafCircuit::::new::( info.mpt_proof.last().unwrap(), info.tx_index, - &query.event, + event, ) .unwrap(); let metadata = c.metadata.clone(); @@ -490,7 +490,7 @@ mod tests { // Check value digest { - let exp_digest = metadata.receipt_value_digest(info.tx_index, &node, &query.event); + let exp_digest = metadata.receipt_value_digest(info.tx_index, &node, event); assert_eq!(pi.values_digest(), exp_digest.to_weierstrass()); } diff --git a/mp2-v1/src/values_extraction/planner.rs b/mp2-v1/src/values_extraction/planner.rs index cab1cc586..e870a7f40 100644 --- a/mp2-v1/src/values_extraction/planner.rs +++ b/mp2-v1/src/values_extraction/planner.rs @@ -2,13 +2,13 @@ use alloy::{ eips::BlockNumberOrTag, network::Ethereum, - primitives::{keccak256, Address, B256}, + 
primitives::{keccak256, B256}, providers::{Provider, RootProvider}, transports::Transport, }; use anyhow::Result; use mp2_common::{ - eth::{node_type, EventLogInfo, MP2EthError, NodeType, ReceiptQuery}, + eth::{node_type, EventLogInfo, MP2EthError, NodeType}, mpt_sequential::PAD_LEN, }; @@ -89,14 +89,16 @@ impl From for MP2PlannerError { } } -/// Trait used to mark types that are needed as extra circuit inputs -pub trait ExtraInput {} - +/// Enum used for supplying extra inputs needed to convert [`ProofData`] to [`CircuitInput`]. #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, PartialOrd, Ord, Hash)] pub enum InputEnum { + /// Leaf Variant that contains the extra inputs that depend on the implementation of [`Extractable`] Leaf(E::ExtraLeafInput), + /// Extension extra input should be a single child proof Extension(Vec), + /// Branch extra inputs should be a list of child proofs Branch(Vec>), + /// A dummy input just requires the root hash of the tree and the metadata digest for the extracted item Dummy(B256), } @@ -104,10 +106,14 @@ impl InputEnum { /// Create a new Branch or extension node with empty input pub fn empty_non_leaf(node: &[u8]) -> Result { let node_type = node_type(node)?; + // Match on the node type to make sure we can create an empty version. match node_type { NodeType::Branch => Ok(InputEnum::Branch(vec![])), NodeType::Extension => Ok(InputEnum::Extension(vec![])), - _ => Err(MP2PlannerError::UpdateTreeError("Tried to make an empty non leaf node from a node that wasn't a Branch or Extension".to_string())) + _ => Err(MP2PlannerError::UpdateTreeError( + "Tried to make an empty node from a MPT node that wasn't a Branch or Extension" + .to_string(), + )), } } } @@ -124,16 +130,15 @@ pub trait Extractable: Debug { + Ord + PartialOrd + Hash; - - fn create_update_tree( + /// Method that creates an [`ExtractionUpdatePlan`] that can then be processed either locally or in a distributed fashion. 
+ fn create_update_plan( &self, - contract: Address, epoch: u64, provider: &RootProvider, ) -> impl Future, MP2PlannerError>> where Self: Sized; - + /// Method that defines how to convert [`ProofData`] into [`CircuitInput`] for this implementation. fn to_circuit_input( &self, proof_data: &ProofData, @@ -141,14 +146,13 @@ pub trait Extractable: Debug { where [(); PAD_LEN(LEAF_LEN)]:, Self: Sized; - + /// Method provided for building and processing an [`ExtractionUpdatePlan`] locally. fn prove_value_extraction< const MAX_EXTRACTED_COLUMNS: usize, const LEAF_LEN: usize, T: Transport + Clone, >( &self, - contract: Address, epoch: u64, pp: &PublicParameters, provider: &RootProvider, @@ -157,18 +161,22 @@ pub trait Extractable: Debug { [(); PAD_LEN(LEAF_LEN)]:; } +/// Struct that stores the MPT node along with any extra data needed for the [`CircuitInput`] API. #[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize, Hash)] pub struct ProofData { + /// The MPT node node: Vec, + /// Extra inputs as defined by the implementor of [`Extractable`] extra_inputs: InputEnum, } impl ProofData { + /// Create a new instance of [`ProofData`] pub fn new(node: Vec, extra_inputs: InputEnum) -> ProofData { ProofData:: { node, extra_inputs } } - /// Create a new instance of [`ProofData`] from a slice of [`u8`] + /// Create a new instance of [`ProofData`] from a slice of [`u8`] and any extra data required. pub fn from_slice( node: &[u8], extra_inputs: InputEnum, @@ -191,11 +199,15 @@ impl ProofData { Ok(ProofData::::new(node.to_vec(), extra_inputs)) } - /// Update a [`ProofData`] with a proof represented as a [`Vec`] + /// Update a [`ProofData`] with a proof represented as a [`Vec`]. This method + /// will error if called on a node whose `extra_inputs` are not either the + /// [`InputEnum::Extension`] or [`InputEnum::Branch`] variant. 
pub fn update(&mut self, proof: Vec) -> Result<(), MP2PlannerError> { match self.extra_inputs { + // If its a branch simply push the proof into the stored vec InputEnum::Branch(ref mut proofs) => proofs.push(proof), - + // For an extension we check that the vec is currently empty, if it is we replace it with + // the provided one. InputEnum::Extension(ref mut inner_proof) => { if !proof.is_empty() { return Err(MP2PlannerError::UpdateTreeError( @@ -216,20 +228,30 @@ impl ProofData { } } +/// A struct that stores an [`UpdateTree`] of keys and a local cache of [`ProofData`]. +/// This way when a [`WorkplanItem`](ryhope::storage::updatetree::WorkplanItem) is processed we can update the cache so any parent proofs can +/// be processed. #[derive(Debug, Clone, Serialize, Deserialize)] pub struct ExtractionUpdatePlan { + /// The [`UpdateTree`] that specifies the order proofs should be generated. pub(crate) update_tree: UpdateTree, + /// The cache of input data, at the beginning only the keys relating to leaf proofs will have all data + /// provided, it should then be updated as these tasks are processed. pub(crate) proof_cache: HashMap>, } impl ExtractionUpdatePlan { + /// Create a new [`ExtractionUpdatePlan`] from its constituent parts. pub fn new(update_tree: UpdateTree, proof_cache: HashMap>) -> Self { Self { update_tree, proof_cache, } } - + /// Method to run the plan to completion locally. For each item in the [`UpdatePlan`](ryhope::storage::updatetree::UpdatePlan) we fetch the data from [`self.proof_cache`](ExtractionUpdatePlan::proof_cache) + /// convert the [`ProofData`] to a [`CircuitInput`] which we then pass to the [`generate_proof`] function defined in [`crate::values_extraction::api`]. We then take the output proof + /// and if the current key has a parent node in [`self.update_tree`](ExtractionUpdatePlan::update_tree) we update the [`ProofData`] stored for this key. 
If no parent is present we must be at the root of the tree + /// and so we just return the final proof. pub fn process_locally( &mut self, params: &PublicParameters, @@ -238,14 +260,20 @@ impl ExtractionUpdatePlan { where [(); PAD_LEN(LEAF_LEN)]:, { + // Convert the UpdateTree into an UpdatePlan let mut update_plan = self.update_tree.clone().into_workplan(); + // Instantiate a vector that will eventually be the output. let mut final_proof = Vec::::new(); + // Run the loop while the UpdatePlan continues to yield tasks. while let Some(Next::Ready(work_plan_item)) = update_plan.next() { + // Retrieve proof data related to this key let proof_data = self.proof_cache.get(work_plan_item.k()).ok_or( MP2PlannerError::UpdateTreeError("Key not present in the proof cache".to_string()), )?; + // Convert to CircuitInput let circuit_type = extractable.to_circuit_input(proof_data); + // Generate the proof let proof = generate_proof(params, circuit_type).map_err(|e| { MP2PlannerError::ProvingError(format!( "Error while generating proof for node {{ inner: {:?} }}", @@ -253,8 +281,9 @@ impl ExtractionUpdatePlan { )) })?; + // Fetch the parent of this key let parent = self.update_tree.get_parent_key(work_plan_item.k()); - + // Determine next steps based on whether the parent exists match parent { Some(parent_key) => { let proof_data_ref = self.proof_cache.get_mut(&parent_key).unwrap(); @@ -264,7 +293,7 @@ impl ExtractionUpdatePlan { final_proof = proof; } } - + // Mark the item as done update_plan.done(&work_plan_item)?; } Ok(final_proof) @@ -275,18 +304,13 @@ impl Extractable for EventLogInfo { type ExtraLeafInput = u64; - async fn create_update_tree( + async fn create_update_plan( &self, - contract: Address, epoch: u64, provider: &RootProvider, ) -> Result, MP2PlannerError> { - let query = ReceiptQuery:: { - contract, - event: *self, - }; - - let proofs = query.query_receipt_proofs(provider, epoch.into()).await?; + // Query for the receipt proofs relating to this event at block 
number `epoch` + let proofs = self.query_receipt_proofs(provider, epoch.into()).await?; let mut proof_cache = HashMap::>::new(); @@ -386,7 +410,6 @@ impl Extractable T: Transport + Clone, >( &self, - contract: Address, epoch: u64, pp: &PublicParameters, provider: &RootProvider, @@ -394,7 +417,7 @@ impl Extractable where [(); PAD_LEN(LEAF_LEN)]:, { - let mut extraction_plan = self.create_update_tree(contract, epoch, provider).await?; + let mut extraction_plan = self.create_update_plan(epoch, provider).await?; extraction_plan.process_locally(pp, self) } @@ -408,15 +431,14 @@ pub mod tests { use eth_trie::Trie; use mp2_common::{ digest::Digest, - eth::{left_pad32, BlockUtil}, - poseidon::{hash_to_int_value, H}, + eth::{BlockUtil, ReceiptProofInfo}, proof::ProofWithVK, types::GFp, - utils::{Endianness, Packer, ToFields}, + utils::{Endianness, Packer}, }; use mp2_test::eth::get_mainnet_url; - use plonky2::{field::types::Field, hash::hash_types::HashOut, plonk::config::Hasher}; - use plonky2_ecgfp5::curve::{curve::Point, scalar_field::Scalar}; + use plonky2::field::types::Field; + use plonky2_ecgfp5::curve::curve::Point; use std::str::FromStr; use crate::values_extraction::{ @@ -428,20 +450,14 @@ pub mod tests { #[tokio::test] async fn test_receipt_update_tree() -> Result<()> { // First get the info we will feed in to our function - let event_info = test_receipt_trie_helper().await?; - - let contract = Address::from_str("0xbd3531da5cf5857e7cfaa92426877b022e612cf8")?; let epoch: u64 = 21362445; + let (block_util, event_info, _) = build_test_data(epoch).await?; let url = get_mainnet_url(); // get some tx and receipt let provider = ProviderBuilder::new().on_http(url.parse().unwrap()); - let extraction_plan = event_info - .create_update_tree(contract, epoch, &provider) - .await?; - - let block_util = build_test_data(epoch).await; + let extraction_plan = event_info.create_update_plan(epoch, &provider).await?; assert_eq!( *extraction_plan.update_tree.root(), @@ -451,122 
+467,24 @@ pub mod tests { } #[tokio::test] - async fn test_receipt_proving() -> Result<()> { - // First get the info we will feed in to our function - let event_info = test_receipt_trie_helper().await?; - - let contract = Address::from_str("0xbd3531da5cf5857e7cfaa92426877b022e612cf8")?; - let epoch: u64 = 21362445; - - let url = get_mainnet_url(); - // get some tx and receipt - let provider = ProviderBuilder::new().on_http(url.parse().unwrap()); - - let pp = build_circuits_params::<512, 7>(); - let final_proof_bytes = event_info - .prove_value_extraction(contract, epoch, &pp, &provider) - .await?; - - let final_proof = ProofWithVK::deserialize(&final_proof_bytes)?; - let query = ReceiptQuery::<2, 1> { - contract, - event: event_info, - }; - - let metadata = TableMetadata::from(event_info); - - let metadata_digest = metadata.digest(); - - let value_digest = query - .query_receipt_proofs(&provider, epoch.into()) - .await? - .iter() - .fold(Digest::NEUTRAL, |acc, info| { - let node = info.mpt_proof.last().unwrap().clone(); - - let mut tx_index_input = [0u8; 32]; - tx_index_input[31] = info.tx_index as u8; - - let node_rlp = rlp::Rlp::new(&node); - // The actual receipt data is item 1 in the list - let receipt_rlp = node_rlp.at(1).unwrap(); - - // We make a new `Rlp` struct that should be the encoding of the inner list representing the `ReceiptEnvelope` - let receipt_list = rlp::Rlp::new(&receipt_rlp.data().unwrap()[1..]); - - // The logs themselves start are the item at index 3 in this list - let gas_used_rlp = receipt_list.at(1).unwrap(); - - let gas_used_bytes = left_pad32(gas_used_rlp.data().unwrap()); - - let (input_vd, row_unique_data) = - metadata.input_value_digest(&[&tx_index_input, &gas_used_bytes]); - let extracted_vd = metadata.extracted_receipt_value_digest(&node, &event_info); - - let total = input_vd + extracted_vd; - - // row_id = H2int(row_unique_data || num_actual_columns) - let inputs = HashOut::from(row_unique_data) - .to_fields() - .into_iter() - 
.chain(std::iter::once(GFp::from_canonical_usize( - metadata.num_actual_columns, - ))) - .collect::>(); - let hash = H::hash_no_pad(&inputs); - let row_id = hash_to_int_value(hash); - - // values_digest = values_digest * row_id - let row_id = Scalar::from_noncanonical_biguint(row_id); - - let exp_digest = total * row_id; - - acc + exp_digest - }); - - let pi = PublicInputs::new(&final_proof.proof.public_inputs); - - let mut block_util = build_test_data(epoch).await; - // Check the output hash - { - assert_eq!( - pi.root_hash(), - block_util - .receipts_trie - .root_hash()? - .0 - .to_vec() - .pack(Endianness::Little) - ); - } - - // Check value digest - { - assert_eq!(pi.values_digest(), value_digest.to_weierstrass()); - } - - // Check metadata digest - { - assert_eq!(pi.metadata_digest(), metadata_digest.to_weierstrass()); - } - Ok(()) + async fn test_receipt_local_proving() -> Result<()> { + let pp = build_circuits_params::<512, 5>(); + // Test proving on a block with some relevant events + test_receipt_proving(21362445, &pp).await?; + // Test proving on a block with no relevant events + test_receipt_proving(21767312, &pp).await } - #[tokio::test] - async fn test_empty_block_receipt_proving() -> Result<()> { + async fn test_receipt_proving(epoch: u64, pp: &PublicParameters<512, 5>) -> Result<()> { // First get the info we will feed in to our function - let event_info = test_receipt_trie_helper().await?; - - let contract = Address::from_str("0xbd3531da5cf5857e7cfaa92426877b022e612cf8")?; - let epoch: u64 = 21767312; + let (mut block_util, event_info, proof_info) = build_test_data(epoch).await?; let url = get_mainnet_url(); // get some tx and receipt let provider = ProviderBuilder::new().on_http(url.parse().unwrap()); - let pp = build_circuits_params::<512, 7>(); let final_proof_bytes = event_info - .prove_value_extraction(contract, epoch, &pp, &provider) + .prove_value_extraction(epoch, pp, &provider) .await?; let final_proof = 
ProofWithVK::deserialize(&final_proof_bytes)?; @@ -575,11 +493,20 @@ pub mod tests { let metadata_digest = metadata.digest(); - let value_digest = Point::NEUTRAL; + let value_digest = proof_info.iter().try_fold(Digest::NEUTRAL, |acc, info| { + let node = info + .mpt_proof + .last() + .ok_or(MP2PlannerError::UpdateTreeError( + "MPT proof had no nodes".to_string(), + ))?; + Result::::Ok( + acc + metadata.receipt_value_digest(info.tx_index, node, &event_info), + ) + })?; let pi = PublicInputs::new(&final_proof.proof.public_inputs); - let mut block_util = build_test_data(epoch).await; // Check the output hash { assert_eq!( @@ -603,23 +530,31 @@ pub mod tests { assert_eq!(pi.metadata_digest(), metadata_digest.to_weierstrass()); } - // Check that the number of rows is zero + // Check that the number of rows is equal to the length of { - assert_eq!(pi.n(), GFp::ZERO); + assert_eq!(pi.n(), GFp::from_canonical_usize(proof_info.len())); } Ok(()) } + type TestData = (BlockUtil, EventLogInfo<2, 1>, Vec); /// Function that fetches a block together with its transaction trie and receipt trie for testing purposes. - async fn build_test_data(block_number: u64) -> BlockUtil { + async fn build_test_data(block_number: u64) -> Result { let url = get_mainnet_url(); // get some tx and receipt - let provider = ProviderBuilder::new().on_http(url.parse().unwrap()); + let provider = ProviderBuilder::new().on_http(url.parse()?); // We fetch a specific block which we know includes transactions relating to the PudgyPenguins contract. - BlockUtil::fetch(&provider, BlockNumberOrTag::Number(block_number)) - .await - .unwrap() + let block_util = + BlockUtil::fetch(&provider, BlockNumberOrTag::Number(block_number)).await?; + + let event_info = test_receipt_trie_helper().await?; + + let proof_info = event_info + .query_receipt_proofs(&provider, block_number.into()) + .await?; + + Ok((block_util, event_info, proof_info)) } /// Function to build a list of [`ReceiptProofInfo`] for a set block. 
diff --git a/mp2-v1/tests/common/cases/table_source.rs b/mp2-v1/tests/common/cases/table_source.rs index 565212418..4daacd1d8 100644 --- a/mp2-v1/tests/common/cases/table_source.rs +++ b/mp2-v1/tests/common/cases/table_source.rs @@ -20,7 +20,7 @@ use futures::{future::BoxFuture, FutureExt}; use itertools::Itertools; use log::{debug, info}; use mp2_common::{ - eth::{EventLogInfo, ProofQuery, ReceiptProofInfo, ReceiptQuery, StorageSlot, StorageSlotNode}, + eth::{EventLogInfo, ProofQuery, ReceiptProofInfo, StorageSlot, StorageSlotNode}, poseidon::H, proof::ProofWithVK, types::{GFp, HashOutput}, @@ -863,12 +863,7 @@ where let block_number = ctx.block_number().await; let new_block_number = block_number as BlockPrimaryIndex; - let query = ReceiptQuery::<{ R::NO_TOPICS }, { R::MAX_DATA_WORDS }> { - contract: contract.address(), - event, - }; - - let proof_infos = query + let proof_infos = event .query_receipt_proofs(provider.root(), block_number.into()) .await .unwrap(); @@ -897,7 +892,6 @@ where let value_proof = event .prove_value_extraction::<32, 512, _>( - contract.address(), bn as u64, ctx.params().get_value_extraction_params(), provider.root(), @@ -940,12 +934,7 @@ where let block_number = ctx.block_number().await; let new_block_number = block_number as BlockPrimaryIndex; - let query = ReceiptQuery::<{ R::NO_TOPICS }, { R::MAX_DATA_WORDS }> { - contract: contract.address(), - event, - }; - - let proof_infos = query + let proof_infos = event .query_receipt_proofs(provider.root(), block_number.into()) .await .unwrap(); From eab62360a4a2bffcae761267b112dd4e85d68651 Mon Sep 17 00:00:00 2001 From: Zack Youell Date: Wed, 5 Feb 2025 17:53:19 +0000 Subject: [PATCH 43/47] Updated for review comments --- mp2-common/Cargo.toml | 1 + mp2-common/src/array.rs | 156 +++++++---- mp2-common/src/keccak.rs | 12 +- mp2-common/src/mpt_sequential/utils.rs | 14 +- mp2-common/src/rlp.rs | 115 +++----- mp2-v1/src/api.rs | 23 +- mp2-v1/src/final_extraction/api.rs | 4 +- 
.../src/final_extraction/receipt_circuit.rs | 16 +- mp2-v1/src/values_extraction/api.rs | 85 +++--- .../values_extraction/gadgets/column_info.rs | 121 +++------ .../gadgets/metadata_gadget.rs | 191 ++++--------- mp2-v1/src/values_extraction/leaf_mapping.rs | 53 ++-- .../leaf_mapping_of_mappings.rs | 77 +++--- mp2-v1/src/values_extraction/leaf_receipt.rs | 74 +++-- mp2-v1/src/values_extraction/leaf_single.rs | 14 +- mp2-v1/src/values_extraction/mod.rs | 256 +++++++++++++++++- mp2-v1/src/values_extraction/planner.rs | 25 +- mp2-v1/tests/common/cases/indexing.rs | 55 +--- 18 files changed, 734 insertions(+), 558 deletions(-) diff --git a/mp2-common/Cargo.toml b/mp2-common/Cargo.toml index 084b5b3a2..143a36aef 100644 --- a/mp2-common/Cargo.toml +++ b/mp2-common/Cargo.toml @@ -34,6 +34,7 @@ rstest.workspace = true tokio.workspace = true mp2_test = { path = "../mp2-test" } + [features] ci = ["mp2_test/ci"] original_poseidon = [] diff --git a/mp2-common/src/array.rs b/mp2-common/src/array.rs index bd3a32aac..8326e3953 100644 --- a/mp2-common/src/array.rs +++ b/mp2-common/src/array.rs @@ -624,53 +624,7 @@ where b: &mut CircuitBuilder, at: Target, ) -> T { - // We will split the array into smaller arrays of size 64, padding the last array with zeroes if required - let padded_size = (SIZE - 1) / RANDOM_ACCESS_SIZE + 1; - - // Create an array of `Array`s - let arrays: Vec> = (0..padded_size) - .map(|i| Array { - arr: create_array(|j| { - let index = RANDOM_ACCESS_SIZE * i + j; - if index < self.arr.len() { - self.arr[index] - } else { - T::from_target(b.zero()) - } - }), - }) - .collect(); - - // We need to express `at` in base 64, we are also assuming that the initial array was smaller than 64^2 = 4096 which we enforce with a range check. - // We also check that `at` is smaller that the size of the array. 
- let array_size = b.constant(F::from_noncanonical_u64(SIZE as u64)); - let less_than_check = less_than_unsafe(b, at, array_size, 12); - let true_target = b._true(); - b.connect(less_than_check.target, true_target.target); - - let (low_bits, high_bits) = b.split_low_high(at, 6, 12); - - // Search each of the smaller arrays for the target at `low_bits` - let mut first_search = arrays - .into_iter() - .map(|array| { - b.random_access( - low_bits, - array - .arr - .iter() - .map(Targetable::to_target) - .collect::>(), - ) - }) - .collect::>(); - - // Now we push a number of zero targets into the array to make it a power of 2 - let next_power_of_two = first_search.len().next_power_of_two(); - let zero_target = b.zero(); - first_search.resize(next_power_of_two, zero_target); - // Serach the result for the Target at `high_bits` - T::from_target(b.random_access(high_bits, first_search)) + large_slice_random_access(b, &self.arr, at) } /// Returns [`Self[at..at+SUB_SIZE]`]. @@ -729,6 +683,114 @@ impl Array { /// Plonky2 API const RANDOM_ACCESS_SIZE: usize = 64; +/// This function allows you to search a large slice of [`Targetable`] by representing it as a number of +/// smaller [`Array`]s with size [`RANDOM_ACCESS_SIZE`], padding the final smaller array where required. +/// For example if we have an array of length `512` and we wish to find the value at index `324` the following +/// occurs: +/// 1) Split the original slice into `512 / 64 = 8` chunks `[A_0, ... , A_7]` +/// 2) Express `324` in base 64 (Little Endian) `[4, 5]` +/// 3) For each `i \in [0, 7]` use a [`RandomAccesGate`] to lookup the `4`th element, `v_i,3` of `A_i` +/// and create a new list of length `8` that consists of `[v_0,3, v_1,3, ... 
v_7,3]` +/// 4) Now use another [`RandomAccessGate`] to select the `5`th elemnt of this new list (`v_4,3` as we have zero-indexed both times) +/// +/// For comparison using [`Array::value_at`] on an [`Array`] with length `512` results in 129 rows, using this method +/// on the same [`Array`] results in 15 rows. +pub(crate) fn large_slice_random_access( + b: &mut CircuitBuilder, + slice: &[T], + at: Target, +) -> T +where + F: RichField + Extendable, + T: Targetable + Clone + Serialize + for<'de> Deserialize<'de>, +{ + let zero = b.zero(); + // We will split the array into smaller arrays of size 64, padding the last array with zeroes if required + let padded_size = (slice.len() - 1) / RANDOM_ACCESS_SIZE + 1; + + // Create an array of `Array`s + let arrays: Vec> = (0..padded_size) + .map(|i| Array { + arr: create_array(|j| { + let index = RANDOM_ACCESS_SIZE * i + j; + if index < slice.len() { + slice[index] + } else { + T::from_target(b.zero()) + } + }), + }) + .collect(); + + // We need to express `at` in base 64, we are also assuming that the initial array was smaller than 64^2 = 4096 which we enforce with a range check. + // We also check that `at` is smaller that the size of the array, if it is not the output defaults to zero. 
+ let array_size = b.constant(F::from_noncanonical_u64(slice.len() as u64)); + let less_than_check = less_than_unsafe(b, at, array_size, 12); + + let lookup_index = b.select(less_than_check, at, zero); + let (low_bits, high_bits) = b.split_low_high(lookup_index, 6, 12); + // Search each of the smaller arrays for the target at `low_bits` + let mut first_search = arrays + .into_iter() + .map(|array| { + b.random_access( + low_bits, + array + .arr + .iter() + .map(Targetable::to_target) + .collect::>(), + ) + }) + .collect::>(); + + // Now we push a number of zero targets into the array to make it a power of 2 + let next_power_of_two = first_search.len().next_power_of_two(); + let zero_target = b.zero(); + first_search.resize(next_power_of_two, zero_target); + // Serach the result for the Target at `high_bits` + let second_search = b.random_access(high_bits, first_search); + T::from_target(b.select(less_than_check, second_search, zero)) +} + +/// Function to extract value from a slice using random access gates. +/// If `at` is outside the range of the slice it defaults to return zero. 
+pub fn extract_value( + b: &mut CircuitBuilder, + data: &[T], + at: Target, +) -> T +where + F: RichField + Extendable, + T: Targetable + Clone + Serialize + for<'de> Deserialize<'de>, +{ + // We check to see if the index `at` is a constant, if it is we can directly return the value + if let Some(val) = b.target_as_constant(at) { + let index = val.to_canonical_u64() as usize; + return data[index]; + } + let data_len = data.len(); + let zero = b.zero(); + // Only use random_access when SIZE is a power of 2 and smaller than 64 + // see https://stackoverflow.com/a/600306/1202623 for the trick + if data_len <= RANDOM_ACCESS_SIZE { + let next_power_two = data_len.next_power_of_two(); + // Escape hatch when we can use random_access from plonky2 base + T::from_target( + b.random_access( + at, + data.iter() + .map(Targetable::to_target) + .chain(std::iter::repeat(zero)) + .take(next_power_two) + .collect::>(), + ), + ) + } else { + large_slice_random_access(b, data, at) + } +} + #[cfg(test)] mod test { use core::array::from_fn as create_array; diff --git a/mp2-common/src/keccak.rs b/mp2-common/src/keccak.rs index 7a38c135e..a3a97dc4d 100644 --- a/mp2-common/src/keccak.rs +++ b/mp2-common/src/keccak.rs @@ -23,7 +23,9 @@ use serde::{Deserialize, Serialize}; use crate::{ array::{Array, Vector, VectorWire}, - utils::{keccak256, less_than, Endianness, FromTargets, PackerTarget, ToTargets}, + utils::{ + keccak256, less_than, less_than_unsafe, Endianness, FromTargets, PackerTarget, ToTargets, + }, }; /// Length of a hash in bytes. 
@@ -170,13 +172,17 @@ impl KeccakCircuit { let blocks = (0..total_num_blocks) .map(|i| { let i_target = b.constant(F::from_canonical_usize(i)); - less_than(b, i_target, nb_actual_blocks, 8) + if i == 0 { + less_than(b, i_target, nb_actual_blocks, 8) + } else { + less_than_unsafe(b, i_target, nb_actual_blocks, 8) + } }) .collect::>(); let hash_target = HashInputTarget { input: BigUintTarget { - limbs: node_u32_target.clone(), + limbs: node_u32_target, }, input_bits: 0, blocks, diff --git a/mp2-common/src/mpt_sequential/utils.rs b/mp2-common/src/mpt_sequential/utils.rs index 7ba11d1ea..99da94b8c 100644 --- a/mp2-common/src/mpt_sequential/utils.rs +++ b/mp2-common/src/mpt_sequential/utils.rs @@ -64,7 +64,8 @@ pub fn left_pad_leaf_value< let tmp = b.add(offset, value_len); let start = b.sub(tmp, one); - let mut last_byte_found = b._false(); + // In the case that prefix is exactly 128 the value is precisely zero so we should not extract anything + let mut last_byte_found = b.is_equal(value_len_80, zero); let mut result_bytes = [zero; PADDED_LEN]; @@ -83,7 +84,6 @@ pub fn left_pad_leaf_value< .rev() .enumerate() .for_each(|(i, out_byte)| { - // offset = info.byte_offset + i let index = b.constant(F::from_canonical_usize(i)); let inner_offset = b.sub(start, index); // Set to 0 if found the last byte. @@ -91,7 +91,7 @@ pub fn left_pad_leaf_value< // Since VALUE_LEN is a constant that is determined at compile time this conditional won't // cause any issues with the circuit. 
- let byte = if RLP_VALUE_LEN <= 64 { + let byte: Target = if RLP_VALUE_LEN <= 64 { b.random_access(inner_offset, ram_value.clone()) } else { value.random_access_large_array(b, inner_offset) @@ -108,14 +108,6 @@ pub fn left_pad_leaf_value< }); Array::::from_array(result_bytes) - - // value - // // WARNING: this is a hack to avoid another const generic but - // // what we should really do here is extract RLP_VALUE_LEN-1 because we - // // consider 1 extra byte for the RLP header always (which may or may not exist) - // .extract_array::(b, offset) - // .into_vec(value_len) - // .normalize_left::<_, _, PADDED_LEN>(b) } pub fn visit_proof(proof: &[Vec]) { diff --git a/mp2-common/src/rlp.rs b/mp2-common/src/rlp.rs index e695cd2da..ed41277ea 100644 --- a/mp2-common/src/rlp.rs +++ b/mp2-common/src/rlp.rs @@ -1,6 +1,6 @@ -use crate::array::{Array, VectorWire}; +use crate::array::{extract_value, Array, VectorWire}; -use crate::utils::{less_than, num_to_bits}; +use crate::utils::{less_than, less_than_unsafe, num_to_bits}; use plonky2::field::extension::Extendable; use plonky2::hash::hash_types::RichField; use plonky2::iop::target::{BoolTarget, Target}; @@ -153,46 +153,22 @@ pub fn decode_compact_encoding< // non power of 2 lengths are padded leading zeros pub fn data_len, const D: usize>( b: &mut CircuitBuilder, - data: &[Target], + data: &[Target; MAX_LEN_BYTES], len_of_len: Target, - offset: Target, ) -> Target { - let mut res = b.zero(); - - let const_256 = b.constant(F::from_canonical_u64(256)); + let zero = b.zero(); let mut last_byte_found = b._false(); - let lol_add_one = b.add_const(len_of_len, F::ONE); - for i in 0..MAX_LEN_BYTES { - // We shift by one because the first byte is the rlp target. 
- let i_tgt = b.constant(F::from_canonical_u8(i as u8 + 1)); - // make sure we don't read out more than the actual len - let equal = b.is_equal(i_tgt, lol_add_one); - last_byte_found = b.or(equal, last_byte_found); - - // this part offset i to read from the array - let i_plus_1 = b.add(i_tgt, offset); - - let item = quin_selector(b, data, i_plus_1); - - // shift result by one byte - // res += 2^i * arr[i+1] only if we're in right range - let sum = b.mul_add(const_256, res, item); - res = b.select(last_byte_found, res, sum); - } - - res + let const_256 = b.constant(F::from_canonical_u64(256)); + data.iter().enumerate().fold(zero, |acc, (j, &byte)| { + let j_tgt = b.constant(F::from_canonical_usize(j)); + + let at_end = b.is_equal(j_tgt, len_of_len); + last_byte_found = b.or(last_byte_found, at_end); + let sum = b.mul_add(const_256, acc, byte); + b.select(last_byte_found, acc, sum) + }) } -// We read the RLP header but knowing it is a value that is always <55bytes long -// we can hardcode the type of RLP header it is and directly get the real number len -// in this case, the header marker is 0x80 that we can directly take out from first byte -pub fn short_string_len, const D: usize>( - b: &mut CircuitBuilder, - header: &Target, -) -> Target { - let byte_80 = b.constant(F::from_canonical_usize(128)); - b.sub(*header, byte_80) -} /// It returns the RLP header information starting at data[offset]. 
The header.offset /// is absolute from the 0-index of data (not from the `offset` index) pub fn decode_header, const D: usize>( @@ -203,7 +179,7 @@ pub fn decode_header, const D: usize>( let one = b.one(); let zero = b.zero(); - let prefix = quin_selector(b, data, offset); + let prefix = extract_value(b, data, offset); let byte_80 = b.constant(F::from_canonical_usize(128)); let byte_b7 = b.constant(F::from_canonical_usize(183)); @@ -212,10 +188,12 @@ pub fn decode_header, const D: usize>( let byte_f7 = b.constant(F::from_canonical_usize(247)); let byte_f8 = b.constant(F::from_canonical_usize(248)); + // We check less than or equal to 128 because the single byte 0 is encoded as a string of length zero in RLP let prefix_less_0x80 = less_than(b, prefix, byte_80, 8); - let prefix_less_0xb8 = less_than(b, prefix, byte_b8, 8); - let prefix_less_0xc0 = less_than(b, prefix, byte_c0, 8); - let prefix_less_0xf8 = less_than(b, prefix, byte_f8, 8); + // Prefix has been range checked now so we can use unsafe variant + let prefix_less_0xb8 = less_than_unsafe(b, prefix, byte_b8, 8); + let prefix_less_0xc0 = less_than_unsafe(b, prefix, byte_c0, 8); + let prefix_less_0xf8 = less_than_unsafe(b, prefix, byte_f8, 8); // This part determines at which offset should we read the data let prefix_plus_one = b.add(prefix, one); @@ -236,13 +214,21 @@ pub fn decode_header, const D: usize>( // i.e. 
if it's a single byte value, no offset we directly read value let offset_data = b._if(prefix_less_0x80, zero, select_3); - // read the lenght encoded depending on the type + // read the length encoded depending on the type + // To avoid repeatedly indexing into the data slice we extract MAX_LEN_BYTES from `offset` + 1 + // as offset includes the type encoding + let poss_length_bytes: [Target; MAX_LEN_BYTES] = std::array::from_fn(|i| { + let index = b.add_const(offset, F::from_canonical_usize(i + 1)); + extract_value(b, data, index) + }); let prefix_minus_f7 = b.sub(prefix, byte_f7); - let long_list_len = data_len(b, data, prefix_minus_f7, offset); + + let long_list_len = data_len(b, &poss_length_bytes, prefix_minus_f7); + // let long_list_len = data_len(b, data, prefix_minus_f7, offset); let short_list_len = b.sub(prefix, byte_c0); let select_1 = b._if(prefix_less_0xf8, short_list_len, long_list_len); let prefix_minus_b7 = b.sub(prefix, byte_b7); - let long_str_len = data_len(b, data, prefix_minus_b7, offset); + let long_str_len = data_len(b, &poss_length_bytes, prefix_minus_b7); let select_2 = b._if(prefix_less_0xc0, long_str_len, select_1); let short_str_len = b.sub(prefix, byte_80); let select_3 = b._if(prefix_less_0xb8, short_str_len, select_2); @@ -263,6 +249,7 @@ pub fn decode_header, const D: usize>( /// The offsets decoded in the returned list are starting from the 0-index of `data` /// not from the `offset` index. /// If N is less than the actual number of items, then the number of fields will be N. +/// This is achieved by using the list header to pad the missing entries. /// Otherwise, the number of fields returned is determined by the header the RLP list. 
pub fn decode_fixed_list, const D: usize, const N: usize>( b: &mut CircuitBuilder, @@ -286,13 +273,10 @@ pub fn decode_fixed_list, const D: usize, const N: for i in 0..N { // stop when you've looked at exactly the same number of bytes than // the RLP list header indicates - let at_the_end = b.is_equal(offset, end_idx); - // offset always equals offset after we've reached end_idx so before_the_end - // is only true when we haven't reached the end yet - let before_the_end = b.not(at_the_end); - + let before_the_end = b.is_not_equal(offset, end_idx); + let offset_to_use = b.select(before_the_end, offset, zero); // read the header starting from the offset - let header = decode_header(b, data, offset); + let header = decode_header(b, data, offset_to_use); let new_offset = b.add(header.offset, header.len); dec_off[i] = header.offset; @@ -302,8 +286,7 @@ pub fn decode_fixed_list, const D: usize, const N: // move offset to the next field in the list // updates offset such that is is either < end_idx or after that // always equals to end_idx - let diff = b.sub(new_offset, offset); - offset = b.mul_add(before_the_end.target, diff, offset); + offset = b.select(before_the_end, new_offset, offset); num_fields = b.add(num_fields, before_the_end.target); } @@ -315,24 +298,6 @@ pub fn decode_fixed_list, const D: usize, const N: } } -/// Returns an element of the array at index n -/// TODO: replace with random_access from plonky2 and compare constraints -pub fn quin_selector, const D: usize>( - b: &mut CircuitBuilder, - arr: &[Target], - n: Target, -) -> Target { - let mut sum = b.zero(); - for (i, el) in arr.iter().enumerate() { - let i_target = b.constant(F::from_canonical_usize(i)); - let is_eq = b.is_equal(i_target, n); - // (i == n (idx) ) * element - sum = b.mul_add(is_eq.target, *el, sum); - } - - sum -} - #[cfg(test)] mod tests { use std::array::from_fn as create_array; @@ -357,8 +322,6 @@ mod tests { use crate::utils::{keccak256, less_than_or_equal_to, IntTargetWriter}; 
use crate::{C, D, F}; - use super::quin_selector; - /// Returns an array of length `M` from the array `arr` starting at index `offset` pub fn extract_array, const D: usize, const M: usize>( b: &mut CircuitBuilder, @@ -379,7 +342,7 @@ mod tests { let j = b.mul(lt.target, i_plus_n_target); // out_val = arr[((i+n)<=n+M) * (i+n)] - *out_val = quin_selector(b, arr, j); + *out_val = super::extract_value(b, arr, j); } out @@ -545,7 +508,11 @@ mod tests { let len_of_len = builder.constant(F::from_canonical_u64(2)); let zero = builder.zero(); - let res = super::data_len(&mut builder, &data, len_of_len, zero); + let poss_length_bytes: [Target; MAX_LEN_BYTES] = std::array::from_fn(|i| { + let index = builder.add_const(zero, F::from_canonical_usize(i + 1)); + super::extract_value(&mut builder, &data, index) + }); + let res = super::data_len(&mut builder, &poss_length_bytes, len_of_len); builder.connect(res, ret_target); builder.register_public_inputs(&data); diff --git a/mp2-v1/src/api.rs b/mp2-v1/src/api.rs index 201ce0ecb..7aa537a3e 100644 --- a/mp2-v1/src/api.rs +++ b/mp2-v1/src/api.rs @@ -10,13 +10,14 @@ use crate::{ self, compute_metadata_digest as length_metadata_digest, LengthCircuitInput, }, values_extraction::{ - self, compute_id_with_prefix, + self, gadgets::{ column_info::{ExtractedColumnInfo, InputColumnInfo}, metadata_gadget::TableMetadata, }, - identifier_block_column, identifier_for_value_column, INNER_KEY_ID_PREFIX, KEY_ID_PREFIX, - OUTER_KEY_ID_PREFIX, + identifier_block_column, identifier_for_inner_mapping_key_column, + identifier_for_outer_mapping_key_column, identifier_for_value_column, INNER_KEY_ID_PREFIX, + KEY_ID_PREFIX, OUTER_KEY_ID_PREFIX, }, MAX_RECEIPT_LEAF_NODE_LEN, }; @@ -255,34 +256,32 @@ impl SlotInputs { let input_columns = match num_mapping_keys { 0 => vec![], 1 => { - let identifier = compute_id_with_prefix( - KEY_ID_PREFIX, + let identifier = identifier_for_outer_mapping_key_column( slot, contract_address, chain_id, extra.clone(), ); - let 
input_column = InputColumnInfo::new(&[slot], identifier, KEY_ID_PREFIX, 32); + + let input_column = InputColumnInfo::new(&[slot], identifier, KEY_ID_PREFIX); vec![input_column] } 2 => { - let outer_identifier = compute_id_with_prefix( - OUTER_KEY_ID_PREFIX, + let outer_identifier = identifier_for_outer_mapping_key_column( slot, contract_address, chain_id, extra.clone(), ); - let inner_identifier = compute_id_with_prefix( - INNER_KEY_ID_PREFIX, + let inner_identifier = identifier_for_inner_mapping_key_column( slot, contract_address, chain_id, extra.clone(), ); vec![ - InputColumnInfo::new(&[slot], outer_identifier, OUTER_KEY_ID_PREFIX, 32), - InputColumnInfo::new(&[slot], inner_identifier, INNER_KEY_ID_PREFIX, 32), + InputColumnInfo::new(&[slot], outer_identifier, OUTER_KEY_ID_PREFIX), + InputColumnInfo::new(&[slot], inner_identifier, INNER_KEY_ID_PREFIX), ] } _ => vec![], diff --git a/mp2-v1/src/final_extraction/api.rs b/mp2-v1/src/final_extraction/api.rs index 600fb24c7..8e9176fff 100644 --- a/mp2-v1/src/final_extraction/api.rs +++ b/mp2-v1/src/final_extraction/api.rs @@ -11,7 +11,7 @@ use super::{ base_circuit::BaseCircuitInput, lengthed_circuit::LengthedRecursiveWires, merge_circuit::{MergeTable, MergeTableRecursiveWires}, - receipt_circuit::{ReceiptCircuitInput, ReceiptCircuitProofInputs, ReceiptRecursiveWires}, + receipt_circuit::{ReceiptCircuitInput, ReceiptCircuitProofInputs, ReceiptCircuitProofWires}, simple_circuit::SimpleCircuitRecursiveWires, BaseCircuitProofInputs, LengthedCircuit, MergeCircuit, PublicInputs, SimpleCircuit, }; @@ -53,7 +53,7 @@ pub struct PublicParameters { simple: CircuitWithUniversalVerifier, lengthed: CircuitWithUniversalVerifier, merge: CircuitWithUniversalVerifier, - receipt: CircuitWithUniversalVerifier, + receipt: CircuitWithUniversalVerifier, circuit_set: RecursiveCircuits, } diff --git a/mp2-v1/src/final_extraction/receipt_circuit.rs b/mp2-v1/src/final_extraction/receipt_circuit.rs index ae53aa513..b0a9f24aa 100644 --- 
a/mp2-v1/src/final_extraction/receipt_circuit.rs +++ b/mp2-v1/src/final_extraction/receipt_circuit.rs @@ -57,7 +57,7 @@ impl ReceiptExtractionCircuit { // enforce the MPT key extraction reached the root b.connect(value_pi.mpt_key().pointer, minus_one); - // enforce block_pi.state_root == contract_pi.state_root + // enforce block_pi.receipt_root == value_pi.root block_pi .receipt_root() .enforce_equal(b, &OutputHash::from_targets(value_pi.root_hash_info())); @@ -76,15 +76,7 @@ impl ReceiptExtractionCircuit { } } -/// The wires that are needed for the recursive framework, that concerns verifying the input -/// proofs -#[derive(Serialize, Deserialize, Clone, Debug)] -pub(crate) struct ReceiptRecursiveWires { - /// Wires containing the block and value proof - verification: ReceiptCircuitProofWires, -} - -impl CircuitLogicWires for ReceiptRecursiveWires { +impl CircuitLogicWires for ReceiptCircuitProofWires { type CircuitBuilderParams = FinalExtractionBuilderParams; type Inputs = ReceiptCircuitProofInputs; @@ -102,11 +94,11 @@ impl CircuitLogicWires for ReceiptRecursiveWires { verification.get_block_public_inputs(), verification.get_value_public_inputs(), ); - Self { verification } + verification } fn assign_input(&self, inputs: Self::Inputs, pw: &mut PartialWitness) -> anyhow::Result<()> { - inputs.assign_proof_targets(pw, &self.verification)?; + inputs.assign_proof_targets(pw, self)?; Ok(()) } } diff --git a/mp2-v1/src/values_extraction/api.rs b/mp2-v1/src/values_extraction/api.rs index 9ff6fd6eb..679848382 100644 --- a/mp2-v1/src/values_extraction/api.rs +++ b/mp2-v1/src/values_extraction/api.rs @@ -95,7 +95,7 @@ where evm_word: u32, table_info: Vec, ) -> Self { - let input_column = InputColumnInfo::new(&[slot], key_id, KEY_ID_PREFIX, 32); + let input_column = InputColumnInfo::new(&[slot], key_id, KEY_ID_PREFIX); let metadata = TableMetadata::new(&[input_column], &table_info); @@ -123,9 +123,9 @@ where // but are used in proving we are looking at the correct node. 
For instance mapping keys are used to calculate the position of a leaf node // that we need to extract from, but only the output of a keccak hash of some combination of them is included in the node, hence we feed them in as witness. let outer_input_column = - InputColumnInfo::new(&[slot], outer_key_data.1, OUTER_KEY_ID_PREFIX, 32); + InputColumnInfo::new(&[slot], outer_key_data.1, OUTER_KEY_ID_PREFIX); let inner_input_column = - InputColumnInfo::new(&[slot], inner_key_data.1, INNER_KEY_ID_PREFIX, 32); + InputColumnInfo::new(&[slot], inner_key_data.1, INNER_KEY_ID_PREFIX); let metadata = TableMetadata::new(&[outer_input_column, inner_input_column], &table_info); @@ -517,14 +517,20 @@ mod tests { *, }; use crate::{ - tests::TEST_MAX_COLUMNS, values_extraction::storage_value_digest, MAX_RECEIPT_LEAF_NODE_LEN, + tests::TEST_MAX_COLUMNS, + values_extraction::{ + compute_leaf_mapping_metadata_digest, compute_leaf_mapping_of_mappings_metadata_digest, + compute_leaf_mapping_of_mappings_values_digest, compute_leaf_mapping_values_digest, + compute_leaf_single_metadata_digest, compute_leaf_single_values_digest, + }, + MAX_RECEIPT_LEAF_NODE_LEN, }; use alloy::primitives::Address; use eth_trie::{EthTrie, MemoryDB, Trie}; use itertools::Itertools; use log::info; use mp2_common::{ - eth::{left_pad32, StorageSlot, StorageSlotNode}, + eth::{StorageSlot, StorageSlotNode}, group_hashing::weierstrass_to_point, mpt_sequential::utils::bytes_to_nibbles, types::MAPPING_LEAF_VALUE_LEN, @@ -1083,8 +1089,8 @@ mod tests { { // Simple variable slot StorageSlot::Simple(slot) => { - let metadata_digest = metadata.digest(); - let values_digest = storage_value_digest(&metadata, &[], &value, &test_slot); + let metadata_digest = compute_leaf_single_metadata_digest(&test_slot); + let values_digest = compute_leaf_single_values_digest(&test_slot, value); let circuit_input = CircuitInput::new_single_variable_leaf( node, @@ -1097,10 +1103,14 @@ mod tests { } // Mapping variable 
StorageSlot::Mapping(mapping_key, slot) => { - let padded_key = left_pad32(mapping_key); - let metadata_digest = metadata.digest(); - let values_digest = - storage_value_digest(&metadata, &[&padded_key], &value, &test_slot); + let key_id = metadata.input_columns()[0].identifier().to_canonical_u64(); + let metadata_digest = compute_leaf_mapping_metadata_digest(&test_slot, key_id); + let values_digest = compute_leaf_mapping_values_digest( + &test_slot, + value, + mapping_key.clone(), + key_id, + ); let outer_key_id = metadata.input_columns()[0].identifier().0; @@ -1118,8 +1128,8 @@ mod tests { StorageSlot::Node(StorageSlotNode::Struct(parent, _)) => match *parent.clone() { // Simple Struct StorageSlot::Simple(slot) => { - let metadata_digest = metadata.digest(); - let values_digest = storage_value_digest(&metadata, &[], &value, &test_slot); + let metadata_digest = compute_leaf_single_metadata_digest(&test_slot); + let values_digest = compute_leaf_single_values_digest(&test_slot, value); let circuit_input = CircuitInput::new_single_variable_leaf( node, @@ -1132,18 +1142,20 @@ mod tests { } // Mapping Struct StorageSlot::Mapping(mapping_key, slot) => { - let padded_key = left_pad32(&mapping_key); - let metadata_digest = metadata.digest(); - let values_digest = - storage_value_digest(&metadata, &[&padded_key], &value, &test_slot); - - let outer_key_id = metadata.input_columns()[0].identifier().0; + let key_id = metadata.input_columns()[0].identifier().to_canonical_u64(); + let metadata_digest = compute_leaf_mapping_metadata_digest(&test_slot, key_id); + let values_digest = compute_leaf_mapping_values_digest( + &test_slot, + value, + mapping_key.clone(), + key_id, + ); let circuit_input = CircuitInput::new_mapping_variable_leaf( node, slot as u8, - mapping_key, - outer_key_id, + mapping_key.clone(), + key_id, evm_word, table_info.to_vec(), ); @@ -1154,27 +1166,28 @@ mod tests { StorageSlot::Node(StorageSlotNode::Mapping(grand, inner_mapping_key)) => { match *grand { 
StorageSlot::Mapping(outer_mapping_key, slot) => { - let padded_outer_key = left_pad32(&outer_mapping_key); - let padded_inner_key = left_pad32(&inner_mapping_key); - let metadata_digest = metadata.digest(); - let values_digest = storage_value_digest( - &metadata, - &[&padded_outer_key, &padded_inner_key], - &value, + let input_columns = metadata.input_columns(); + let outer_key_id = input_columns[0].identifier().to_canonical_u64(); + let inner_key_id = input_columns[1].identifier().to_canonical_u64(); + let outer_mapping_data = (outer_mapping_key, outer_key_id); + let inner_mapping_data = (inner_mapping_key, inner_key_id); + let metadata_digest = compute_leaf_mapping_of_mappings_metadata_digest( &test_slot, + outer_key_id, + inner_key_id, + ); + let values_digest = compute_leaf_mapping_of_mappings_values_digest( + &test_slot, + value, + outer_mapping_data.clone(), + inner_mapping_data.clone(), ); - - let key_ids = metadata - .input_columns() - .iter() - .map(|col| col.identifier().0) - .collect::>(); let circuit_input = CircuitInput::new_mapping_of_mappings_leaf( node, slot as u8, - (outer_mapping_key, key_ids[0]), - (inner_mapping_key, key_ids[1]), + outer_mapping_data, + inner_mapping_data, evm_word, table_info.to_vec(), ); diff --git a/mp2-v1/src/values_extraction/gadgets/column_info.rs b/mp2-v1/src/values_extraction/gadgets/column_info.rs index 6ee1fc79f..f7d9084d3 100644 --- a/mp2-v1/src/values_extraction/gadgets/column_info.rs +++ b/mp2-v1/src/values_extraction/gadgets/column_info.rs @@ -50,21 +50,12 @@ pub struct InputColumnInfo { pub identifier: F, /// Prefix used in computing mpt metadata pub metadata_prefix: [u8; 32], - /// The length (in bits) of the field to extract in the EVM word - pub length: F, } impl InputColumnInfo { /// Construct a new instance of [`ColumnInfo`] - pub fn new( - extraction_identifier: &[u8], - identifier: u64, - metadata_prefix: &[u8], - length: usize, - ) -> Self { - let mut extraction_vec = 
extraction_identifier.pack(Endianness::Little); - extraction_vec.resize(PACKED_HASH_LEN, 0u32); - extraction_vec.reverse(); + pub fn new(extraction_identifier: &[u8], identifier: u64, metadata_prefix: &[u8]) -> Self { + let extraction_vec = left_pad32(extraction_identifier).pack(Endianness::Big); let extraction_identifier = extraction_vec .into_iter() .map(F::from_canonical_u32) @@ -72,13 +63,11 @@ impl InputColumnInfo { .try_into() .expect("This should never fail"); let identifier = F::from_canonical_u64(identifier); - let length = F::from_canonical_usize(length); Self { extraction_identifier, identifier, metadata_prefix: left_pad::<32>(metadata_prefix), - length, } } @@ -120,10 +109,6 @@ impl InputColumnInfo { .collect() } - pub fn length(&self) -> F { - self.length - } - pub fn value_digest(&self, value: &[u8]) -> Point { let bytes = left_pad32(value); @@ -296,40 +281,33 @@ impl ExtractedColumnInfo { pub fn value_digest(&self, value: &[u8]) -> Point { // If the column identifier is zero then its a dummy column. This is because the column identifier // is always computed as the output of a hash which is EXTREMELY unlikely to be exactly zero. 
- if self.identifier() == F::ZERO { - Point::NEUTRAL - } else { - let bytes = self.extract_value(value); - - let inputs = once(self.identifier()) - .chain( - bytes - .pack(Endianness::Big) - .into_iter() - .map(F::from_canonical_u32), - ) - .collect_vec(); - map_to_curve_point(&inputs) - } + + let bytes = self.extract_value(value); + + let inputs = once(self.identifier()) + .chain( + bytes + .pack(Endianness::Big) + .into_iter() + .map(F::from_canonical_u32), + ) + .collect_vec(); + map_to_curve_point(&inputs) } pub fn receipt_value_digest(&self, value: &[u8], offset: usize) -> Point { - if self.identifier().0 == 0 { - Point::NEUTRAL - } else { - let start = offset + self.byte_offset().0 as usize; - let bytes = left_pad32(&value[start..start + self.length.0 as usize]); - - let inputs = once(self.identifier()) - .chain( - bytes - .pack(Endianness::Big) - .into_iter() - .map(F::from_canonical_u32), - ) - .collect_vec(); - map_to_curve_point(&inputs) - } + let start = offset + self.byte_offset().0 as usize; + let bytes = left_pad32(&value[start..start + self.length.0 as usize]); + + let inputs = once(self.identifier()) + .chain( + bytes + .pack(Endianness::Big) + .into_iter() + .map(F::from_canonical_u32), + ) + .collect_vec(); + map_to_curve_point(&inputs) } } @@ -365,10 +343,9 @@ pub struct ExtractedColumnInfoTarget { /// this would be either the offset from the start of the receipt or from the start of the /// relevant log pub(crate) byte_offset: Target, - /// The length (in bits) of the field to extract in the EVM word + /// The length in bytes of the field to extract in the EVM word pub(crate) length: Target, - /// For storage this is the EVM word, for receipts this is either 1 or 0 and indicates whether to - /// use the relevant log offset or not. 
+ /// For storage this is the EVM word, for receipts this is zero pub(crate) location_offset: Target, } @@ -473,24 +450,30 @@ pub struct InputColumnInfoTarget { pub extraction_identifier: [Target; PACKED_HASH_LEN], /// Column identifier pub identifier: Target, - /// Prefix used in computing mpt metadata - pub metadata_prefix: [Target; PACKED_HASH_LEN], - /// The length of the field to extract in the EVM word - pub length: Target, } impl InputColumnInfoTarget { /// Compute the MPT metadata. - pub fn mpt_metadata(&self, b: &mut CBuilder) -> HashOutTarget { + pub fn mpt_metadata( + &self, + b: &mut CBuilder, + metadata_prefix: &[Target; PACKED_HASH_LEN], + extraction_id: &[Target; PACKED_HASH_LEN], + ) -> HashOutTarget { // key_column_md = H( "\0KEY" || slot) - let inputs = [self.metadata_prefix(), self.extraction_id().as_slice()].concat(); + let inputs = [metadata_prefix.as_slice(), extraction_id.as_slice()].concat(); b.hash_n_to_hash_no_pad::(inputs) } /// Compute the column information digest. 
- pub fn digest(&self, b: &mut CBuilder) -> CurveTarget { - let metadata = self.mpt_metadata(b); + pub fn digest( + &self, + b: &mut CBuilder, + metadata_prefix: &[Target; PACKED_HASH_LEN], + extraction_id: &[Target; PACKED_HASH_LEN], + ) -> CurveTarget { + let metadata = self.mpt_metadata(b, metadata_prefix, extraction_id); // digest = D(mpt_metadata || info.identifier) let inputs = [metadata.elements.as_slice(), &[self.identifier()]].concat(); @@ -505,14 +488,6 @@ impl InputColumnInfoTarget { pub fn identifier(&self) -> Target { self.identifier } - - pub fn metadata_prefix(&self) -> &[Target] { - self.metadata_prefix.as_slice() - } - - pub fn length(&self) -> Target { - self.length - } } pub trait CircuitBuilderColumnInfo { @@ -541,15 +516,11 @@ impl CircuitBuilderColumnInfo for CBuilder { fn add_virtual_input_column_info(&mut self) -> InputColumnInfoTarget { let extraction_identifier: [Target; PACKED_HASH_LEN] = self.add_virtual_target_arr(); - let metadata_prefix: [Target; PACKED_HASH_LEN] = self.add_virtual_target_arr(); - - let [identifier, length] = self.add_virtual_target_arr(); + let identifier = self.add_virtual_target(); InputColumnInfoTarget { extraction_identifier, identifier, - metadata_prefix, - length, } } } @@ -617,13 +588,7 @@ impl> WitnessWriteColumnInfo for T { .iter() .zip(value.extraction_identifier.iter()) .for_each(|(t, v)| self.set_target(*t, *v)); - target - .metadata_prefix - .iter() - .zip(value.metadata_prefix().iter()) - .for_each(|(t, v)| self.set_target(*t, *v)); - self.set_target(target.length, value.length()); self.set_target(target.identifier, value.identifier()); } } diff --git a/mp2-v1/src/values_extraction/gadgets/metadata_gadget.rs b/mp2-v1/src/values_extraction/gadgets/metadata_gadget.rs index b5b0c15cb..6118e290a 100644 --- a/mp2-v1/src/values_extraction/gadgets/metadata_gadget.rs +++ b/mp2-v1/src/values_extraction/gadgets/metadata_gadget.rs @@ -1,7 +1,5 @@ //! 
The metadata gadget is used to ensure the correct extraction from the set of all identifiers. -use crate::values_extraction::{DATA_PREFIX, GAS_USED_PREFIX, TOPIC_PREFIX, TX_INDEX_PREFIX}; - use super::column_info::{ CircuitBuilderColumnInfo, ExtractedColumnInfo, ExtractedColumnInfoTarget, InputColumnInfo, InputColumnInfoTarget, WitnessWriteColumnInfo, @@ -12,6 +10,7 @@ use mp2_common::{ array::{Array, Targetable}, eth::{left_pad32, EventLogInfo, StorageSlot}, group_hashing::CircuitBuilderGroupHashing, + keccak::PACKED_HASH_LEN, poseidon::{empty_poseidon_hash, hash_to_int_value, H}, serialization::{ deserialize_array, deserialize_long_array, serialize_array, serialize_long_array, @@ -21,7 +20,7 @@ use mp2_common::{ F, }; use plonky2::{ - field::types::{Field, PrimeField64}, + field::types::Field, hash::hash_types::HashOut, iop::{ target::{BoolTarget, Target}, @@ -81,7 +80,7 @@ impl TableMetadata { let input_columns = input_prefixes .iter() - .map(|prefix| InputColumnInfo::new(extraction_identifier, rng.gen(), prefix, 32)) + .map(|prefix| InputColumnInfo::new(extraction_identifier, rng.gen(), prefix)) .collect::>(); let num_actual_columns = rng.gen_range(1..=NUM_EXTRACTED_COLUMNS); @@ -291,26 +290,32 @@ impl TableMetadata { } } -pub struct TableMetadataGadget; - -impl - TableMetadataGadget -{ - pub(crate) fn build( +impl TableMetadata { + pub(crate) fn build( b: &mut CBuilder, - ) -> TableMetadataTarget { + num_input_columns: usize, + ) -> TableMetadataTarget { + let real_columns = array::from_fn(|_| b.add_virtual_bool_target_safe()); + + let num_actual_columns = b.add_many(real_columns.iter().map(|bool_tar| bool_tar.target)); + let num_actual_columns = b.add_const( + num_actual_columns, + F::from_canonical_usize(num_input_columns), + ); TableMetadataTarget { - input_columns: array::from_fn(|_| b.add_virtual_input_column_info()), + input_columns: (0..num_input_columns) + .map(|_| b.add_virtual_input_column_info()) + .collect::>(), extracted_columns: 
array::from_fn(|_| b.add_virtual_extracted_column_info()), - real_columns: array::from_fn(|_| b.add_virtual_bool_target_safe()), - num_actual_columns: b.add_virtual_target(), + real_columns, + num_actual_columns, } } - pub(crate) fn assign( + pub(crate) fn assign( pw: &mut PartialWitness, columns_metadata: &TableMetadata, - metadata_target: &TableMetadataTarget, + metadata_target: &TableMetadataTarget, ) { // First we check that we are trying to assign from a `TableMetadata` with the correct // number of columns @@ -345,117 +350,13 @@ impl .for_each(|(i, &b_target)| { pw.set_bool_target(b_target, i < columns_metadata.extracted_columns.len()) }); - - pw.set_target( - metadata_target.num_actual_columns, - F::from_canonical_usize(columns_metadata.num_actual_columns), - ); - } -} - -impl - From> for TableMetadata -{ - fn from(event: EventLogInfo) -> Self { - let extraction_id = event.event_signature; - - let tx_index_input = [ - event.address.as_slice(), - event.event_signature.as_slice(), - TX_INDEX_PREFIX, - ] - .concat() - .into_iter() - .map(F::from_canonical_u8) - .collect::>(); - let tx_index_column_id = H::hash_no_pad(&tx_index_input).elements[0].to_canonical_u64(); - - let gas_used_input = [ - event.address.as_slice(), - event.event_signature.as_slice(), - GAS_USED_PREFIX, - ] - .concat() - .into_iter() - .map(F::from_canonical_u8) - .collect::>(); - let gas_used_column_id = H::hash_no_pad(&gas_used_input).elements[0].to_canonical_u64(); - - let tx_index_input_column = InputColumnInfo::new( - extraction_id.as_slice(), - tx_index_column_id, - TX_INDEX_PREFIX, - 32, - ); - let gas_used_index_column = InputColumnInfo::new( - extraction_id.as_slice(), - gas_used_column_id, - GAS_USED_PREFIX, - 32, - ); - - let topic_columns = event - .topics - .iter() - .enumerate() - .map(|(j, &offset)| { - let input = [ - event.address.as_slice(), - event.event_signature.as_slice(), - TOPIC_PREFIX, - &[j as u8 + 1], - ] - .concat() - .into_iter() - .map(F::from_canonical_u8) - 
.collect::>(); - - let topic_id = H::hash_no_pad(&input).elements[0].to_canonical_u64(); - ExtractedColumnInfo::new(extraction_id.as_slice(), topic_id, offset, 32, 0) - }) - .collect::>(); - - let data_columns = event - .data - .iter() - .enumerate() - .map(|(j, &offset)| { - let input = [ - event.address.as_slice(), - event.event_signature.as_slice(), - DATA_PREFIX, - &[j as u8 + 1], - ] - .concat() - .into_iter() - .map(F::from_canonical_u8) - .collect::>(); - - let data_id = H::hash_no_pad(&input).elements[0].to_canonical_u64(); - ExtractedColumnInfo::new(extraction_id.as_slice(), data_id, offset, 32, 0) - }) - .collect::>(); - - let extracted_columns = [topic_columns, data_columns].concat(); - - TableMetadata::new( - &[tx_index_input_column, gas_used_index_column], - &extracted_columns, - ) } } #[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)] -pub(crate) struct TableMetadataTarget< - const MAX_EXTRACTED_COLUMNS: usize, - const INPUT_COLUMNS: usize, -> { - #[serde( - serialize_with = "serialize_long_array", - deserialize_with = "deserialize_long_array" - )] +pub(crate) struct TableMetadataTarget { /// Information about all input columns of the table - pub(crate) input_columns: [InputColumnInfoTarget; INPUT_COLUMNS], + pub(crate) input_columns: Vec, #[serde( serialize_with = "serialize_long_array", deserialize_with = "deserialize_long_array" @@ -479,15 +380,19 @@ type ReceiptExtractedOutput = ( CurveTarget, ); -impl - TableMetadataTarget -{ +impl TableMetadataTarget { #[cfg(test)] - pub fn metadata_digest(&self, b: &mut CBuilder) -> CurveTarget { + pub fn metadata_digest( + &self, + b: &mut CBuilder, + metadata_prefixes: &[&[Target; PACKED_HASH_LEN]], + extraction_id: &[Target; PACKED_HASH_LEN], + ) -> CurveTarget { let input_points = self .input_columns .iter() - .map(|column| column.digest(b)) + .zip_eq(metadata_prefixes.iter()) + .map(|(column, metadata_prefix)| column.digest(b, metadata_prefix, extraction_id)) .collect::>(); let curve_zero = 
b.curve_zero(); @@ -511,17 +416,23 @@ impl pub(crate) fn inputs_digests( &self, b: &mut CBuilder, - input_values: &[Array; INPUT_COLUMNS], + input_values: &[Array], + metadata_prefixes: &[&[Target; PACKED_HASH_LEN]], + extraction_id: &[Target; PACKED_HASH_LEN], ) -> (CurveTarget, CurveTarget) { let (metadata_points, value_points): (Vec, Vec) = self .input_columns .iter() - .zip(input_values.iter()) - .map(|(column, input_val)| { + .zip_eq(input_values.iter()) + .zip_eq(metadata_prefixes) + .map(|((column, input_val), metadata_prefix)| { let inputs = once(column.identifier) .chain(input_val.arr.iter().map(|t| t.to_target())) .collect_vec(); - (column.digest(b), b.map_to_curve_point(&inputs)) + ( + column.digest(b, metadata_prefix, extraction_id), + b.map_to_curve_point(&inputs), + ) }) .unzip(); @@ -541,13 +452,13 @@ impl b: &mut CBuilder, value: &Array, offset: Target, - extraction_id: &[Target; 8], + extraction_id: &[Target; PACKED_HASH_LEN], ) -> (CurveTarget, CurveTarget) { let one = b.one(); let curve_zero = b.curve_zero(); - let ex_id_arr = Array::::from(*extraction_id); + let ex_id_arr = Array::::from(*extraction_id); let (metadata_points, value_points): (Vec, Vec) = self .extracted_columns @@ -562,7 +473,8 @@ impl let correct_offset = b.is_equal(offset, column.location_offset()); // We check that we have the correct base extraction id - let column_ex_id_arr = Array::::from(column.extraction_id()); + let column_ex_id_arr = + Array::::from(column.extraction_id()); let correct_extraction_id = column_ex_id_arr.equals(b, &ex_id_arr); // We only extract if we are in the correct location AND `column.is_extracted` is true @@ -686,19 +598,20 @@ pub(crate) mod tests { impl UserCircuit for TestMedataCircuit { // Metadata target + slot + expected number of actual columns + expected metadata digest type Wires = ( - TableMetadataTarget, + TableMetadataTarget, Target, Target, CurveTarget, ); fn build(b: &mut CBuilder) -> Self::Wires { - let metadata_target = 
TableMetadataGadget::build(b); + let metadata_target = TableMetadata::build(b, 0); let slot = b.add_virtual_target(); + let zero = b.zero(); let expected_num_actual_columns = b.add_virtual_target(); let expected_metadata_digest = b.add_virtual_curve_target(); - - let metadata_digest = metadata_target.metadata_digest(b); + let extraction_id = [zero, zero, zero, zero, zero, zero, zero, slot]; + let metadata_digest = metadata_target.metadata_digest(b, &[], &extraction_id); b.connect_curve_points(metadata_digest, expected_metadata_digest); @@ -716,7 +629,7 @@ pub(crate) mod tests { } fn prove(&self, pw: &mut PartialWitness, wires: &Self::Wires) { - TableMetadataGadget::assign(pw, &self.columns_metadata, &wires.0); + TableMetadata::assign(pw, &self.columns_metadata, &wires.0); pw.set_target(wires.1, F::from_canonical_u8(self.slot)); pw.set_target( diff --git a/mp2-v1/src/values_extraction/leaf_mapping.rs b/mp2-v1/src/values_extraction/leaf_mapping.rs index d0987fc3b..52efc4fee 100644 --- a/mp2-v1/src/values_extraction/leaf_mapping.rs +++ b/mp2-v1/src/values_extraction/leaf_mapping.rs @@ -4,17 +4,18 @@ use crate::values_extraction::public_inputs::{PublicInputs, PublicInputsArgs}; use anyhow::Result; use mp2_common::{ - array::{Array, Targetable, Vector, VectorWire}, + array::{Array, Vector, VectorWire}, + eth::left_pad32, group_hashing::CircuitBuilderGroupHashing, - keccak::{InputData, KeccakCircuit, KeccakWires}, + keccak::{InputData, KeccakCircuit, KeccakWires, PACKED_HASH_LEN}, mpt_sequential::{ utils::left_pad_leaf_value, MPTLeafOrExtensionNode, MAX_LEAF_VALUE_LEN, PAD_LEN, }, poseidon::hash_to_int_target, public_inputs::PublicInputCommon, storage_key::{MappingSlot, MappingStructSlotWires}, - types::{CBuilder, GFp}, - utils::{Endianness, ToTargets}, + types::{CBuilder, GFp, MAPPING_LEAF_VALUE_LEN}, + utils::{Endianness, Packer, ToTargets}, CHasher, D, F, }; use plonky2::{ @@ -32,7 +33,10 @@ use recursion_framework::circuit_builder::CircuitLogicWires; use 
serde::{Deserialize, Serialize}; use std::iter::once; -use super::gadgets::metadata_gadget::{TableMetadata, TableMetadataGadget, TableMetadataTarget}; +use super::{ + gadgets::metadata_gadget::{TableMetadata, TableMetadataTarget}, + KEY_ID_PREFIX, +}; #[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)] pub struct LeafMappingWires { @@ -45,7 +49,7 @@ pub struct LeafMappingWires { /// Storage mapping variable slot pub(crate) slot: MappingStructSlotWires, /// MPT metadata - metadata: TableMetadataTarget, + metadata: TableMetadataTarget, /// The offset from the base slot offset: Target, } @@ -63,7 +67,7 @@ impl LeafMappingCircuit LeafMappingWires { let zero = b.zero(); - let metadata = TableMetadataGadget::build(b); + let metadata = TableMetadata::build(b, 1); let offset = b.add_virtual_target(); let slot = MappingSlot::build_struct(b, offset); @@ -76,19 +80,28 @@ impl LeafMappingCircuit = left_pad_leaf_value(b, &wires.value); + let value: Array = left_pad_leaf_value(b, &wires.value); // Compute the metadata digest and the value digest - let packed_mapping_key = Array::::pack(&slot.mapping_key, b, Endianness::Big); - - let (input_metadata_digest, input_value_digest) = - metadata.inputs_digests(b, &[packed_mapping_key.clone()]); - let (extracted_metadata_digest, extracted_value_digest) = metadata.extracted_digests::<32>( + let packed_mapping_key = slot.mapping_key.pack(b, Endianness::Big); + + let key_prefix: [Target; PACKED_HASH_LEN] = left_pad32(KEY_ID_PREFIX) + .pack(Endianness::Big) + .iter() + .map(|num| b.constant(F::from_canonical_u32(*num))) + .collect::>() + .try_into() + .expect("This should never fail"); + + let extraction_id = [zero, zero, zero, zero, zero, zero, zero, slot.mapping_slot]; + let (input_metadata_digest, input_value_digest) = metadata.inputs_digests( b, - &value, - offset, - &[zero, zero, zero, zero, zero, zero, zero, slot.mapping_slot], + &[packed_mapping_key.clone()], + &[&key_prefix], + &extraction_id, ); + let 
(extracted_metadata_digest, extracted_value_digest) = + metadata.extracted_digests::(b, &value, offset, &extraction_id); let selector = b.is_equal(zero, offset); let curve_zero = b.curve_zero(); @@ -101,11 +114,7 @@ impl LeafMappingCircuit( - packed_mapping_key - .arr - .iter() - .map(|t| t.to_target()) - .collect::>(), + packed_mapping_key.downcast_to_targets().arr.to_vec(), ); // row_id = H2int(row_unique_data || num_actual_columns) let inputs = row_unique_data @@ -158,7 +167,7 @@ impl LeafMappingCircuit { /// Full node from the MPT proof - pub(crate) node: VectorWire, + pub(crate) node: VectorWire, /// Leaf value pub(crate) value: Array, /// MPT root - pub(crate) root: KeccakWires<{ PAD_LEN(69) }>, + pub(crate) root: KeccakWires<{ PAD_LEN(MAX_LEAF_NODE_LEN) }>, /// Mapping slot associating wires including outer and inner mapping keys pub(crate) slot: MappingOfMappingsSlotWires, /// MPT metadata - metadata: TableMetadataTarget, + metadata: TableMetadataTarget, offset: Target, } @@ -66,7 +70,7 @@ pub struct LeafMappingOfMappingsCircuit { impl LeafMappingOfMappingsCircuit { pub fn build(b: &mut CBuilder) -> LeafMappingOfMappingsWires { let offset = b.add_virtual_target(); - let metadata = TableMetadataGadget::::build(b); + let metadata = TableMetadata::build(b, 2); let slot = MappingSlot::build_mapping_of_mappings(b, offset); let zero = b.zero(); @@ -76,23 +80,40 @@ impl LeafMappingOfMappingsCircuit = wires.node; + let node: VectorWire = wires.node; let root = wires.root; // Left pad the leaf value. 
let value: Array = left_pad_leaf_value(b, &wires.value); // Compute the metadata digest and the value digest - let input_values: [Array; 2] = [&slot.outer_key, &slot.inner_key] - .map(|key| Array::::pack(key, b, Endianness::Big)); - - let (input_metadata_digest, input_value_digest) = metadata.inputs_digests(b, &input_values); - let (extracted_metadata_digest, extracted_value_digest) = metadata.extracted_digests::<32>( + let input_values: [Array; 2] = + [&slot.outer_key, &slot.inner_key] + .map(|key| Array::::pack(key, b, Endianness::Big)); + // Add the key prefixes to the circuit as constants + let outer_key_prefix: [Target; PACKED_HASH_LEN] = left_pad32(OUTER_KEY_ID_PREFIX) + .pack(Endianness::Big) + .iter() + .map(|num| b.constant(F::from_canonical_u32(*num))) + .collect::>() + .try_into() + .expect("This should never fail"); + let inner_key_prefix: [Target; PACKED_HASH_LEN] = left_pad32(INNER_KEY_ID_PREFIX) + .pack(Endianness::Big) + .iter() + .map(|num| b.constant(F::from_canonical_u32(*num))) + .collect::>() + .try_into() + .expect("This should never fail"); + let extraction_id = [zero, zero, zero, zero, zero, zero, zero, slot.mapping_slot]; + let (input_metadata_digest, input_value_digest) = metadata.inputs_digests( b, - &value, - offset, - &[zero, zero, zero, zero, zero, zero, zero, slot.mapping_slot], + &input_values, + &[&outer_key_prefix, &inner_key_prefix], + &extraction_id, ); + let (extracted_metadata_digest, extracted_value_digest) = + metadata.extracted_digests::(b, &value, offset, &extraction_id); let metadata_digest = b.add_curve_point(&[input_metadata_digest, extracted_metadata_digest]); @@ -106,12 +127,7 @@ impl LeafMappingOfMappingsCircuit>() - }) + .flat_map(|arr| arr.downcast_to_targets().arr) .collect::>(); let row_unique_data = b.hash_n_to_hash_no_pad::(inputs); // row_id = H2int(row_unique_data || num_actual_columns) @@ -170,11 +186,7 @@ impl LeafMappingOfMappingsCircuit::assign( - pw, - &self.metadata, - &wires.metadata, - ); + 
TableMetadata::assign(pw, &self.metadata, &wires.metadata); pw.set_target(wires.offset, F::from_canonical_u8(self.evm_word)); } } @@ -294,6 +306,7 @@ mod tests { storage_slot.clone(), table_metadata.extracted_columns.clone(), ); + let values_digest = storage_value_digest( &table_metadata, &[outer_key, inner_key], diff --git a/mp2-v1/src/values_extraction/leaf_receipt.rs b/mp2-v1/src/values_extraction/leaf_receipt.rs index fbafab9b2..84226ae59 100644 --- a/mp2-v1/src/values_extraction/leaf_receipt.rs +++ b/mp2-v1/src/values_extraction/leaf_receipt.rs @@ -1,23 +1,24 @@ //! Module handling the leaf node inside a Receipt Trie use super::{ - gadgets::metadata_gadget::{TableMetadata, TableMetadataGadget, TableMetadataTarget}, + gadgets::metadata_gadget::{TableMetadata, TableMetadataTarget}, public_inputs::{PublicInputs, PublicInputsArgs}, + GAS_USED_PREFIX, TX_INDEX_PREFIX, }; use alloy::primitives::Address; use anyhow::Result; use mp2_common::{ - array::{Array, Targetable, Vector, VectorWire}, - eth::EventLogInfo, + array::{extract_value, Array, Targetable, Vector, VectorWire}, + eth::{left_pad32, EventLogInfo}, group_hashing::CircuitBuilderGroupHashing, - keccak::{InputData, KeccakCircuit, KeccakWires, HASH_LEN}, + keccak::{InputData, KeccakCircuit, KeccakWires, HASH_LEN, PACKED_HASH_LEN}, mpt_sequential::{utils::bytes_to_nibbles, MPTKeyWire, MPTReceiptLeafNode, PAD_LEN}, poseidon::hash_to_int_target, public_inputs::PublicInputCommon, rlp::MAX_KEY_NIBBLE_LEN, types::{CBuilder, GFp}, - utils::{less_than_or_equal_to_unsafe, less_than_unsafe, ToTargets}, + utils::{less_than_unsafe, Endianness, Packer, ToTargets}, CHasher, D, F, }; use plonky2::{ @@ -57,14 +58,12 @@ where /// The key in the MPT Trie pub(crate) mpt_key: MPTKeyWire, /// The table metadata - pub(crate) metadata: TableMetadataTarget, + pub(crate) metadata: TableMetadataTarget, } /// Contains all the information for an [`Event`] in rlp form #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] pub 
struct EventWires { - /// Size in bytes of the whole event - size: Target, /// Packed contract address to check address: Array, /// Byte offset for the address from the beginning of a Log @@ -142,7 +141,7 @@ where // Build the event wires let event_wires = Self::build_event_wires(b); // Build the metadata - let metadata = TableMetadataGadget::build(b); + let metadata = TableMetadata::build(b, 2); let zero = b.zero(); let one = b.one(); @@ -166,7 +165,9 @@ where node.arr.arr[0], F::from_canonical_u64(1) - F::from_canonical_u64(247), ); - let key_header = node.arr.random_access_large_array(b, header_len_len); + // Since header_len_len can be at most 8 bytes its safe for us to just take the first 64 elements of the array here as it will + // always be in this range + let key_header = extract_value(b, &node.arr.arr[..64], header_len_len); let less_than_val = b.constant(F::from_canonical_u8(128)); let single_value = less_than_unsafe(b, key_header, less_than_val, 8); let key_len_maybe = b.add_const(key_header, F::ONE - F::from_canonical_u64(128)); @@ -192,21 +193,22 @@ where let gas_used_len = b.add_const(gas_used_header, -F::from_canonical_u64(128)); let initial_gas_index = b.add(gas_used_offset, one); - let final_gas_index = b.add(gas_used_offset, gas_used_len); + // We want gas_used_offset + gas_used_len + one here because we want to stop our sum one + // after the gas_used_offset + gas_used_len + let final_gas_index = b.add(initial_gas_index, gas_used_len); let combiner = b.constant(F::from_canonical_u64(1 << 8)); - + let mut last_byte_found = b._false(); let gas_used = (0..MAX_GAS_SIZE).fold(zero, |acc, i| { let access_index = b.add_const(initial_gas_index, F::from_canonical_u64(i)); let array_value = node.arr.random_access_large_array(b, access_index); - // If we have extracted a value from an index in the desired range (so lte final_gas_index) we want to add it. 
- // If access_index was strictly less than final_gas_index we need to multiply by 1 << 8 after (since the encoding is big endian) - let valid = less_than_or_equal_to_unsafe(b, access_index, final_gas_index, 12); + // Check to see if we have reached the index where we stop summing + let at_end = b.is_equal(access_index, final_gas_index); + last_byte_found = b.or(at_end, last_byte_found); - let tmp = b.mul(acc, combiner); - let tmp = b.add(tmp, array_value); - b.select(valid, tmp, acc) + let tmp = b.mul_add(acc, combiner, array_value); + b.select(last_byte_found, acc, tmp) }); let zero_u32 = b.zero_u32(); @@ -230,10 +232,31 @@ where zero_u32, U32Target::from_target(gas_used), ]); + // Add the key prefixes to the circuit as constants + let tx_index_prefix: [Target; PACKED_HASH_LEN] = left_pad32(TX_INDEX_PREFIX) + .pack(Endianness::Big) + .iter() + .map(|num| b.constant(F::from_canonical_u32(*num))) + .collect::>() + .try_into() + .expect("This should never fail"); + let gas_used_prefix: [Target; PACKED_HASH_LEN] = left_pad32(GAS_USED_PREFIX) + .pack(Endianness::Big) + .iter() + .map(|num| b.constant(F::from_canonical_u32(*num))) + .collect::>() + .try_into() + .expect("This should never fail"); + let extraction_id_packed = event_wires.event_signature.pack(b, Endianness::Big); + let extraction_id = extraction_id_packed.downcast_to_targets(); // Extract input values - let (input_metadata_digest, input_value_digest) = - metadata.inputs_digests(b, &[tx_index_input.clone(), gas_used_input.clone()]); + let (input_metadata_digest, input_value_digest) = metadata.inputs_digests( + b, + &[tx_index_input.clone(), gas_used_input.clone()], + &[&tx_index_prefix, &gas_used_prefix], + &extraction_id.arr, + ); // Now we verify extracted values let (address_extract, signature_extract, extracted_metadata_digest, extracted_value_digest) = metadata.extracted_receipt_digests( @@ -295,8 +318,6 @@ where } fn build_event_wires(b: &mut CBuilder) -> EventWires { - let size = 
b.add_virtual_target(); - // Packed address let address = Array::::new(b); @@ -310,7 +331,6 @@ where let sig_rel_offset = b.add_virtual_target(); EventWires { - size, address, add_rel_offset, event_signature, @@ -349,16 +369,10 @@ where wires.mpt_key.assign(pw, &key_nibbles, ptr); - TableMetadataGadget::::assign( - pw, - &self.metadata, - &wires.metadata, - ); + TableMetadata::assign(pw, &self.metadata, &wires.metadata); } pub fn assign_event_wires(&self, pw: &mut PartialWitness, wires: &EventWires) { - pw.set_target(wires.size, F::from_canonical_usize(self.size)); - wires .address .assign(pw, &self.address.0.map(GFp::from_canonical_u8)); diff --git a/mp2-v1/src/values_extraction/leaf_single.rs b/mp2-v1/src/values_extraction/leaf_single.rs index 94a045107..951c18971 100644 --- a/mp2-v1/src/values_extraction/leaf_single.rs +++ b/mp2-v1/src/values_extraction/leaf_single.rs @@ -1,7 +1,7 @@ //! Module handling the single variable inside a storage trie #![allow(clippy::identity_op)] use crate::values_extraction::{ - gadgets::metadata_gadget::{TableMetadata, TableMetadataGadget, TableMetadataTarget}, + gadgets::metadata_gadget::{TableMetadata, TableMetadataTarget}, public_inputs::{PublicInputs, PublicInputsArgs}, }; use anyhow::Result; @@ -14,7 +14,7 @@ use mp2_common::{ poseidon::{empty_poseidon_hash, hash_to_int_target}, public_inputs::PublicInputCommon, storage_key::{SimpleSlot, SimpleStructSlotWires}, - types::{CBuilder, GFp}, + types::{CBuilder, GFp, MAPPING_LEAF_VALUE_LEN}, utils::ToTargets, CHasher, D, F, }; @@ -44,7 +44,7 @@ pub struct LeafSingleWires { /// Storage single variable slot slot: SimpleStructSlotWires, /// MPT metadata - metadata: TableMetadataTarget, + metadata: TableMetadataTarget, /// Offset from the base slot, offset: Target, } @@ -60,7 +60,7 @@ pub struct LeafSingleCircuit { impl LeafSingleCircuit { pub fn build(b: &mut CBuilder) -> LeafSingleWires { - let metadata = TableMetadataGadget::build(b); + let metadata = TableMetadata::build(b, 0); let 
offset = b.add_virtual_target(); let slot = SimpleSlot::build_struct(b, offset); let zero = b.zero(); @@ -73,10 +73,10 @@ impl LeafSingleCircuit = left_pad_leaf_value(b, &wires.value); + let value: Array = left_pad_leaf_value(b, &wires.value); // Compute the metadata digest and the value digest - let (metadata_digest, value_digest) = metadata.extracted_digests::<32>( + let (metadata_digest, value_digest) = metadata.extracted_digests::( b, &value, offset, @@ -134,7 +134,7 @@ impl LeafSingleCircuit { @@ -168,8 +169,8 @@ impl StorageSlotInfo { extra.clone(), ); vec![ - InputColumnInfo::new(&[slot], outer_identifier, OUTER_KEY_ID_PREFIX, 32), - InputColumnInfo::new(&[slot], inner_identifier, INNER_KEY_ID_PREFIX, 32), + InputColumnInfo::new(&[slot], outer_identifier, OUTER_KEY_ID_PREFIX), + InputColumnInfo::new(&[slot], inner_identifier, INNER_KEY_ID_PREFIX), ] } _ => vec![], @@ -199,6 +200,76 @@ pub const GAS_USED_PREFIX: &[u8] = b"gas_used"; /// [`GAS_USED_PREFIX`] as a [`str`] pub const GAS_USED_NAME: &str = "gas_used"; +impl + From> for TableMetadata +{ + fn from(event: EventLogInfo) -> Self { + let extraction_id = event.event_signature; + + let tx_index_column_id = + identifier_for_tx_index_column(&event.event_signature, &event.address, &[]); + + let gas_used_column_id = + identifier_for_gas_used_column(&event.event_signature, &event.address, &[]); + + let tx_index_input_column = InputColumnInfo::new( + extraction_id.as_slice(), + tx_index_column_id, + TX_INDEX_PREFIX, + ); + let gas_used_index_column = InputColumnInfo::new( + extraction_id.as_slice(), + gas_used_column_id, + GAS_USED_PREFIX, + ); + + let topic_columns = event + .topics + .iter() + .enumerate() + .map(|(j, &offset)| { + ExtractedColumnInfo::new( + extraction_id.as_slice(), + identifier_for_topic_column( + &event.event_signature, + &event.address, + &[j as u8 + 1], + ), + offset, + 32, + 0, + ) + }) + .collect::>(); + + let data_columns = event + .data + .iter() + .enumerate() + .map(|(j, &offset)| 
{ + ExtractedColumnInfo::new( + extraction_id.as_slice(), + identifier_for_data_column( + &event.event_signature, + &event.address, + &[j as u8 + 1], + ), + offset, + 32, + 0, + ) + }) + .collect::>(); + + let extracted_columns = [topic_columns, data_columns].concat(); + + TableMetadata::new( + &[tx_index_input_column, gas_used_index_column], + &extracted_columns, + ) + } +} + pub fn identifier_block_column() -> ColumnId { let inputs: Vec = BLOCK_ID_DST.to_fields(); H::hash_no_pad(&inputs).elements[0].to_canonical_u64() @@ -301,6 +372,86 @@ pub fn identifier_for_inner_mapping_key_column_raw(slot: u8, extra: Vec) -> compute_id_with_prefix_raw(INNER_KEY_ID_PREFIX, slot, extra) } +/// Compute tx index identifier for tx index variable. +/// `inner_key_id = H(PREFIX || event_signature || contract_address || chain_id || extra)[0]` +pub fn identifier_for_tx_index_column( + event_signature: &[u8; HASH_LEN], + contract_address: &Address, + extra: &[u8], +) -> ColumnId { + let extra = identifier_raw_extra(contract_address, 0, extra.to_vec()); + + let inputs: Vec = TX_INDEX_PREFIX + .iter() + .copied() + .chain(*event_signature) + .chain(extra) + .collect_vec() + .to_fields(); + + H::hash_no_pad(&inputs).elements[0].to_canonical_u64() +} + +/// Compute gas used identifier for gas used variable. +/// `inner_key_id = H(PREFIX || event_signature || contract_address || chain_id || extra)[0]` +pub fn identifier_for_gas_used_column( + event_signature: &[u8; HASH_LEN], + contract_address: &Address, + extra: &[u8], +) -> ColumnId { + let extra = identifier_raw_extra(contract_address, 0, extra.to_vec()); + + let inputs: Vec = GAS_USED_PREFIX + .iter() + .copied() + .chain(*event_signature) + .chain(extra) + .collect_vec() + .to_fields(); + + H::hash_no_pad(&inputs).elements[0].to_canonical_u64() +} + +/// Compute topic identifier for topic variable. 
+/// `inner_key_id = H(PREFIX || event_signature || contract_address || chain_id || extra)[0]` +pub fn identifier_for_topic_column( + event_signature: &[u8; HASH_LEN], + contract_address: &Address, + extra: &[u8], +) -> ColumnId { + let extra = identifier_raw_extra(contract_address, 0, extra.to_vec()); + + let inputs: Vec = TOPIC_PREFIX + .iter() + .copied() + .chain(*event_signature) + .chain(extra) + .collect_vec() + .to_fields(); + + H::hash_no_pad(&inputs).elements[0].to_canonical_u64() +} + +/// Compute data identifier for data variable. +/// `inner_key_id = H(PREFIX || event_signature || contract_address || chain_id || extra)[0]` +pub fn identifier_for_data_column( + event_signature: &[u8; HASH_LEN], + contract_address: &Address, + extra: &[u8], +) -> ColumnId { + let extra = identifier_raw_extra(contract_address, 0, extra.to_vec()); + + let inputs: Vec = DATA_PREFIX + .iter() + .copied() + .chain(*event_signature) + .chain(extra) + .collect_vec() + .to_fields(); + + H::hash_no_pad(&inputs).elements[0].to_canonical_u64() +} + /// Calculate ID with prefix. pub(crate) fn compute_id_with_prefix( prefix: &[u8], @@ -374,7 +525,7 @@ pub fn row_unique_data_for_mapping_of_mappings_leaf( } /// Function to compute a storage value digest -pub fn storage_value_digest( +fn storage_value_digest( table_metadata: &TableMetadata, keys: &[&[u8]], value: &[u8; MAPPING_LEAF_VALUE_LEN], @@ -394,3 +545,98 @@ pub fn storage_value_digest( ); table_metadata.storage_values_digest(padded_keys.as_slice(), value.as_slice(), slot.slot()) } + +/// Compute the metadata digest for single variable leaf. +pub fn compute_leaf_single_metadata_digest(slot_info: &StorageSlotInfo) -> Digest { + TableMetadata::new(&[], slot_info.table_info()).digest() +} + +/// Compute the values digest for single variable leaf. 
+pub fn compute_leaf_single_values_digest( + slot_info: &StorageSlotInfo, + value: [u8; MAPPING_LEAF_VALUE_LEN], +) -> Digest { + let table_metadata = TableMetadata::new(&[], slot_info.table_info()); + storage_value_digest(&table_metadata, &[], &value, slot_info) +} + +/// Compute the metadata digest for mapping variable leaf. +pub fn compute_leaf_mapping_metadata_digest( + slot_info: &StorageSlotInfo, + key_id: ColumnId, +) -> Digest { + let input_column = InputColumnInfo::new(&[slot_info.slot().slot()], key_id, KEY_ID_PREFIX); + TableMetadata::new(&[input_column], slot_info.table_info()).digest() +} + +/// Compute the values digest for mapping variable leaf. +pub fn compute_leaf_mapping_values_digest( + slot_info: &StorageSlotInfo, + value: [u8; MAPPING_LEAF_VALUE_LEN], + mapping_key: MappingKey, + key_id: ColumnId, +) -> Digest { + let input_column = InputColumnInfo::new(&[slot_info.slot().slot()], key_id, KEY_ID_PREFIX); + let table_metadata = TableMetadata::new(&[input_column], slot_info.table_info()); + storage_value_digest( + &table_metadata, + &[mapping_key.as_slice()], + &value, + slot_info, + ) +} + +/// Compute the metadata digest for mapping of mappings leaf. +pub fn compute_leaf_mapping_of_mappings_metadata_digest( + slot_info: &StorageSlotInfo, + outer_key_id: ColumnId, + inner_key_id: ColumnId, +) -> Digest { + let outer_key_column = InputColumnInfo::new( + &[slot_info.slot().slot()], + outer_key_id, + OUTER_KEY_ID_PREFIX, + ); + let inner_key_column = InputColumnInfo::new( + &[slot_info.slot().slot()], + inner_key_id, + INNER_KEY_ID_PREFIX, + ); + TableMetadata::new( + &[outer_key_column, inner_key_column], + slot_info.table_info(), + ) + .digest() +} + +/// Compute the values digest for mapping of mappings leaf. 
+pub fn compute_leaf_mapping_of_mappings_values_digest( + slot_info: &StorageSlotInfo, + value: [u8; MAPPING_LEAF_VALUE_LEN], + outer_mapping_data: (MappingKey, ColumnId), + inner_mapping_data: (MappingKey, ColumnId), +) -> Digest { + let (outer_key, outer_key_id) = outer_mapping_data; + let (inner_key, inner_key_id) = inner_mapping_data; + let outer_key_column = InputColumnInfo::new( + &[slot_info.slot().slot()], + outer_key_id, + OUTER_KEY_ID_PREFIX, + ); + let inner_key_column = InputColumnInfo::new( + &[slot_info.slot().slot()], + inner_key_id, + INNER_KEY_ID_PREFIX, + ); + let table_metadata = TableMetadata::new( + &[outer_key_column, inner_key_column], + slot_info.table_info(), + ); + + storage_value_digest( + &table_metadata, + &[outer_key.as_slice(), inner_key.as_slice()], + &value, + slot_info, + ) +} diff --git a/mp2-v1/src/values_extraction/planner.rs b/mp2-v1/src/values_extraction/planner.rs index e870a7f40..ccb5ac62d 100644 --- a/mp2-v1/src/values_extraction/planner.rs +++ b/mp2-v1/src/values_extraction/planner.rs @@ -549,10 +549,29 @@ pub mod tests { BlockUtil::fetch(&provider, BlockNumberOrTag::Number(block_number)).await?; let event_info = test_receipt_trie_helper().await?; + let mut proof_info = vec![]; + let mut success = false; + for _ in 0..10 { + match event_info + .query_receipt_proofs(&provider, block_number.into()) + .await + { + // For each of the logs return the transacion its included in, then sort and remove duplicates. 
+ Ok(response) => { + proof_info = response; + success = true; + break; + } + Err(_) => { + tokio::time::sleep(std::time::Duration::from_secs(5)).await; + continue; + } + } + } - let proof_info = event_info - .query_receipt_proofs(&provider, block_number.into()) - .await?; + if !success { + return Err(anyhow!("Could not query mainnet successfully")); + } Ok((block_util, event_info, proof_info)) } diff --git a/mp2-v1/tests/common/cases/indexing.rs b/mp2-v1/tests/common/cases/indexing.rs index 62fc455b1..729b41e7f 100644 --- a/mp2-v1/tests/common/cases/indexing.rs +++ b/mp2-v1/tests/common/cases/indexing.rs @@ -16,15 +16,13 @@ use mp2_v1::{ ColumnID, }, values_extraction::{ - identifier_block_column, identifier_for_inner_mapping_key_column, - identifier_for_outer_mapping_key_column, identifier_for_value_column, DATA_NAME, - DATA_PREFIX, GAS_USED_NAME, GAS_USED_PREFIX, TOPIC_NAME, TOPIC_PREFIX, + identifier_block_column, identifier_for_data_column, identifier_for_gas_used_column, + identifier_for_inner_mapping_key_column, identifier_for_outer_mapping_key_column, + identifier_for_topic_column, identifier_for_value_column, DATA_NAME, GAS_USED_NAME, + TOPIC_NAME, }, }; -use plonky2::{ - field::types::{Field, PrimeField64}, - plonk::config::Hasher, -}; + use rand::{thread_rng, Rng}; use ryhope::storage::RoEpochKvStorage; @@ -60,10 +58,8 @@ use alloy::{ }; use mp2_common::{ eth::{EventLogInfo, StorageSlot}, - poseidon::H, proof::ProofWithVK, types::HashOutput, - F, }; /// Test slots for single values extraction @@ -1020,35 +1016,17 @@ pub fn compute_non_indexed_receipt_column_ids< >( event: &EventLogInfo, ) -> Vec<(String, ColumnID)> { - let gas_used_input = [ - event.address.as_slice(), - event.event_signature.as_slice(), - GAS_USED_PREFIX, - ] - .concat() - .into_iter() - .map(F::from_canonical_u8) - .collect::>(); - let gas_used_column_id = H::hash_no_pad(&gas_used_input).elements[0]; + let gas_used_column_id = + identifier_for_gas_used_column(&event.event_signature, 
&event.address, &[]); let topic_ids = event .topics .iter() .enumerate() .map(|(j, _)| { - let input = [ - event.address.as_slice(), - event.event_signature.as_slice(), - TOPIC_PREFIX, - &[j as u8 + 1], - ] - .concat() - .into_iter() - .map(F::from_canonical_u8) - .collect::>(); ( format!("{}_{}", TOPIC_NAME, j + 1), - H::hash_no_pad(&input).elements[0].to_canonical_u64(), + identifier_for_topic_column(&event.event_signature, &event.address, &[j as u8 + 1]), ) }) .collect::>(); @@ -1058,28 +1036,15 @@ pub fn compute_non_indexed_receipt_column_ids< .iter() .enumerate() .map(|(j, _)| { - let input = [ - event.address.as_slice(), - event.event_signature.as_slice(), - DATA_PREFIX, - &[j as u8 + 1], - ] - .concat() - .into_iter() - .map(F::from_canonical_u8) - .collect::>(); ( format!("{}_{}", DATA_NAME, j + 1), - H::hash_no_pad(&input).elements[0].to_canonical_u64(), + identifier_for_data_column(&event.event_signature, &event.address, &[j as u8 + 1]), ) }) .collect::>(); [ - vec![( - GAS_USED_NAME.to_string(), - gas_used_column_id.to_canonical_u64(), - )], + vec![(GAS_USED_NAME.to_string(), gas_used_column_id)], topic_ids, data_ids, ] From e130bd9abe135a60a6d6f50c3ad5a61cc90de1eb Mon Sep 17 00:00:00 2001 From: Zack Youell Date: Wed, 5 Feb 2025 18:33:06 +0000 Subject: [PATCH 44/47] Changed receipt proof query to make fewer calls --- mp2-common/src/eth.rs | 88 ++++++++++++++++++++----------------------- 1 file changed, 41 insertions(+), 47 deletions(-) diff --git a/mp2-common/src/eth.rs b/mp2-common/src/eth.rs index 56f8a80a0..50d2721c8 100644 --- a/mp2-common/src/eth.rs +++ b/mp2-common/src/eth.rs @@ -10,8 +10,8 @@ use alloy::{ rpc::{ json_rpc::RpcError, types::{ - Block, BlockTransactions, EIP1186AccountProofResponse, Filter, ReceiptEnvelope, - Transaction, TransactionReceipt, + Block, BlockTransactions, EIP1186AccountProofResponse, ReceiptEnvelope, Transaction, + TransactionReceipt, }, }, transports::Transport, @@ -429,60 +429,54 @@ impl EventLogInfo, block: 
BlockNumberOrTag, ) -> Result, MP2EthError> { - // Retrieve the transaction indices for the relevant logs - let tx_indices = self.retrieve_tx_indices(provider, block).await?; + let receipts = query_block_receipts(provider, block).await?; - // Construct the Receipt Trie for this block so we can retrieve MPT proofs. - let mut block_util = BlockUtil::fetch(provider, block).await?; - EventLogInfo::::extract_info(&tx_indices, &mut block_util) - } + let memdb = Arc::new(MemoryDB::new(true)); + let mut receipts_trie = EthTrie::new(memdb.clone()); - /// Function to query for relevant logs at a specific block, it returns a [`BTreeSet`] of the transaction indices that are relevant. - pub async fn retrieve_tx_indices( - &self, - provider: &RootProvider, - block: BlockNumberOrTag, - ) -> Result, MP2EthError> { - let filter = Filter::new() - .select(block) - .address(self.address) - .event_signature(B256::from(self.event_signature)); - for i in 0..RETRY_NUM - 1 { - debug!( - "Querying Receipt logs:\n\tevent signature = {:?}", - self.event_signature, - ); - match provider.get_logs(&filter).await { - // For each of the logs return the transacion its included in, then sort and remove duplicates. 
- Ok(response) => { - return Ok(BTreeSet::from_iter( - response.iter().map_while(|log| log.transaction_index), - )) + let mut indices = BTreeSet::::new(); + receipts.into_iter().try_for_each(|receipt| { + let tx_index_u64 = receipt.transaction_index.unwrap(); + + let tx_index = tx_index_u64.rlp_bytes(); + + let receipt_primitive = match receipt.inner { + CRE::Legacy(ref r) => CRE::Legacy(from_rpc_logs_to_consensus(r)), + CRE::Eip2930(ref r) => CRE::Eip2930(from_rpc_logs_to_consensus(r)), + CRE::Eip1559(ref r) => CRE::Eip1559(from_rpc_logs_to_consensus(r)), + CRE::Eip4844(ref r) => CRE::Eip4844(from_rpc_logs_to_consensus(r)), + CRE::Eip7702(ref r) => CRE::Eip7702(from_rpc_logs_to_consensus(r)), + _ => panic!("aie"), + }; + // To receipt method is infallible so unwrap is safe here + let receipt = receipt_primitive.as_receipt().unwrap(); + let relevant = receipt.logs.iter().find(|log| { + let address_check = log.address == self.address; + let topics = log.topics(); + if topics.is_empty() { + false + } else { + let sig_check = topics[0].0 == self.event_signature; + sig_check && address_check } - Err(e) => println!("Failed to query the Receipt logs at {i} time: {e:?}"), + }); + + if relevant.is_some() { + indices.insert(tx_index_u64); } - } - match provider.get_logs(&filter).await { - // For each of the logs return the transacion its included in, then sort and remove duplicates. - Ok(response) => Ok(BTreeSet::from_iter( - response.iter().map_while(|log| log.transaction_index), - )), - Err(_) => Err(MP2EthError::FetchError), - } - } - /// Function that takes a list of transaction indices in the form of a [`BTreeSet`] and a [`BlockUtil`] and returns a list of [`ReceiptProofInfo`]. 
- pub fn extract_info( - tx_indices: &BTreeSet, - block_util: &mut BlockUtil, - ) -> Result, MP2EthError> { - let mpt_root = block_util.receipts_trie.root_hash()?; - tx_indices + let body_rlp = receipt_primitive.encoded_2718(); + + receipts_trie.insert(&tx_index, &body_rlp) + })?; + let mpt_root = receipts_trie.root_hash()?; + + indices .iter() .map(|&tx_index| { let key = tx_index.rlp_bytes(); - let proof = block_util.receipts_trie.get_proof(&key[..])?; + let proof = receipts_trie.get_proof(&key[..])?; Ok(ReceiptProofInfo { mpt_proof: proof, From 143ffa6e2d405de34e5731b9d52d708b5f596ec1 Mon Sep 17 00:00:00 2001 From: Zack Youell Date: Wed, 5 Feb 2025 18:40:01 +0000 Subject: [PATCH 45/47] Changed receipt proof query to make fewer calls --- mp2-common/src/eth.rs | 175 +----------------------------------------- 1 file changed, 1 insertion(+), 174 deletions(-) diff --git a/mp2-common/src/eth.rs b/mp2-common/src/eth.rs index 50d2721c8..0d60aa55f 100644 --- a/mp2-common/src/eth.rs +++ b/mp2-common/src/eth.rs @@ -948,11 +948,7 @@ mod test { let mut block = BlockUtil::fetch(&provider, bna).await?; // check if we compute the RLP correctly now block.check()?; - let mut be = tryethers::BlockData::fetch(bn, url).await?; - be.check()?; - let er = be.receipts_trie.root_hash()?; - let ar = block.receipts_trie.root_hash()?; - assert_eq!(er, ar); + // dissect one receipt entry in the trie let tx_receipt = block.txs.first().unwrap(); // https://sepolia.etherscan.io/tx/0x9bef12fafd3962b0e0d66b738445d6ea2c1f3daabe10c889bd1916acc75d698b#eventlog @@ -1449,173 +1445,4 @@ mod test { rlp.append(inner); } } - // for compatibility check with alloy - mod tryethers { - - use std::sync::Arc; - - use anyhow::Result; - use eth_trie::{EthTrie, MemoryDB, Trie}; - use ethers::{ - providers::{Http, Middleware, Provider}, - types::{BlockId, Bytes, Transaction, TransactionReceipt, U64}, - }; - use rlp::{Encodable, RlpStream}; - - /// A wrapper around a transaction and its receipt. 
The receipt is used to filter - /// bad transactions, so we only compute over valid transactions. - pub struct TxAndReceipt(Transaction, TransactionReceipt); - - impl TxAndReceipt { - pub fn tx(&self) -> &Transaction { - &self.0 - } - pub fn receipt(&self) -> &TransactionReceipt { - &self.1 - } - pub fn tx_rlp(&self) -> Bytes { - self.0.rlp() - } - // TODO: this should be upstreamed to ethers-rs - pub fn receipt_rlp(&self) -> Bytes { - let tx_type = self.tx().transaction_type; - let mut rlp = RlpStream::new(); - rlp.begin_unbounded_list(); - match &self.1.status { - Some(s) if s.as_u32() == 1 => rlp.append(s), - _ => rlp.append_empty_data(), - }; - rlp.append(&self.1.cumulative_gas_used) - .append(&self.1.logs_bloom) - .append_list(&self.1.logs); - - rlp.finalize_unbounded_list(); - let rlp_bytes: Bytes = rlp.out().freeze().into(); - let mut encoded = vec![]; - match tx_type { - // EIP-2930 (0x01) - Some(x) if x == U64::from(0x1) => { - encoded.extend_from_slice(&[0x1]); - encoded.extend_from_slice(rlp_bytes.as_ref()); - encoded.into() - } - // EIP-1559 (0x02) - Some(x) if x == U64::from(0x2) => { - encoded.extend_from_slice(&[0x2]); - encoded.extend_from_slice(rlp_bytes.as_ref()); - encoded.into() - } - _ => rlp_bytes, - } - } - } - /// Structure containing the block header and its transactions / receipts. Amongst other things, - /// it is used to create a proof of inclusion for any transaction inside this block. 
- pub struct BlockData { - pub block: ethers::types::Block, - // TODO: add generics later - this may be re-used amongst different workers - pub tx_trie: EthTrie, - pub receipts_trie: EthTrie, - } - - impl BlockData { - pub async fn fetch + Send + Sync>( - blockid: T, - url: String, - ) -> Result { - let provider = - Provider::::try_from(url).expect("could not instantiate HTTP Provider"); - Self::fetch_from(&provider, blockid).await - } - pub async fn fetch_from + Send + Sync>( - provider: &Provider, - blockid: T, - ) -> Result { - let block = provider - .get_block_with_txs(blockid) - .await? - .expect("should have been a block"); - let receipts = provider - .get_block_receipts( - block - .number - .ok_or(anyhow::anyhow!("Couldn't unwrap block number"))?, - ) - .await - .map_err(|e| { - anyhow::anyhow!("Couldn't get ethers block receipts with error: {:?}", e) - })?; - - let tx_with_receipt = block - .transactions - .clone() - .into_iter() - .map(|tx| { - let tx_hash = tx.hash(); - let r = receipts - .iter() - .find(|r| r.transaction_hash == tx_hash) - .expect("RPC sending invalid data"); - // TODO remove cloning - TxAndReceipt(tx, r.clone()) - }) - .collect::>(); - - // check transaction root - let memdb = Arc::new(MemoryDB::new(true)); - let mut tx_trie = EthTrie::new(Arc::clone(&memdb)); - for tr in tx_with_receipt.iter() { - tx_trie - .insert(&tr.receipt().transaction_index.rlp_bytes(), &tr.tx_rlp()) - .expect("can't insert tx"); - } - - // check receipt root - let memdb = Arc::new(MemoryDB::new(true)); - let mut receipts_trie = EthTrie::new(Arc::clone(&memdb)); - for tr in tx_with_receipt.iter() { - if tr.tx().transaction_index.unwrap() == U64::from(0) { - println!( - "Ethers: Index {} -> {:?}", - tr.tx().transaction_index.unwrap(), - tr.receipt_rlp().to_vec() - ); - } - receipts_trie - .insert( - &tr.receipt().transaction_index.rlp_bytes(), - // TODO: make getter value for rlp encoding - &tr.receipt_rlp(), - ) - .expect("can't insert tx"); - } - let computed = 
tx_trie.root_hash().expect("root hash problem"); - let expected = block.transactions_root; - assert_eq!(expected, computed); - - let computed = receipts_trie.root_hash().expect("root hash problem"); - let expected = block.receipts_root; - assert_eq!(expected, computed); - - Ok(BlockData { - block, - tx_trie, - receipts_trie, - }) - } - - // recompute the receipts trie by first converting all receipts form RPC type to consensus type - // since in Alloy these are two different types and RLP functions are only implemented for - // consensus ones. - pub fn check(&mut self) -> Result<()> { - let computed = self.receipts_trie.root_hash()?; - let tx_computed = self.tx_trie.root_hash()?; - let expected = self.block.receipts_root; - let tx_expected = self.block.transactions_root; - assert_eq!(expected.0, computed.0); - assert_eq!(tx_expected.0, tx_computed.0); - Ok(()) - } - } - } } From f5c75af19de1d0cbaf859077f92895502e148318 Mon Sep 17 00:00:00 2001 From: Zack Youell Date: Wed, 5 Feb 2025 19:00:39 +0000 Subject: [PATCH 46/47] Changed receipt proof query to make fewer calls --- mp2-common/src/eth.rs | 27 +++++++------ mp2-test/src/mpt_sequential.rs | 12 ++++-- mp2-v1/src/values_extraction/planner.rs | 48 +++++++---------------- mp2-v1/tests/common/cases/table_source.rs | 4 +- 4 files changed, 40 insertions(+), 51 deletions(-) diff --git a/mp2-common/src/eth.rs b/mp2-common/src/eth.rs index 0d60aa55f..cdc72b845 100644 --- a/mp2-common/src/eth.rs +++ b/mp2-common/src/eth.rs @@ -428,7 +428,7 @@ impl EventLogInfo, block: BlockNumberOrTag, - ) -> Result, MP2EthError> { + ) -> Result<(Vec, B256), MP2EthError> { let receipts = query_block_receipts(provider, block).await?; let memdb = Arc::new(MemoryDB::new(true)); @@ -471,20 +471,23 @@ impl EventLogInfo, MP2EthError>>() + .collect::, MP2EthError>>()?, + B256::from(mpt_root.0), + )) } } diff --git a/mp2-test/src/mpt_sequential.rs b/mp2-test/src/mpt_sequential.rs index 271c4d5d5..bd945a4ac 100644 --- 
a/mp2-test/src/mpt_sequential.rs +++ b/mp2-test/src/mpt_sequential.rs @@ -2,7 +2,7 @@ use alloy::{ eips::BlockNumberOrTag, network::TransactionBuilder, node_bindings::Anvil, - primitives::{Address, U256}, + primitives::{Address, B256, U256}, providers::{ext::AnvilApi, Provider, ProviderBuilder}, sol, }; @@ -56,6 +56,8 @@ pub struct ReceiptTestInfo pub event: EventLogInfo, /// The proofs for receipts relating to `self.query` pub proofs: Vec, + /// The root of the Receipt Trie at this block (in case there are no relevant events) + pub receipts_root: B256, } impl @@ -275,11 +277,15 @@ pub fn generate_receipt_test_info Extractable provider: &RootProvider, ) -> Result, MP2PlannerError> { // Query for the receipt proofs relating to this event at block number `epoch` - let proofs = self.query_receipt_proofs(provider, epoch.into()).await?; + let (proofs, receipt_root) = self.query_receipt_proofs(provider, epoch.into()).await?; let mut proof_cache = HashMap::>::new(); // Convert the paths into their keys using keccak if proofs.is_empty() { - let block = provider - .get_block_by_number(BlockNumberOrTag::Number(epoch), false.into()) - .await - .map_err(|_| MP2PlannerError::FetchError)? 
- .ok_or(MP2PlannerError::UpdateTreeError( - "Fetched Block with no relevant events but the result was None".to_string(), - ))?; - let receipt_root = block.header.receipts_root; - let dummy_input = InputEnum::Dummy(receipt_root); let proof_data = ProofData:: { node: vec![], @@ -426,12 +416,12 @@ impl Extractable #[cfg(test)] pub mod tests { - use alloy::{eips::BlockNumberOrTag, primitives::Address, providers::ProviderBuilder, sol}; + use alloy::{primitives::Address, providers::ProviderBuilder, sol}; use anyhow::anyhow; - use eth_trie::Trie; + use mp2_common::{ digest::Digest, - eth::{BlockUtil, ReceiptProofInfo}, + eth::ReceiptProofInfo, proof::ProofWithVK, types::GFp, utils::{Endianness, Packer}, @@ -451,7 +441,7 @@ pub mod tests { async fn test_receipt_update_tree() -> Result<()> { // First get the info we will feed in to our function let epoch: u64 = 21362445; - let (block_util, event_info, _) = build_test_data(epoch).await?; + let (receipts_root, event_info, _) = build_test_data(epoch).await?; let url = get_mainnet_url(); // get some tx and receipt @@ -459,10 +449,7 @@ pub mod tests { let extraction_plan = event_info.create_update_plan(epoch, &provider).await?; - assert_eq!( - *extraction_plan.update_tree.root(), - block_util.block.header.receipts_root - ); + assert_eq!(*extraction_plan.update_tree.root(), receipts_root); Ok(()) } @@ -477,7 +464,7 @@ pub mod tests { async fn test_receipt_proving(epoch: u64, pp: &PublicParameters<512, 5>) -> Result<()> { // First get the info we will feed in to our function - let (mut block_util, event_info, proof_info) = build_test_data(epoch).await?; + let (receipts_root, event_info, proof_info) = build_test_data(epoch).await?; let url = get_mainnet_url(); // get some tx and receipt @@ -511,12 +498,7 @@ pub mod tests { { assert_eq!( pi.root_hash(), - block_util - .receipts_trie - .root_hash()? 
- .0 - .to_vec() - .pack(Endianness::Little) + receipts_root.0.to_vec().pack(Endianness::Little) ); } @@ -537,19 +519,16 @@ pub mod tests { Ok(()) } - type TestData = (BlockUtil, EventLogInfo<2, 1>, Vec); + type TestData = (B256, EventLogInfo<2, 1>, Vec); /// Function that fetches a block together with its transaction trie and receipt trie for testing purposes. async fn build_test_data(block_number: u64) -> Result { let url = get_mainnet_url(); // get some tx and receipt let provider = ProviderBuilder::new().on_http(url.parse()?); - // We fetch a specific block which we know includes transactions relating to the PudgyPenguins contract. - let block_util = - BlockUtil::fetch(&provider, BlockNumberOrTag::Number(block_number)).await?; - let event_info = test_receipt_trie_helper().await?; let mut proof_info = vec![]; + let mut root = B256::default(); let mut success = false; for _ in 0..10 { match event_info @@ -557,8 +536,9 @@ pub mod tests { .await { // For each of the logs return the transacion its included in, then sort and remove duplicates. - Ok(response) => { + Ok((response, fetched_root)) => { proof_info = response; + root = fetched_root; success = true; break; } @@ -573,7 +553,7 @@ pub mod tests { return Err(anyhow!("Could not query mainnet successfully")); } - Ok((block_util, event_info, proof_info)) + Ok((root, event_info, proof_info)) } /// Function to build a list of [`ReceiptProofInfo`] for a set block. 
diff --git a/mp2-v1/tests/common/cases/table_source.rs b/mp2-v1/tests/common/cases/table_source.rs index 4daacd1d8..c80f198ed 100644 --- a/mp2-v1/tests/common/cases/table_source.rs +++ b/mp2-v1/tests/common/cases/table_source.rs @@ -863,7 +863,7 @@ where let block_number = ctx.block_number().await; let new_block_number = block_number as BlockPrimaryIndex; - let proof_infos = event + let (proof_infos, _) = event .query_receipt_proofs(provider.root(), block_number.into()) .await .unwrap(); @@ -934,7 +934,7 @@ where let block_number = ctx.block_number().await; let new_block_number = block_number as BlockPrimaryIndex; - let proof_infos = event + let (proof_infos, _) = event .query_receipt_proofs(provider.root(), block_number.into()) .await .unwrap(); From 54a23a29c6b68326384de76693e06d01e8746397 Mon Sep 17 00:00:00 2001 From: nicholas-mainardi Date: Thu, 6 Feb 2025 12:25:04 +0100 Subject: [PATCH 47/47] Fix typo in bracketer_primary_index query --- parsil/src/bracketer.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/parsil/src/bracketer.rs b/parsil/src/bracketer.rs index a8dcc78a0..44bb9174a 100644 --- a/parsil/src/bracketer.rs +++ b/parsil/src/bracketer.rs @@ -44,7 +44,7 @@ pub fn bracket_primary_index( "SELECT {KEY} FROM {table_name} WHERE {VALID_FROM} <= $1 AND {VALID_UNTIL} >= $1 - AND {primary_index}:::NUMERIC < '{min_bound}'::DECIMAL + AND {primary_index} < '{min_bound}'::DECIMAL ORDER BY {KEY} DESC LIMIT 1" )) };