From 4c64bbae529be7784ed57d22eabb51b883e476a7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stefan=20Negovanovi=C4=87?= <93934272+Stefan-Ethernal@users.noreply.github.com> Date: Tue, 31 Dec 2024 14:38:32 +0100 Subject: [PATCH 1/3] chore: merge `develop` into `main` (#90) Signed-off-by: Arpit Temani Co-authored-by: Goran Rojovic <100121253+goran-ethernal@users.noreply.github.com> Co-authored-by: Arpit Temani Co-authored-by: Goran Rojovic Co-authored-by: Rachit Sonthalia <54906134+rachit77@users.noreply.github.com> --- .github/{.assets => assets}/aggkit-logo.svg | 0 .github/assets/cdk-logo-name.png | Bin 9309 -> 0 bytes .github/assets/cdk-logo.svg | 28 - .github/workflows/arm_deb_packager.yml | 36 +- .github/workflows/arm_rpm_packager.yml | 58 +- .github/workflows/codeql.yml | 35 +- .github/workflows/lint-pr.yml | 2 +- .github/workflows/lint.yml | 4 +- .github/workflows/mdbook.yml | 4 +- .github/workflows/release.yml | 4 +- .github/workflows/test-e2e-multi_pp.yml | 86 - .github/workflows/test-e2e.yml | 145 +- .github/workflows/test-resequence.yml | 14 +- .github/workflows/test-unit.yml | 16 +- .github/workflows/x86_deb_packager.yml | 38 +- .github/workflows/x86_rpm_packager.yml | 60 +- .gitignore | 2 +- .golangci.yml | 1 - Dockerfile | 10 +- Makefile | 21 +- agglayer/mock_agglayer_client.go | 2 +- aggoracle/chaingersender/evm.go | 9 +- aggregator/aggregator.go | 1665 ---------- aggregator/aggregator_test.go | 1921 ----------- aggregator/config.go | 139 - aggregator/db/dbstorage/dbstorage.go | 35 - aggregator/db/dbstorage/proof.go | 356 --- aggregator/db/dbstorage/proof_test.go | 151 - aggregator/db/dbstorage/sequence.go | 21 - aggregator/db/logger.go | 27 - aggregator/db/migrations.go | 122 - aggregator/db/migrations/0001.sql | 24 - aggregator/db/migrations_test.go | 28 - aggregator/ethmantypes/finalproofinputs.go | 10 - aggregator/interfaces.go | 100 - aggregator/mocks/mock_eth_tx_manager.go | 587 ---- aggregator/mocks/mock_etherman.go | 389 --- 
aggregator/mocks/mock_prover.go | 540 ---- aggregator/mocks/mock_rpc.go | 152 - aggregator/mocks/mock_storage.go | 690 ---- aggregator/mocks/mock_synchronizer.go | 697 ---- aggregator/mocks/mock_txer.go | 389 --- aggregator/prover/aggregator.pb.go | 2819 ----------------- aggregator/prover/aggregator_grpc.pb.go | 150 - aggregator/prover/mocks/mock_channel.go | 406 --- aggregator/prover/prover.go | 460 --- aggregator/prover/prover_test.go | 109 - aggsender/mocks/agg_sender_storage.go | 2 +- aggsender/mocks/block_notifier.go | 2 +- aggsender/mocks/epoch_notifier.go | 4 +- aggsender/mocks/generic_subscriber.go | 2 +- aggsender/mocks/l2_bridge_syncer.go | 4 +- aggsender/mocks/logger.go | 18 +- bridgesync/bridgesync_test.go | 32 +- bridgesync/mocks/eth_clienter.go | 2 +- cmd/main.go | 3 +- cmd/run.go | 257 +- common/components.go | 4 - config/config.go | 27 +- config/config_test.go | 92 - config/default.go | 126 +- crates/aggkit-config/src/aggregator.rs | 125 - crates/aggkit-config/src/lib.rs | 9 - crates/aggkit-config/src/sequence_sender.rs | 50 - crates/aggkit/Cargo.toml | 2 +- crates/aggkit/build.rs | 2 +- crates/aggkit/src/cli.rs | 20 - crates/aggkit/src/config_render.rs | 5 +- crates/aggkit/src/main.rs | 74 - dataavailability/config.go | 9 - dataavailability/dataavailability.go | 33 - .../datacommittee/datacommittee.go | 401 --- .../datacommittee/datacommittee_test.go | 142 - dataavailability/interfaces.go | 54 - .../mocks_da/batch_data_provider.go | 96 - dataavailability/mocks_da/da_backender.go | 2 +- dataavailability/mocks_da/data_manager.go | 218 -- dataavailability/mocks_da/func_sign_type.go | 94 - .../mocks_da/sequence_retriever.go | 98 - dataavailability/mocks_da/sequence_sender.go | 156 - .../mocks_da/sequence_sender_banana.go | 97 - .../mocks_da/sequence_sender_elderberry.go | 95 - etherman/aggregator.go | 130 - go.mod | 19 +- go.sum | 152 +- l1infotreesync/downloader.go | 9 +- l1infotreesync/downloader_test.go | 55 +- l1infotreesync/l1infotreesync.go | 12 
+- lastgersync/e2e_test.go | 16 +- .../src/proto/aggregator/v1/aggregator.proto | 330 -- rpc/batch.go | 149 - rpc/batch_test.go | 265 -- rpc/types/rpcbatch.go | 18 - scripts/local_config | 30 +- sequencesender/config.go | 73 - sequencesender/ethtx.go | 398 --- sequencesender/ethtx_test.go | 786 ----- sequencesender/mocks/mock_etherman.go | 271 -- sequencesender/mocks/mock_ethtxmanager.go | 302 -- sequencesender/mocks/mock_rpc.go | 153 - sequencesender/mocks/mock_txbuilder.go | 367 --- sequencesender/seqsendertypes/types.go | 43 - sequencesender/sequencesender.go | 556 ---- sequencesender/sequencesender_test.go | 619 ---- sequencesender/txbuilder/banana_base.go | 247 -- sequencesender/txbuilder/banana_base_test.go | 162 - sequencesender/txbuilder/banana_types.go | 184 -- sequencesender/txbuilder/banana_validium.go | 153 - .../txbuilder/banana_validium_test.go | 139 - sequencesender/txbuilder/banana_zkevm.go | 128 - sequencesender/txbuilder/banana_zkevm_test.go | 137 - sequencesender/txbuilder/elderberry_base.go | 63 - .../txbuilder/elderberry_base_test.go | 101 - sequencesender/txbuilder/elderberry_types.go | 72 - .../txbuilder/elderberry_validium.go | 134 - .../txbuilder/elderberry_validium_test.go | 119 - sequencesender/txbuilder/elderberry_zkevm.go | 113 - .../txbuilder/elderberry_zkevm_test.go | 113 - sequencesender/txbuilder/interface.go | 38 - sequencesender/txbuilder/interface_test.go | 50 - .../mocks_txbuilder/cond_new_sequence.go | 103 - .../global_exit_root_banana_contractor.go | 139 - ...lobal_exit_root_banana_zkevm_contractor.go | 139 - .../txbuilder/mocks_txbuilder/l1_client.go | 98 - .../mocks_txbuilder/l1_info_syncer.go | 154 - .../rollup_banana_base_contractor.go | 93 - .../rollup_banana_validium_contractor.go | 163 - .../rollup_banana_zkevm_contractor.go | 162 - .../rollup_elderberry_validium_contractor.go | 104 - .../rollup_elderberry_zkevm_contractor.go | 103 - .../txbuilder/mocks_txbuilder/tx_builder.go | 367 --- 
.../txbuilder/validium_cond_num_batches.go | 34 - .../validium_cond_num_batches_test.go | 46 - .../txbuilder/zkevm_cond_max_size.go | 124 - .../txbuilder/zkevm_cond_max_size_test.go | 95 - sync/mock_downloader_test.go | 2 +- test/Makefile | 55 +- test/bats/fep/access-list-e2e.bats | 119 - test/bats/fep/basic-e2e.bats | 196 -- test/bats/fep/bridge-e2e.bats | 193 -- test/bats/fep/e2e.bats | 11 - test/bats/helpers/common-multi_cdk-setup.bash | 41 +- test/bats/helpers/common-setup.bash | 2 +- test/bats/helpers/lxly-bridge.bash | 94 +- test/combinations/fork11-rollup.yml | 9 - test/combinations/fork12-cdk-validium.yml | 8 - ...12-pessimistic-multi-attach-second-cdk.yml | 2 +- .../combinations/fork12-pessimistic-multi.yml | 4 +- test/combinations/fork12-pessimistic.yml | 4 +- test/combinations/fork12-rollup.yml | 8 - test/combinations/fork9-cdk-validium.yml | 12 - .../kurtosis-cdk-node-config.toml.template | 9 - test/config/test.config.toml | 102 - test/docker-compose.yml | 51 +- test/helpers/e2e.go | 6 +- test/run-e2e-multi_pp.sh | 8 +- test/run-e2e.sh | 12 +- test/scripts/agglayer_certificates_monitor.sh | 2 +- test/scripts/batch_verification_monitor.sh | 2 +- test/scripts/env.sh | 6 +- 160 files changed, 494 insertions(+), 24013 deletions(-) rename .github/{.assets => assets}/aggkit-logo.svg (100%) delete mode 100644 .github/assets/cdk-logo-name.png delete mode 100644 .github/assets/cdk-logo.svg delete mode 100644 .github/workflows/test-e2e-multi_pp.yml delete mode 100644 aggregator/aggregator.go delete mode 100644 aggregator/aggregator_test.go delete mode 100644 aggregator/config.go delete mode 100644 aggregator/db/dbstorage/dbstorage.go delete mode 100644 aggregator/db/dbstorage/proof.go delete mode 100644 aggregator/db/dbstorage/proof_test.go delete mode 100644 aggregator/db/dbstorage/sequence.go delete mode 100644 aggregator/db/logger.go delete mode 100644 aggregator/db/migrations.go delete mode 100644 aggregator/db/migrations/0001.sql delete mode 100644 
aggregator/db/migrations_test.go delete mode 100644 aggregator/ethmantypes/finalproofinputs.go delete mode 100644 aggregator/interfaces.go delete mode 100644 aggregator/mocks/mock_eth_tx_manager.go delete mode 100644 aggregator/mocks/mock_etherman.go delete mode 100644 aggregator/mocks/mock_prover.go delete mode 100644 aggregator/mocks/mock_rpc.go delete mode 100644 aggregator/mocks/mock_storage.go delete mode 100644 aggregator/mocks/mock_synchronizer.go delete mode 100644 aggregator/mocks/mock_txer.go delete mode 100644 aggregator/prover/aggregator.pb.go delete mode 100644 aggregator/prover/aggregator_grpc.pb.go delete mode 100644 aggregator/prover/mocks/mock_channel.go delete mode 100644 aggregator/prover/prover.go delete mode 100644 aggregator/prover/prover_test.go delete mode 100644 crates/aggkit-config/src/aggregator.rs delete mode 100644 crates/aggkit-config/src/sequence_sender.rs delete mode 100644 dataavailability/config.go delete mode 100644 dataavailability/dataavailability.go delete mode 100644 dataavailability/datacommittee/datacommittee.go delete mode 100644 dataavailability/datacommittee/datacommittee_test.go delete mode 100644 dataavailability/interfaces.go delete mode 100644 dataavailability/mocks_da/batch_data_provider.go delete mode 100644 dataavailability/mocks_da/data_manager.go delete mode 100644 dataavailability/mocks_da/func_sign_type.go delete mode 100644 dataavailability/mocks_da/sequence_retriever.go delete mode 100644 dataavailability/mocks_da/sequence_sender.go delete mode 100644 dataavailability/mocks_da/sequence_sender_banana.go delete mode 100644 dataavailability/mocks_da/sequence_sender_elderberry.go delete mode 100644 etherman/aggregator.go delete mode 100644 proto/src/proto/aggregator/v1/aggregator.proto delete mode 100644 rpc/batch.go delete mode 100644 rpc/batch_test.go delete mode 100644 sequencesender/config.go delete mode 100644 sequencesender/ethtx.go delete mode 100644 sequencesender/ethtx_test.go delete mode 100644 
sequencesender/mocks/mock_etherman.go delete mode 100644 sequencesender/mocks/mock_ethtxmanager.go delete mode 100644 sequencesender/mocks/mock_rpc.go delete mode 100644 sequencesender/mocks/mock_txbuilder.go delete mode 100644 sequencesender/seqsendertypes/types.go delete mode 100644 sequencesender/sequencesender.go delete mode 100644 sequencesender/sequencesender_test.go delete mode 100644 sequencesender/txbuilder/banana_base.go delete mode 100644 sequencesender/txbuilder/banana_base_test.go delete mode 100644 sequencesender/txbuilder/banana_types.go delete mode 100644 sequencesender/txbuilder/banana_validium.go delete mode 100644 sequencesender/txbuilder/banana_validium_test.go delete mode 100644 sequencesender/txbuilder/banana_zkevm.go delete mode 100644 sequencesender/txbuilder/banana_zkevm_test.go delete mode 100644 sequencesender/txbuilder/elderberry_base.go delete mode 100644 sequencesender/txbuilder/elderberry_base_test.go delete mode 100644 sequencesender/txbuilder/elderberry_types.go delete mode 100644 sequencesender/txbuilder/elderberry_validium.go delete mode 100644 sequencesender/txbuilder/elderberry_validium_test.go delete mode 100644 sequencesender/txbuilder/elderberry_zkevm.go delete mode 100644 sequencesender/txbuilder/elderberry_zkevm_test.go delete mode 100644 sequencesender/txbuilder/interface.go delete mode 100644 sequencesender/txbuilder/interface_test.go delete mode 100644 sequencesender/txbuilder/mocks_txbuilder/cond_new_sequence.go delete mode 100644 sequencesender/txbuilder/mocks_txbuilder/global_exit_root_banana_contractor.go delete mode 100644 sequencesender/txbuilder/mocks_txbuilder/global_exit_root_banana_zkevm_contractor.go delete mode 100644 sequencesender/txbuilder/mocks_txbuilder/l1_client.go delete mode 100644 sequencesender/txbuilder/mocks_txbuilder/l1_info_syncer.go delete mode 100644 sequencesender/txbuilder/mocks_txbuilder/rollup_banana_base_contractor.go delete mode 100644 
sequencesender/txbuilder/mocks_txbuilder/rollup_banana_validium_contractor.go delete mode 100644 sequencesender/txbuilder/mocks_txbuilder/rollup_banana_zkevm_contractor.go delete mode 100644 sequencesender/txbuilder/mocks_txbuilder/rollup_elderberry_validium_contractor.go delete mode 100644 sequencesender/txbuilder/mocks_txbuilder/rollup_elderberry_zkevm_contractor.go delete mode 100644 sequencesender/txbuilder/mocks_txbuilder/tx_builder.go delete mode 100644 sequencesender/txbuilder/validium_cond_num_batches.go delete mode 100644 sequencesender/txbuilder/validium_cond_num_batches_test.go delete mode 100644 sequencesender/txbuilder/zkevm_cond_max_size.go delete mode 100644 sequencesender/txbuilder/zkevm_cond_max_size_test.go delete mode 100644 test/bats/fep/access-list-e2e.bats delete mode 100644 test/bats/fep/basic-e2e.bats delete mode 100644 test/bats/fep/bridge-e2e.bats delete mode 100644 test/bats/fep/e2e.bats delete mode 100644 test/combinations/fork11-rollup.yml delete mode 100644 test/combinations/fork12-cdk-validium.yml delete mode 100644 test/combinations/fork12-rollup.yml delete mode 100644 test/combinations/fork9-cdk-validium.yml diff --git a/.github/.assets/aggkit-logo.svg b/.github/assets/aggkit-logo.svg similarity index 100% rename from .github/.assets/aggkit-logo.svg rename to .github/assets/aggkit-logo.svg diff --git a/.github/assets/cdk-logo-name.png b/.github/assets/cdk-logo-name.png deleted file mode 100644 index 4b2059361a7d216388140d5b694701ee9e183595..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 9309 zcmXwf2RxhK_rF?=oftt;Nz6)6)UF+^y{TQZwf8D*YLlQfYLud=y=zl@kJ_Vl1+_jX zwH5#9_xH~$ue_d|d+s^s-sgGFJ>!km(s)WidXE$j5064w38{mJhmXWv2Lgz3zuUw< zV|aM%?8-bl3GwHn}H8BF3H1{24Q$o#XJFM*Hp6DxnRf(bYR;}YHf`P`t z*M;$Vvyg+k+yE^$d}vv!AZ~q!LK3GCj2tcIE5#k_j==|oyVT6)0zFjG zm8Yk|_pOFL33r9uh)UYD=3@;`&1dfacr`$hD$@jy5xb~DrDQuWX=^@)UP{QMEv zqY_uPP@p_OS%?LODckaCB+ZsC)+%gV2md4Y-~B1==u-t=`K>n 
zw7v9^+Rtw8?gORXBtbFITLaeMqhdLik49LiPR8klkfMxG9PeSe`S*$ZH!d1(NJ<}e+qe`?C4l~5xxH-gu{{mVA%hZ zkQ-Pq@PtF|W#d?Z^2Gd#Kx7yOCk1NrTQD>@^7bkueTf0)Q*kT2&w^9;i@eU;%AH`C zxb^l~HpxHUEDKwQf4cnT$+F9a?<;zAQoNJG3A73u4K7HC@)+LS*1NT>GLVQY4kn(x zYh3ql;eI&&69c}125b+7@y4wQOrc<;^GrSW2FBV=$8l63e&0gRHM6%!}6hE`e z?W5ym4#27GA_WtIN@*K{tu=A8e?Y4t^1RHAF4+8Y578aq?Rt|cbP6v#Xo$2Isz)V| zI3~kR45V?=OE=DMC4~mZa8dz_Oubhx)=NPIx;2>!9t@~Nn0{Ve9Y0Sik^48L%ed|` z?}ASen{}|y?0Bn}bDB_lb=e+@Av0dRwY$lX2I-a7-FY6)*Eo1f7{n2N4vjHkkgBWs zUdgwn)K>CDT0pJhdNyU*%w8Pbl1^}>wwGLX92ks{aQeT^JgMZ11ez{(Cs9uac}MOE z;<(uFm5{**pJkS1CH9F_Q{GM-vor$+RbfS}5phy>EVrAP%|R_k=s-E1ZAoSvfwLgi z?tu;{D7q>yoKN52{_In7QVYgxZ`>XIFjwzxz2*A$ay}w0HT9QyE2{bLukUHPw(#AX zPf&&M$(Mp3e+BF{Y*-i>8SVD6zI`nV!iFeK4IH$37rEPp6%~l(D4n$F`}hNmBWwhl zKFjN9X^m;|d{*lEruf9S_0p_)Kxpk!zaRVicUx}+C5!*(oE+;Q z7<;!Yq4QkzR{vl!|J#w>$E<)*>TA^X`NkhxT|p{S7U|b#!&?*i`xU(Jdfv0P{vFR% zh3m;a)LbcN!#;^-&^9qQNBu6$&nPl$dHt#igEt}Yo=McDvpb4jl!vS@pfq;bsllc7 z^$g2QnOT52M`WMr3p~0+C`RewPabRiZ3{17-{j?b_Z)m0J5SF)Liw7ROJAMmYu{s{ zqKfyM_FK5NGJ4S=xk{q5nc=hLhL?pbTu37ncm1;ZE5Bg5fa3OT8BZHg&igx zm1WHD{`C=aSwvO-m})&7fRiQhn)v8*ME4Pa0_P))US0m zj(sTt*3)Q3UwD}|9mvnsFxoOmhoraCt@->{T0lJN+k2^#AOAHrHr|ZwMlf99jQXS` zKxhR*7N~pK`Q9ZHxxlm99Njxv5?~t(X{y+ruhXRk#ap{klo{6)k@C6F(NxH;(=rFtLl$wa;H3yu!j2Rs<3sl<Y6zZ);+ z7P2r4>feKQwzq8?!Rmcx7 z803Gg4Sf_6&K@5h|1R@)g-SFn zLswUKBEU2y-`Log2RZcpm~GAN+vUk_F?C!eWG1_BWS>hkP1R?ADIIX|0>(x*6(N^5j zS^)ASnnjeM@wnk>NNn|;XHm&h#AMsa&ruC?PYkGCFy^T9^;eNnf>QfY{4IuYp`UG; z`9sIPC?`a_yarI$U+ND<8U}o)#aLAJS(~n@0q;!C&fb_`epc&$3ZA__o!9SNHge>4 z0rQ#KJwo4H%(Px-z(SFvSPiwy978P-D?8RQNSU^MTSs3%kwNvK!E-~TbJxb7Qt%I! 
z*1h7tn%Y_iGyjV1Ty^rGPonn}J`3v{wqE(6?@0^hUm!$2P5Qq`SSMb1h5mRRxVQIY zPYJH4!~=)Jx3{-n)Ya8#ZrzA|wd{&`K60&c?dJciKixxIF!EDxR8&+sXO?E-*4dMo zdsDHYJ_3=M^HncL*sewtU;fQ@0$JiKYOBlDcScYx(@h0q_DxJon*8Q#ou?KT7t5wC z>WA0vEq(fc;HyvP)XXr&U=|xUzWo`NEFWk9zZ&|SnZ#VNT}T@rIa~}Re*3CbVNoXE za8XpKHLX1xUl_ypby&-VQPSr>zK^D+L~#>j4@_FFkIQ9VAFlQ1+giTv(le{8tUT~} z%kgo|CtbqZAs<6>{_SOIYO3kZH?@?9E60B}Mq0BFkOXYfyLazq30RZ&|I&k~RGZD; zS0W!L_p`s3)>l+iq<{%hAq*=lR%KU$@ZTO9{khxXa&fd(+!-y79EF#EJFG^eQU$Oq z<6OQai9PVNW0>{t&%3Unpr;H8IAu*dcug^dp3%?BOpQXPfhwM_pT1+(cAP0&JQ!7= z>TWz66#l$>6GpqsmEi{)ucfSiFo$zMPZHXIl@w%3R$w1PjEag%HDi?@vq3gDQGGIl zii0X;D|-(+L6F;D*O`D-%;8vA_+DC(|7Q9l1dReU>H<>))`7Ki~J0`Skv3&fE_Z z5WT$A98e5}tVU*b9(*iOI9>1@`6aeJC2Ns_B`G&=POX==GT*y4Yx3dM1FcMDBKmp7 z|Hg~4(qS<_4*QeHlD%7CTD}*VPauipn)+5^tv<3Rhfavhs@@pGqm;RMx zB=K%r7_qAMT&ZGnr97)wLF;bwuwPav)HfoF^NM?p{AY8;<`Wj?S2tIuj*C`q=Y_G6 zx&7X1T`{$)^yR@}%zIH4F^>hpYZXDE##(VJ;cS7~QFs}})Zz87V_=jAQ3J4wIZUxnvCG?g#vW%JW7EsQe^ztd{1z~ij@es3HUj^OMh?@slrf? zQumxh({Pob#c=y#Z;#s-y)WLmiVW?rPzES7+>wlf>BGRx?jLF-cjzx$-oT%-+l7YK zNuMu-MgB6qe?=G38L%p(7aDy}c)%Of#`25*?fGDvJD{Lq=R~Zf%H#-8AZ7# zNj7)MS?s>i(&%W#(r$g8+Q+Z7tD&&B!|u?OTHndO4EOJo&KFTEI9)@}j4wXT8w?c1B*0%2s*)1{ZRP44zb*%vHYT6s*Fx zP1{<4x`owCz4PZc-WE`w`lLW|*gp&W{ZC9zG4KPzY=y1%N}Wd*3y`@TVjk znX7Sp#)#$g5xc5Zn@($y54>S|Bx(0k+(ztfdea#TFyQdYd@B8|#^aC9L9l7b(&Oy< zdND4tVncLol&V+0)9~!83aNYNdxqfp;krX;;gGw`^5|3(1=DO{ zPa+*gj7Mjc6I>(~1aNQ&m2K)Tsr(QWwMbiUVqoB-bt+N*LATWiTmXmpW;B_6dyTY^$3>wfbPMaZW1W zL~qMs&?Y2VE5Xy|`25d?Xh5@OCfqV4`g;)0Kk~Zt5+7I=het2_QU0xkO@mV$4ko9j z+I8%X7ZiSFx@2xWVOO~*o6#d8$myXQ;HPN_>y zDf;zf*5o^_>6?@PW=pCZy=E28P%dOgtT$&*ON#+}8BSz>+N}s_T1e5!T7Tf-7X)_% zU6&r0{&YtckltDFoF%NozOo9N%plP28X-r3Mp;2fabAm$oGn+ouB;QgA#DcGSf<>W zZx;!G>b#3G^J2=-n6$`t>mPRo!Zwy1WCNN{ zrGA??3#h$q=BVcm9w)olPuY9@lh|D-E)A@VKt_Rijj124>zOwh(mPsNXH8m?mQm`2 zD3+*~!5nZWU>R1fM|WrZu&6GZV7Xs2I+`TmclUW*DT7C#1(T#ti4T+njtZIF58Bc5 z-BSd~SQka<85_?PMAH2%(!W#hu|81MxWn+Jh9lznVJ*TdAWbKk->10Nz(`M`aq3tj zS3=S)Zg4sgRE13+;g65wMr><$D;zxA(~7c%Z)tZvc!S@KiX 
zH%Dcs4{dYgyh0f@4O`wZi&KlC`l=zD!&I%Gcl>dQjC|b>7OqZyyCId>3#qdbT$DDs zZQ55;Qv*J9v3R}%%=>P|NQl!^!CUR1!wbt#Oe8MCTAP*^*!rl z-~6=jMxvTc zcCI>epf~c)_{ZQCz6m3t4g~T_U`nC=VXmz|^jziwou2eY!Y0zadgGvYI5u^K>g3B(@B_#ys-$+piD| z1|z!{AY%)|ItYCr(|HhST!!KZw&)Jh=Q^>ryH;UN7uWw>W}5d<1rAieS+jfdyOXLW z$~Ps<4r1LCJ=QYQ^5znTX2Z|>>F@}kfB(Pb)@6MpWK}sCJw7(JB9ZrSa9+uC+LqKm zOOnq5K2No*N#B&{%qkV}@IcgKhSfw{%E+xIlf#;|$Ys)^qj(%O>Im3-%fG`@RPvN= zCdE;QEt1($>-$QFxu^jjGM2Lb$F_z%Gu?@CjFNw#KJlz2C;a^a-S_d`v`6;i?zZT= z_x)rYr+{@vHohacXES535f>*mEeUKpP~Ay48Q7tm;?F}c(@4}6J)n6A__|r&@_bbG zS9RBNN{Kcs66S1_pqaw6JID}0UC-ZqG)`Uq=W25-$Hb`)pp0J#pbG~|%xL|*cbULm z_WW71L5`<~ga+Tf!Rk%eter9u*?uSqjmSkm1MavR*O0cFs;mX7XFN8#G{?4JCeuHY zm4(YboMIQ8@)B%F@jAA%feqVldr!)Lmm}0}E~_yA6g1>c04<t=waMkmlNAWXrSgAZex4&nK%~`|u zd>E{xoj&cF`mHc!VX{!Sl-E?-_T5>#Uc=mn^%ow=>t8X>v6YXEUSMe*-UJQ38e?*R zBd9kZB^${^OI`x!KuP{^LdZX*{TTZns%3eEJk*+=MS^WRPC_gHkdhe&UYC z;=pf}G=}Z>Y|;LTEFtCAM7~Fn&jPo6*ZSh-$0kUwKCSV21)VL6-YsSPP@WQ8tS*eU zL6`+3qt_An`c5H)S!pGN?$j?Ko(n^%!m9B2bRQ_s3pFf$hcvoW(&qkP<2b5TmNsiN zzh7ee$V|K=^78Vk zGTpmZ%hk;0S;j?Y=2la-0$;F|IE{Gr0N)IOE{t~<@gquaw3LuhbJ3m0z0+K-aFHCb zT>Bsj9jFw7)7=siGDw#<4s3gplt)lTqGu?2^&*7;m*>*h3vJgcMMLCAy{=|W_`ZJW zB_(RF1W&y+`0Vm)<#g~Ra6fC_cXKprG=`Q%8ka76bP6E(eyO!jy`gx%FFWH;;G)cb zP)NoDUa?dJqURrsUNRJkm{6QKySmOaMg-@n`)yl5Sbbk~S!tMl*@>xqnyn z7H-8WlbQ$fM*cWF^sttD(ghmWzerUPM+1>=TyJzj@G-A+xpY(~8lojo)vj8wQf>dtJ7c&CywU3m!_5`qHY49o>r2{z3&9YT7|i=qM|l30{HaewB}PBP`cwwcH}cNGnPSld!BodVFTxNWmV0~C*VA> zhzi&Q`}8+i@CTR7(lfBxi~9^hFVqTps_iXtHKVyOI_#fx-=$ajiU7joehI>klhUdf zL-ezh;hhJ%dB`O%y5=(#Rn_I+ei{zNMjL9ttXiOz1F1Nj>1ht4YxjYuF4!S(9j z_OhEF9)Addn6{$c6e!*!OWQm9OPI4klef%@TtT&aH#$u5n_qqgIEnjv|yRi83%xY$-lIo~PW?c=(dSonEdx zCS-vbF>2O|_Xmb>x2IOD9964F-a2S4y@KH z6sdad)4-d_0 zCoYgYMA*Jp-tR)w464&)EJ(QyPo?v4N>=)_{7#K!2_{Bat5pJUC8S2oeYJwfr?Kv4 zP;dbOz&~kZyL4h=d|Z(1s(j%cXdk_-K(vPD$}nukRih&o9VTUI?~8kUQ320VpulYP z{T7VlXlP)VB83j*LY)_eH%xFYC0$fhGcg)|{4N8APEYNh5zlH7Xy1HDxLA^9Gnk}=R$eS9@lxcl7 z`B}ItLn`2YgWF1nJbzYDztP_#@fhnsMKv($d9YHK+|MS5F1Egu;bUVU)XnQn#YIS^ 
z0(swbsR;oYOzjBQ8ZhSBlPiT+k9br9*Ixh{}m6+w)B zsqNh+4%t{s0CocjH|IH~zY#xWzb&xIvDwVN2x#cJ2=OG&o_wLoTK{aF`SlY;JcI&J zLD^0&ba{WF!Bf@7rFfjKq01F&<(Ur1{$%*=ih3|UiZh>9;aX1RT84>ia`Li~P8s|r z{j(I8Yo+NB{UEVOjqkpBB89fvu-syaV46LA5)fp}qZEf>Fl=V#U$bC*dj%E~! z76sO8bagC!swO~+YM4su4LE@F21y8lN?~w z)&1`seIKR&@AkX&79IvkvWs-(R$g*h42?9O2&|4%?)u^?b&*lS!^5xafeI;1ZYVB& zt)IkLT+$$H9B3h|>ao(Jlt_vV9~RakCn9nqOwQkKV@)9 z(&eU_8gCNLDW`7Eo;@y^b7foFSrMS(>GInfY7=~h4)uk?;s@b-#M2!~Nl6>MB!1c>V6v$AI(exN~r4LN_%7ck(VQg)Af_|o4I32IS4XQ92*V4zB*fd z|145*-3O6C(^eATOl#kA_INhq`B$6%aNp%OzKQ_>m!>iYe1;2JTg=^338;un_6yW9 zh2YZs>F)fgo5+m8+lSZOut@eG_IIt!tic`7z<2$xJWNaEYNF`*rIV6UI7J~C8yD^w z+xNk^jKw=KF|nqhpem1TVq!wbJnW}vS}K3Nl(}bp>@-xBoP~$1P1Ln<@ACZXI0=Rw zqn;)bv()Il-DRZ5XfSO|WE zptLVZ^8V`$$-CbMzqva~98>NO2LyEF`hte{Mn>ihvs`=GgFK*P1@%#jU& zX|s{3rJLk2nlidM7Y6q2P&G}N#wj`_8WKDXX;A+>OyfY*uuH~<1whYL*^9!0CQsT^ zV0n?TVnIw=$v&*LTTaT#{Q`Bp-9z^qAABhky3*B@QjDU2uJM(?=?x<`d8(+ofIUgE zVsZ=NF3*1ur@$iLHA(RW+o66iK->=+PM8!B%HzfB)bbu7-g3wtEw4xEt3Tq;WcRtv zfbdYoA0=gly7>-i>)GqUq>+U=e}7b3*TLlh?(N&8m+Q915!r-vC2{Ugl$ifpXwY?J zp&kQCP%yYZZv23nV3vr5w~dN$B#rs9f|sL z-b*`ha5EN;HomdjPF2@8F$^y%F0Nr&_nbu}1LI$pOhM%p*Wt+xhELU1J@Z5}B4eH_ ze}Jf}Zusy#cra_PYS^ao$f_$c-Pzf>OhAT!sUVv^zJAW77Y|Cr^kCmyT`ytuR*+K( zZ!vGlA&?1B#+TM5AHyzz%YUk;v%Y>6EOyDKfBxEi3)f(Tdnr<0ifZZKk$scO{%DXR zl%2_uuS(S`xto~Rom~Z}RtoP*>9$2JCYEKm;y%NfePBWV2wBk4nrYfueBD^-aDgR%5i1 zUWx-1^xacMfIoy6*HB}!Of8U57szorQvvkzmsIbGnLKMFuS#LO;g@olJ94SrgG@i* z=YRfXer9wOXu`sJ6dG`KJj3dKVX%m6+*(;oId$Ddo0PO~nQ`pZ?MBnVLJ8Y3kc&0D zAP47M2!+`iZ~?xY)@*R6z$>wt=Dt zVQeHU5W42>C5HB}&&1N@t-(-p-~#7)H|egN6ie zF&_T`je*|YV=8g*M!WP|LWxB#C8(d`e`p2D>Bhxd%t7dv#L-g8TbO|SI3)rp^?!(q zaRRxnQPbOT@)wN{B5%h9e{2Oq8ngbf67!2fBDIX;+;7za7)!jsHiEeNb1ex6?th=f zTBBW-OFkv&->T(_*__Wb%OTXZp>5kfivL9UZEjA;`sx*h%WoUpN`Rhg)Xu9|wA+EOysy*xQOq8-T8>HbxI zE2W>+pI3pP8`!sL4$TH!h7XFU-2xOszcATM) - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/.github/workflows/arm_deb_packager.yml b/.github/workflows/arm_deb_packager.yml index 
4a11e6e5..64d451c6 100644 --- a/.github/workflows/arm_deb_packager.yml +++ b/.github/workflows/arm_deb_packager.yml @@ -44,46 +44,46 @@ jobs: - name: Build the rust binary run: | BUILD_SCRIPT_DISABLED=1 - cargo build --release --bin aggkit-cli + cargo build --release --bin cdk - name: making directory structure - run: mkdir -p packaging/deb/aggkit/usr/bin/ + run: mkdir -p packaging/deb/cdk/usr/bin/ - name: copying necessary binary for arm64 - run: cp -rp target/aggkit packaging/deb/aggkit/usr/bin/aggkit + run: cp -rp target/cdk-node packaging/deb/cdk/usr/bin/cdk-node - name: copying rust binary for arm64 - run: cp -rp target/release/aggkit-cli packaging/deb/aggkit-cli/usr/bin/aggkit-cli + run: cp -rp target/release/cdk packaging/deb/cdk/usr/bin/cdk # Control file creation - name: Create control file run: | - echo "Package: aggkit" >> packaging/deb/aggkit/DEBIAN/control - echo "Version: ${{ env.VERSION }}" >> packaging/deb/aggkit/DEBIAN/control - echo "Section: base" >> packaging/deb/aggkit/DEBIAN/control - echo "Priority: optional" >> packaging/deb/aggkit/DEBIAN/control - echo "Architecture: arm64" >> packaging/deb/aggkit/DEBIAN/control - echo "Maintainer: devops@polygon.technology" >> packaging/deb/aggkit/DEBIAN/control - echo "Description: aggkit binary package" >> packaging/deb/aggkit/DEBIAN/control + echo "Package: cdk" >> packaging/deb/cdk/DEBIAN/control + echo "Version: ${{ env.VERSION }}" >> packaging/deb/cdk/DEBIAN/control + echo "Section: base" >> packaging/deb/cdk/DEBIAN/control + echo "Priority: optional" >> packaging/deb/cdk/DEBIAN/control + echo "Architecture: arm64" >> packaging/deb/cdk/DEBIAN/control + echo "Maintainer: devops@polygon.technology" >> packaging/deb/cdk/DEBIAN/control + echo "Description: cdk binary package" >> packaging/deb/cdk/DEBIAN/control - - name: Creating package for binary for aggkit ${{ env.ARCH }} - run: cp -rp packaging/deb/aggkit packaging/deb/aggkit-${{ env.GIT_TAG }}-${{ env.ARCH }} + - name: Creating package for binary 
for cdk ${{ env.ARCH }} + run: cp -rp packaging/deb/cdk packaging/deb/cdk-${{ env.GIT_TAG }}-${{ env.ARCH }} env: ARCH: arm64 - name: Running package build - run: dpkg-deb --build --root-owner-group packaging/deb/aggkit-${{ env.GIT_TAG }}-${{ env.ARCH }} + run: dpkg-deb --build --root-owner-group packaging/deb/cdk-${{ env.GIT_TAG }}-${{ env.ARCH }} env: ARCH: arm64 - name: create checksum for the arm64 package - run: cd packaging/deb/ && sha256sum aggkit-${{ env.GIT_TAG }}-${{ env.ARCH }}.deb > aggkit-${{ env.GIT_TAG }}-${{ env.ARCH }}.deb.checksum + run: cd packaging/deb/ && sha256sum cdk-${{ env.GIT_TAG }}-${{ env.ARCH }}.deb > cdk-${{ env.GIT_TAG }}-${{ env.ARCH }}.deb.checksum env: ARCH: arm64 - - name: Release aggkit Packages + - name: Release cdk Packages uses: softprops/action-gh-release@v2 with: tag_name: ${{ env.GIT_TAG }} prerelease: true files: | - packaging/deb/aggkit**.deb - packaging/deb/aggkit**.deb.checksum + packaging/deb/cdk**.deb + packaging/deb/cdk**.deb.checksum diff --git a/.github/workflows/arm_rpm_packager.yml b/.github/workflows/arm_rpm_packager.yml index c513a24a..614b80f2 100644 --- a/.github/workflows/arm_rpm_packager.yml +++ b/.github/workflows/arm_rpm_packager.yml @@ -35,13 +35,13 @@ jobs: - name: Download deps for project run: go mod download - - name: Building aggkit for amd64 + - name: Building cdk-node for amd64 run: make build - - name: Building the aggkit for arm64 + - name: Building the cdk run: | BUILD_SCRIPT_DISABLED=1 - cargo build --release --bin aggkit-cli + cargo build --release --bin cdk - name: Installing some dependencies run: sudo apt-get update && sudo apt-get install -y rpm @@ -53,51 +53,51 @@ jobs: mkdir -p packaging/rpm/RPMS mkdir -p packaging/rpm/SRPMS - touch packaging/rpm/aggkit.spec - echo "Name: aggkit" >> packaging/rpm/SPECS/aggkit.spec - echo "Version: ${{ env.GIT_TAG1 }}" >> packaging/rpm/SPECS/aggkit.spec - echo "Release: 1%{?dist}" >> packaging/rpm/SPECS/aggkit.spec - echo "License: GPL/AGPL" >> 
packaging/rpm/SPECS/aggkit.spec - echo "BuildArch: aarch64" >> packaging/rpm/SPECS/aggkit.spec - echo "Summary: aggkit rpm package" >> packaging/rpm/SPECS/aggkit.spec + touch packaging/rpm/cdk.spec + echo "Name: cdk" >> packaging/rpm/SPECS/cdk.spec + echo "Version: ${{ env.GIT_TAG1 }}" >> packaging/rpm/SPECS/cdk.spec + echo "Release: 1%{?dist}" >> packaging/rpm/SPECS/cdk.spec + echo "License: GPL/AGPL" >> packaging/rpm/SPECS/cdk.spec + echo "BuildArch: aarch64" >> packaging/rpm/SPECS/cdk.spec + echo "Summary: cdk rpm package" >> packaging/rpm/SPECS/cdk.spec - echo "%description" >> packaging/rpm/SPECS/aggkit.spec - echo "aggkit rpm package" >> packaging/rpm/SPECS/aggkit.spec + echo "%description" >> packaging/rpm/SPECS/cdk.spec + echo "cdk rpm package" >> packaging/rpm/SPECS/cdk.spec - echo "%pre" >> packaging/rpm/SPECS/aggkit.spec - echo "getent group aggkit >/dev/null || groupadd -r aggkit" >> packaging/rpm/SPECS/aggkit.spec - echo "getent passwd aggkit >/dev/null || useradd -s /bin/false -d /opt/aggkit -r aggkit -g aggkit" >> packaging/rpm/SPECS/aggkit.spec + echo "%pre" >> packaging/rpm/SPECS/cdk.spec + echo "getent group cdk >/dev/null || groupadd -r cdk" >> packaging/rpm/SPECS/cdk.spec + echo "getent passwd cdk >/dev/null || useradd -s /bin/false -d /opt/cdk -r cdk -g cdk" >> packaging/rpm/SPECS/cdk.spec - echo "%install" >> packaging/rpm/SPECS/aggkit.spec - echo "mkdir -p %{buildroot}/usr/bin" >> packaging/rpm/SPECS/aggkit.spec - echo "cp /home/runner/work/aggkit/aggkit/target/aggkit %{buildroot}/usr/bin/aggkit" >> packaging/rpm/SPECS/aggkit.spec - echo "cp /home/runner/work/aggkit/aggkit/target/release/aggkit %{buildroot}/usr/bin/aggkit" >> packaging/rpm/SPECS/aggkit.spec + echo "%install" >> packaging/rpm/SPECS/cdk.spec + echo "mkdir -p %{buildroot}/usr/bin" >> packaging/rpm/SPECS/cdk.spec + echo "cp /home/runner/work/cdk/cdk/target/cdk-node %{buildroot}/usr/bin/cdk-node" >> packaging/rpm/SPECS/cdk.spec + echo "cp 
/home/runner/work/cdk/cdk/target/release/cdk %{buildroot}/usr/bin/cdk" >> packaging/rpm/SPECS/cdk.spec - echo "%files" >> packaging/rpm/SPECS/aggkit.spec - echo "/usr/bin/aggkit" >> packaging/rpm/SPECS/aggkit.spec - echo "/usr/bin/aggkit" >> packaging/rpm/SPECS/aggkit.spec + echo "%files" >> packaging/rpm/SPECS/cdk.spec + echo "/usr/bin/cdk" >> packaging/rpm/SPECS/cdk.spec + echo "/usr/bin/cdk-node" >> packaging/rpm/SPECS/cdk.spec - name: Construct rpm package run: | - rpmbuild --define "_topdir /home/runner/work/aggkit/aggkit/packaging/rpm_build" \ + rpmbuild --define "_topdir /home/runner/work/cdk/cdk/packaging/rpm_build" \ --define "_builddir %{_topdir}/BUILD" \ --define "_rpmdir %{_topdir}/RPMS" \ --define "_srcrpmdir %{_topdir}/SRPMS" \ --define "__spec_install_post /bin/true" \ - -bb packaging/rpm/SPECS/aggkit.spec + -bb packaging/rpm/SPECS/cdk.spec - name: Rename file for post rpm build and for checksum - run: mv /home/runner/work/aggkit/aggkit/packaging/rpm_build/RPMS/aarch64/aggkit-${{ env.GIT_TAG1 }}-1.aarch64.rpm /home/runner/work/aggkit/aggkit/packaging/rpm_build/RPMS/aarch64/aggkit-${{ env.GIT_TAG1 }}.aarch64.rpm + run: mv /home/runner/work/cdk/cdk/packaging/rpm_build/RPMS/aarch64/cdk-${{ env.GIT_TAG1 }}-1.aarch64.rpm /home/runner/work/cdk/cdk/packaging/rpm_build/RPMS/aarch64/cdk-${{ env.GIT_TAG1 }}.aarch64.rpm - name: Checksum for the rpm package - run: sha256sum /home/runner/work/aggkit/aggkit/packaging/rpm_build/RPMS/aarch64/aggkit-${{ env.GIT_TAG1 }}.aarch64.rpm > /home/runner/work/aggkit/aggkit/packaging/rpm_build/RPMS/aarch64/aggkit-${{ env.GIT_TAG1 }}.aarch64.rpm.checksum + run: sha256sum /home/runner/work/cdk/cdk/packaging/rpm_build/RPMS/aarch64/cdk-${{ env.GIT_TAG1 }}.aarch64.rpm > /home/runner/work/cdk/cdk/packaging/rpm_build/RPMS/aarch64/cdk-${{ env.GIT_TAG1 }}.aarch64.rpm.checksum - - name: Release aggkit Packages + - name: Release cdk Packages uses: softprops/action-gh-release@v2 with: tag_name: ${{ env.GIT_TAG }} prerelease: true files: | 
- packaging/rpm_build/RPMS/aarch64/aggkit-**.rpm - packaging/rpm_build/RPMS/aarch64/aggkit-**.rpm.checksum + packaging/rpm_build/RPMS/aarch64/cdk-**.rpm + packaging/rpm_build/RPMS/aarch64/cdk-**.rpm.checksum diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index f6205e61..90b7643b 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -10,8 +10,8 @@ on: jobs: analyze: name: Analyze - runs-on: ${{ matrix.language == 'swift' && 'macos-latest' || 'ubuntu-latest' }} - timeout-minutes: ${{ matrix.language == 'swift' && 120 || 360 }} + runs-on: amd-runner-2204 + timeout-minutes: 360 permissions: actions: read contents: read @@ -27,18 +27,35 @@ jobs: - name: Checkout repository uses: actions/checkout@v4 + # Setup environment + - name: Setup Go + if: ${{ matrix.language == 'go' }} + uses: actions/setup-go@v5 + with: + go-version: 1.22.x + + - name: Verify Go version + if: ${{ matrix.language == 'go' }} + run: go version + + # Export dependencies + - name: Export Go dependencies + if: ${{ matrix.language == 'go' }} + run: go mod tidy + # Initializes the CodeQL tools for scanning. - name: Initialize CodeQL - uses: github/codeql-action/init@v2 + uses: github/codeql-action/init@v3 with: languages: ${{ matrix.language }} - # Autobuild attempts to build any compiled languages (C/C++, C#, Go, Java, or Swift). 
- # If this step fails, then you should remove it and run the build manually (see below) - - name: Autobuild - uses: github/codeql-action/autobuild@v2 + # Build project + - name: Build Go project + if: ${{ matrix.language == 'go' }} + run: make build-go + # Perform CodeQL Analysis - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v2 + uses: github/codeql-action/analyze@v3 with: - category: "/language:${{ matrix.language }}" + category: "/codeql/${{ matrix.language }}-analysis" diff --git a/.github/workflows/lint-pr.yml b/.github/workflows/lint-pr.yml index 9a24eaf2..b6702d97 100644 --- a/.github/workflows/lint-pr.yml +++ b/.github/workflows/lint-pr.yml @@ -12,7 +12,7 @@ permissions: jobs: title: name: Validate PR title - runs-on: ubuntu-latest + runs-on: amd-runner-2204 steps: - uses: amannn/action-semantic-pull-request@v5 env: diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 77255d39..1bed27f9 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -9,12 +9,12 @@ on: pull_request: jobs: lint: - runs-on: ubuntu-latest + runs-on: amd-runner-2204 steps: - name: Install Go uses: actions/setup-go@v5 with: - go-version: 1.21.x + go-version: 1.22.x - name: Checkout code uses: actions/checkout@v4 - name: golangci-lint diff --git a/.github/workflows/mdbook.yml b/.github/workflows/mdbook.yml index 8ea4f496..209c1044 100644 --- a/.github/workflows/mdbook.yml +++ b/.github/workflows/mdbook.yml @@ -27,7 +27,7 @@ concurrency: jobs: # Build job build: - runs-on: ubuntu-latest + runs-on: arm-runner-2204 env: MDBOOK_VERSION: 0.4.36 steps: @@ -54,7 +54,7 @@ jobs: environment: name: github-pages url: ${{ steps.deployment.outputs.page_url }} - runs-on: ubuntu-latest + runs-on: amd-runner-2204 needs: build steps: - name: Deploy to GitHub Pages diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index f388cdd0..935a3907 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml 
@@ -21,7 +21,7 @@ env: jobs: build: - runs-on: ubuntu-latest + runs-on: amd-runner-2204 strategy: fail-fast: false matrix: @@ -98,7 +98,7 @@ jobs: retention-days: 1 merge: - runs-on: ubuntu-latest + runs-on: arm-runner-2204 needs: - build steps: diff --git a/.github/workflows/test-e2e-multi_pp.yml b/.github/workflows/test-e2e-multi_pp.yml deleted file mode 100644 index a04cc183..00000000 --- a/.github/workflows/test-e2e-multi_pp.yml +++ /dev/null @@ -1,86 +0,0 @@ -# based on: https://github.com/0xPolygon/kurtosis-cdk/blob/jhilliard/multi-pp-testing/multi-pp-test.sh.md -name: Test e2e multi pp -on: - push: - branches: - - '**' - workflow_dispatch: {} - - -jobs: - test-e2e-multi_pp: - strategy: - fail-fast: false - matrix: - go-version: [ 1.22.x ] - goarch: [ "amd64" ] - e2e-group: - - "fork12-pessimistic" - runs-on: ubuntu-latest - timeout-minutes: 30 - steps: - - name: Checkout code - uses: actions/checkout@v4 - - - name: Install Go - uses: actions/setup-go@v5 - with: - go-version: ${{ matrix.go-version }} - env: - GOARCH: ${{ matrix.goarch }} - - - - name: Build Docker - run: make build-docker - - - name: Build Tools - run: make build-tools - - - name: Checkout kurtosis-cdk - uses: actions/checkout@v4 - with: - repository: 0xPolygon/kurtosis-cdk - path: kurtosis-cdk - ref: jhilliard/multi-pp-testing - - - name: Install Kurtosis CDK tools - uses: ./kurtosis-cdk/.github/actions/setup-kurtosis-cdk - - - name: Install polycli - run: | - git clone https://github.com/0xPolygon/polygon-cli -b jhilliard/alonso - cd polygon-cli - make install - cp ~/go/bin/polycli /usr/local/bin/polycli - /usr/local/bin/polycli version - - - name: Setup Bats and bats libs - uses: bats-core/bats-action@2.0.0 - - - name: Test - run: make test-e2e-fork12-multi-pessimistic - - working-directory: test - env: - KURTOSIS_FOLDER: ${{ github.workspace }}/kurtosis-cdk - BATS_LIB_PATH: /usr/lib/ - agglayer_prover_sp1_key: ${{ secrets.SP1_PRIVATE_KEY }} - - - name: Dump enclave logs - if: failure() - 
run: kurtosis dump ./dump - - - name: Generate archive name - if: failure() - run: | - archive_name="dump_run_with_args_${{matrix.e2e-group}}_${{ github.run_id }}" - echo "ARCHIVE_NAME=${archive_name}" >> "$GITHUB_ENV" - echo "Generated archive name: ${archive_name}" - kurtosis service exec aggkit cdk-node-001 'cat /etc/cdk/cdk-node-config.toml' > ./dump/cdk-node-config.toml - - - name: Upload logs - if: failure() - uses: actions/upload-artifact@v4 - with: - name: ${{ env.ARCHIVE_NAME }} - path: ./dump diff --git a/.github/workflows/test-e2e.yml b/.github/workflows/test-e2e.yml index 76d2b078..8df39db4 100644 --- a/.github/workflows/test-e2e.yml +++ b/.github/workflows/test-e2e.yml @@ -7,59 +7,50 @@ on: jobs: - build-aggkit-image: - runs-on: ubuntu-latest + build-cdk-image: + runs-on: amd-runner-2204 timeout-minutes: 20 steps: - - uses: actions/checkout@v4 - - uses: actions/setup-go@v5 + - name: Checkout code + uses: actions/checkout@v4 + + - name: Install Go + uses: actions/setup-go@v5 with: go-version: 1.22.x - - name: Build aggkit docker image + - name: Build cdk docker image run: make build-docker - - name: Save aggkit image to archive - run: docker save --output /tmp/aggkit.tar aggkit + - name: Save cdk image to archive + run: docker save --output /tmp/cdk.tar cdk - name: Upload archive uses: actions/upload-artifact@v4 with: - name: aggkit - path: /tmp/aggkit.tar + name: cdk + path: /tmp/cdk.tar test-e2e: - runs-on: ubuntu-latest + name: E2E tests (different groups) + runs-on: amd-runner-2204 timeout-minutes: 30 - needs: build-aggkit-image + needs: build-cdk-image strategy: fail-fast: false matrix: e2e-group: - - "fork9-validium" - - "fork11-rollup" - - "fork12-validium" - - "fork12-rollup" - "fork12-pessimistic" steps: - - uses: actions/checkout@v4 - - - name: Checkout kurtosis-cdk repository + - name: Checkout code uses: actions/checkout@v4 - with: - go-version: ${{ matrix.go-version }} - env: - GOARCH: ${{ matrix.goarch }} - - - name: Build Docker - run: 
make build-docker - + - name: Checkout kurtosis-cdk uses: actions/checkout@v4 with: repository: 0xPolygon/kurtosis-cdk path: kurtosis-cdk - ref: v0.2.24 + ref: v0.2.25 - name: Install Kurtosis CDK tools uses: ./kurtosis-cdk/.github/actions/setup-kurtosis-cdk @@ -77,24 +68,110 @@ jobs: - name: Setup Bats and bats libs uses: bats-core/bats-action@2.0.0 - - name: Download aggkit archive + - name: Download cdk archive uses: actions/download-artifact@v4 with: - name: aggkit + name: cdk path: /tmp - - name: Load aggkit image + - name: Load cdk image run: | - docker load --input /tmp/aggkit.tar + docker load --input /tmp/cdk.tar docker image ls -a - - name: Run e2e tests + - name: Run E2E tests run: make test-e2e-${{ matrix.e2e-group }} working-directory: test env: KURTOSIS_FOLDER: ${{ github.workspace }}/kurtosis-cdk BATS_LIB_PATH: /usr/lib/ - agglayer_prover_sp1_key: ${{ secrets.SP1_PRIVATE_KEY }} + AGGLAYER_PROVER_SP1_KEY: ${{ secrets.SP1_PRIVATE_KEY }} + + - name: Dump enclave logs + if: failure() + run: kurtosis dump ./dump + + - name: Generate archive name + if: failure() + run: | + archive_name="dump_run_with_args_${{matrix.e2e-group}}_${{ github.run_id }}" + echo "ARCHIVE_NAME=${archive_name}" >> "$GITHUB_ENV" + echo "Generated archive name: ${archive_name}" + kurtosis service exec cdk cdk-node-001 'cat /etc/cdk/cdk-node-config.toml' > ./dump/cdk-node-config.toml + + - name: Upload logs + if: failure() + uses: actions/upload-artifact@v4 + with: + name: ${{ env.ARCHIVE_NAME }} + path: ./dump + + test-e2e-multi-pp: + name: E2E tests + needs: build-cdk-image + strategy: + fail-fast: false + matrix: + go-version: [ 1.22.x ] + goarch: [ "amd64" ] + e2e-group: + - "fork12-multi-pessimistic" + runs-on: amd-runner-2204 + timeout-minutes: 30 + + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Install Go + uses: actions/setup-go@v5 + with: + go-version: ${{ matrix.go-version }} + env: + GOARCH: ${{ matrix.goarch }} + + - name: Build Tools + run: make 
build-tools + + - name: Checkout kurtosis-cdk + uses: actions/checkout@v4 + with: + repository: 0xPolygon/kurtosis-cdk + path: kurtosis-cdk + ref: jhilliard/multi-pp-testing + + - name: Install Kurtosis CDK tools + uses: ./kurtosis-cdk/.github/actions/setup-kurtosis-cdk + + - name: Install polycli + run: | + git clone https://github.com/0xPolygon/polygon-cli -b jhilliard/alonso + cd polygon-cli + make install + cp ~/go/bin/polycli /usr/local/bin/polycli + /usr/local/bin/polycli version + + - name: Setup Bats and bats libs + uses: bats-core/bats-action@2.0.0 + + - name: Download cdk archive + uses: actions/download-artifact@v4 + with: + name: cdk + path: /tmp + + - name: Load cdk image + run: | + docker load --input /tmp/cdk.tar + docker image ls -a + + - name: Run E2E tests + run: make test-e2e-fork12-multi-pessimistic + working-directory: test + env: + KURTOSIS_FOLDER: ${{ github.workspace }}/kurtosis-cdk + BATS_LIB_PATH: /usr/lib/ + AGGLAYER_PROVER_SP1_KEY: ${{ secrets.SP1_PRIVATE_KEY }} - name: Dump enclave logs if: failure() @@ -106,7 +183,7 @@ jobs: archive_name="dump_run_with_args_${{matrix.e2e-group}}_${{ github.run_id }}" echo "ARCHIVE_NAME=${archive_name}" >> "$GITHUB_ENV" echo "Generated archive name: ${archive_name}" - kurtosis service exec aggkit cdk-node-001 'cat /etc/cdk/cdk-node-config.toml' > ./dump/cdk-node-config.toml + kurtosis service exec cdk cdk-node-001 'cat /etc/cdk/cdk-node-config.toml' > ./dump/cdk-node-config.toml - name: Upload logs if: failure() diff --git a/.github/workflows/test-resequence.yml b/.github/workflows/test-resequence.yml index d65d9506..eb733cf2 100644 --- a/.github/workflows/test-resequence.yml +++ b/.github/workflows/test-resequence.yml @@ -11,16 +11,16 @@ concurrency: jobs: Resequence: - runs-on: ubuntu-latest + runs-on: amd-runner-2204 # TODO: Add "cdk-validium" once it's ready # strategy: # matrix: # da-mode: [ "rollup" ] steps: - - name: Checkout aggkit + - name: Checkout cdk uses: actions/checkout@v4 with: - path: 
aggkit + path: cdk - name: Checkout cdk-erigon uses: actions/checkout@v4 @@ -34,7 +34,7 @@ jobs: with: repository: 0xPolygon/kurtosis-cdk path: kurtosis-cdk - ref: v0.2.24 + ref: v0.2.25 - name: Install Kurtosis CDK tools uses: ./kurtosis-cdk/.github/actions/setup-kurtosis-cdk @@ -50,8 +50,8 @@ jobs: /usr/local/bin/polycli version - name: Build docker image - working-directory: ./aggkit - run: docker build -t aggkit:local --file Dockerfile . + working-directory: ./cdk + run: docker build -t cdk:local --file Dockerfile . - name: Remove unused flags working-directory: ./kurtosis-cdk @@ -62,7 +62,7 @@ jobs: - name: Configure Kurtosis CDK working-directory: ./kurtosis-cdk run: | - /usr/local/bin/yq -i '.args.cdk_node_image = "aggkit:local"' params.yml + /usr/local/bin/yq -i '.args.cdk_node_image = "cdk:local"' params.yml /usr/local/bin/yq -i '.args.zkevm_rollup_fork_id = "12"' params.yml /usr/local/bin/yq -i '.args.zkevm_prover_image = "hermeznetwork/zkevm-prover:v8.0.0-RC5-fork.12"' params.yml /usr/local/bin/yq -i '.args.cdk_erigon_node_image = "jerrycgh/cdk-erigon:d5d04906f723f3f1d8c43c9e6baf3e18c27ff348"' params.yml diff --git a/.github/workflows/test-unit.yml b/.github/workflows/test-unit.yml index 66cfc010..7db18445 100644 --- a/.github/workflows/test-unit.yml +++ b/.github/workflows/test-unit.yml @@ -16,7 +16,7 @@ jobs: matrix: go-version: [1.22.4] goarch: ["amd64"] - runs-on: ubuntu-latest + runs-on: amd-runner-2204 steps: - name: Checkout code uses: actions/checkout@v4 @@ -32,9 +32,11 @@ jobs: - name: Test run: make test-unit - - - name: Analyze with SonarCloud - uses: sonarsource/sonarcloud-github-action@master - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }} \ No newline at end of file + + # TODO: Uncomment the following lines to enable SonarCloud analysis, once the project is set up in SonarCloud + # + # - name: Analyze with SonarCloud + # uses: sonarsource/sonarcloud-github-action@master + # env: + # GITHUB_TOKEN: 
${{ secrets.GITHUB_TOKEN }} + # SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }} \ No newline at end of file diff --git a/.github/workflows/x86_deb_packager.yml b/.github/workflows/x86_deb_packager.yml index 13320570..584aad6d 100644 --- a/.github/workflows/x86_deb_packager.yml +++ b/.github/workflows/x86_deb_packager.yml @@ -16,7 +16,7 @@ jobs: permissions: id-token: write contents: write - runs-on: ubuntu-latest + runs-on: amd-runner-2204 steps: - name: Checkout uses: actions/checkout@v4 @@ -43,47 +43,47 @@ jobs: - name: Build the rust binary run: | BUILD_SCRIPT_DISABLED=1 - cargo build --release --bin aggkit-cli + cargo build --release --bin cdk - name: making directory structure - run: mkdir -p packaging/deb/aggkit/usr/bin/ + run: mkdir -p packaging/deb/cdk/usr/bin/ - name: copying necessary binary for amd64 - run: cp -rp target/aggkit packaging/deb/aggkit/usr/bin/aggkit + run: cp -rp target/cdk-node packaging/deb/cdk/usr/bin/cdk-node - name: copying rust binary for amd64 - run: cp -rp target/release/aggkit-cli packaging/deb/aggkit/usr/bin/aggkit-cli + run: cp -rp target/release/cdk packaging/deb/cdk/usr/bin/cdk # Control file creation - name: Create control file run: | - echo "Package: aggkit" >> packaging/deb/aggkit/DEBIAN/control - echo "Version: ${{ env.VERSION }}" >> packaging/deb/aggkit/DEBIAN/control - echo "Section: base" >> packaging/deb/aggkit/DEBIAN/control - echo "Priority: optional" >> packaging/deb/aggkit/DEBIAN/control - echo "Architecture: amd64" >> packaging/deb/aggkit/DEBIAN/control - echo "Maintainer: devops@polygon.technology" >> packaging/deb/aggkit/DEBIAN/control - echo "Description: aggkit binary package" >> packaging/deb/aggkit/DEBIAN/control + echo "Package: cdk" >> packaging/deb/cdk/DEBIAN/control + echo "Version: ${{ env.VERSION }}" >> packaging/deb/cdk/DEBIAN/control + echo "Section: base" >> packaging/deb/cdk/DEBIAN/control + echo "Priority: optional" >> packaging/deb/cdk/DEBIAN/control + echo "Architecture: amd64" >> 
packaging/deb/cdk/DEBIAN/control + echo "Maintainer: devops@polygon.technology" >> packaging/deb/cdk/DEBIAN/control + echo "Description: cdk binary package" >> packaging/deb/cdk/DEBIAN/control - - name: Creating package for binary for aggkit ${{ env.ARCH }} - run: cp -rp packaging/deb/aggkit packaging/deb/aggkit-${{ env.GIT_TAG }}-${{ env.ARCH }} + - name: Creating package for binary for cdk ${{ env.ARCH }} + run: cp -rp packaging/deb/cdk packaging/deb/cdk-${{ env.GIT_TAG }}-${{ env.ARCH }} env: ARCH: amd64 - name: Running package build - run: dpkg-deb --build --root-owner-group packaging/deb/aggkit-${{ env.GIT_TAG }}-${{ env.ARCH }} + run: dpkg-deb --build --root-owner-group packaging/deb/cdk-${{ env.GIT_TAG }}-${{ env.ARCH }} env: ARCH: amd64 - name: Create checksum for the amd64 package - run: cd packaging/deb/ && sha256sum aggkit-${{ env.GIT_TAG }}-${{ env.ARCH }}.deb > aggkit-${{ env.GIT_TAG }}-${{ env.ARCH }}.deb.checksum + run: cd packaging/deb/ && sha256sum cdk-${{ env.GIT_TAG }}-${{ env.ARCH }}.deb > cdk-${{ env.GIT_TAG }}-${{ env.ARCH }}.deb.checksum env: ARCH: amd64 - - name: Release aggkit Packages + - name: Release cdk Packages uses: softprops/action-gh-release@v2 with: tag_name: ${{ env.GIT_TAG }} prerelease: true files: | - packaging/deb/aggkit**.deb - packaging/deb/aggkit**.deb.checksum + packaging/deb/cdk**.deb + packaging/deb/cdk**.deb.checksum diff --git a/.github/workflows/x86_rpm_packager.yml b/.github/workflows/x86_rpm_packager.yml index 9f06fb64..d62772ba 100644 --- a/.github/workflows/x86_rpm_packager.yml +++ b/.github/workflows/x86_rpm_packager.yml @@ -15,7 +15,7 @@ jobs: permissions: id-token: write contents: write - runs-on: ubuntu-latest + runs-on: amd-runner-2204 steps: - name: Checkout uses: actions/checkout@v4 @@ -34,13 +34,13 @@ jobs: - name: Download deps for project run: go mod download - - name: Building cdk-node for amd64 + - name: Building aggkit for amd64 run: make build - - name: Building the cdk + - name: Building the aggkit 
run: | BUILD_SCRIPT_DISABLED=1 - cargo build --release --bin cdk + cargo build --release --bin aggkit - name: Installing some dependencies run: sudo apt-get update && sudo apt-get install -y rpm @@ -52,51 +52,51 @@ jobs: mkdir -p packaging/rpm/RPMS mkdir -p packaging/rpm/SRPMS - touch packaging/rpm/cdk.spec - echo "Name: cdk" >> packaging/rpm/SPECS/cdk.spec - echo "Version: ${{ env.GIT_TAG1 }}" >> packaging/rpm/SPECS/cdk.spec - echo "Release: 1%{?dist}" >> packaging/rpm/SPECS/cdk.spec - echo "License: GPL/AGPL" >> packaging/rpm/SPECS/cdk.spec - echo "BuildArch: x86_64" >> packaging/rpm/SPECS/cdk.spec - echo "Summary: cdk rpm package" >> packaging/rpm/SPECS/cdk.spec + touch packaging/rpm/aggkit.spec + echo "Name: aggkit" >> packaging/rpm/SPECS/aggkit.spec + echo "Version: ${{ env.GIT_TAG1 }}" >> packaging/rpm/SPECS/aggkit.spec + echo "Release: 1%{?dist}" >> packaging/rpm/SPECS/aggkit.spec + echo "License: GPL/AGPL" >> packaging/rpm/SPECS/aggkit.spec + echo "BuildArch: x86_64" >> packaging/rpm/SPECS/aggkit.spec + echo "Summary: aggkit rpm package" >> packaging/rpm/SPECS/aggkit.spec - echo "%description" >> packaging/rpm/SPECS/cdk.spec - echo "cdk rpm package" >> packaging/rpm/SPECS/cdk.spec + echo "%description" >> packaging/rpm/SPECS/aggkit.spec + echo "aggkit rpm package" >> packaging/rpm/SPECS/aggkit.spec - echo "%pre" >> packaging/rpm/SPECS/cdk.spec - echo "getent group cdk >/dev/null || groupadd -r cdk" >> packaging/rpm/SPECS/cdk.spec - echo "getent passwd cdk >/dev/null || useradd -s /bin/false -d /opt/cdk -r cdk -g cdk" >> packaging/rpm/SPECS/cdk.spec + echo "%pre" >> packaging/rpm/SPECS/aggkit.spec + echo "getent group aggkit >/dev/null || groupadd -r aggkit" >> packaging/rpm/SPECS/aggkit.spec + echo "getent passwd aggkit >/dev/null || useradd -s /bin/false -d /opt/aggkit -r aggkit -g aggkit" >> packaging/rpm/SPECS/aggkit.spec - echo "%install" >> packaging/rpm/SPECS/cdk.spec - echo "mkdir -p %{buildroot}/usr/bin" >> packaging/rpm/SPECS/cdk.spec - echo "cp 
/home/runner/work/cdk/cdk/target/cdk-node %{buildroot}/usr/bin/cdk-node" >> packaging/rpm/SPECS/cdk.spec - echo "cp /home/runner/work/cdk/cdk/target/release/cdk %{buildroot}/usr/bin/cdk" >> packaging/rpm/SPECS/cdk.spec + echo "%install" >> packaging/rpm/SPECS/aggkit.spec + echo "mkdir -p %{buildroot}/usr/bin" >> packaging/rpm/SPECS/aggkit.spec + echo "cp /home/runner/work/aggkit/aggkit/target/aggkit-node %{buildroot}/usr/bin/aggkit-node" >> packaging/rpm/SPECS/aggkit.spec + echo "cp /home/runner/work/aggkit/aggkit/target/release/aggkit %{buildroot}/usr/bin/aggkit" >> packaging/rpm/SPECS/aggkit.spec - echo "%files" >> packaging/rpm/SPECS/cdk.spec - echo "/usr/bin/cdk" >> packaging/rpm/SPECS/cdk.spec - echo "/usr/bin/cdk-node" >> packaging/rpm/SPECS/cdk.spec + echo "%files" >> packaging/rpm/SPECS/aggkit.spec + echo "/usr/bin/aggkit" >> packaging/rpm/SPECS/aggkit.spec + echo "/usr/bin/aggkit-node" >> packaging/rpm/SPECS/aggkit.spec - name: Construct rpm package run: | - rpmbuild --define "_topdir /home/runner/work/cdk/cdk/packaging/rpm_build" \ + rpmbuild --define "_topdir /home/runner/work/aggkit/aggkit/packaging/rpm_build" \ --define "_builddir %{_topdir}/BUILD" \ --define "_rpmdir %{_topdir}/RPMS" \ --define "_srcrpmdir %{_topdir}/SRPMS" \ --define "__spec_install_post /bin/true" \ - -bb packaging/rpm/SPECS/cdk.spec + -bb packaging/rpm/SPECS/aggkit.spec - name: Rename file for post rpm build and for checksum - run: mv /home/runner/work/cdk/cdk/packaging/rpm_build/RPMS/x86_64/cdk-${{ env.GIT_TAG1 }}-1.x86_64.rpm /home/runner/work/cdk/cdk/packaging/rpm_build/RPMS/x86_64/cdk-${{ env.GIT_TAG1 }}.x86_64.rpm + run: mv /home/runner/work/aggkit/aggkit/packaging/rpm_build/RPMS/x86_64/aggkit-${{ env.GIT_TAG1 }}-1.x86_64.rpm /home/runner/work/aggkit/aggkit/packaging/rpm_build/RPMS/x86_64/aggkit-${{ env.GIT_TAG1 }}.x86_64.rpm - name: Checksum for the rpm package - run: sha256sum /home/runner/work/cdk/cdk/packaging/rpm_build/RPMS/x86_64/cdk-${{ env.GIT_TAG1 }}.x86_64.rpm > 
/home/runner/work/cdk/cdk/packaging/rpm_build/RPMS/x86_64/cdk-${{ env.GIT_TAG1 }}.x86_64.rpm.checksum + run: sha256sum /home/runner/work/aggkit/aggkit/packaging/rpm_build/RPMS/x86_64/aggkit-${{ env.GIT_TAG1 }}.x86_64.rpm > /home/runner/work/aggkit/aggkit/packaging/rpm_build/RPMS/x86_64/aggkit-${{ env.GIT_TAG1 }}.x86_64.rpm.checksum - - name: Release cdk Packages + - name: Release aggkit Packages uses: softprops/action-gh-release@v2 with: tag_name: ${{ env.GIT_TAG }} prerelease: true files: | - packaging/rpm_build/RPMS/x86_64/cdk-**.rpm - packaging/rpm_build/RPMS/x86_64/cdk-**.rpm.checksum + packaging/rpm_build/RPMS/x86_64/aggkit-**.rpm + packaging/rpm_build/RPMS/x86_64/aggkit-**.rpm.checksum diff --git a/.gitignore b/.gitignore index ce4e0058..5f16e099 100644 --- a/.gitignore +++ b/.gitignore @@ -13,5 +13,5 @@ coverage.out coverage.html .idea .idea/* - +build/* data diff --git a/.golangci.yml b/.golangci.yml index 00f17235..602f8d52 100644 --- a/.golangci.yml +++ b/.golangci.yml @@ -76,7 +76,6 @@ issues: - dupl exclude-dirs: - tests - - aggregator/db/migrations include: - EXC0012 # Exported (.+) should have comment( \(or a comment on this block\))? or be unexported - EXC0013 # Package comment should be of the form "(.+)... 
diff --git a/Dockerfile b/Dockerfile index 45fbf18d..3ac06bde 100644 --- a/Dockerfile +++ b/Dockerfile @@ -24,7 +24,7 @@ COPY --link crates crates COPY --link Cargo.toml Cargo.toml COPY --link Cargo.lock Cargo.lock -RUN cargo chef prepare --recipe-path recipe.json --bin aggkit +RUN cargo chef prepare --recipe-path recipe.json --bin cdk FROM chef AS builder @@ -37,15 +37,15 @@ COPY --link Cargo.toml Cargo.toml COPY --link Cargo.lock Cargo.lock ENV BUILD_SCRIPT_DISABLED=1 -RUN cargo build --release --bin aggkit +RUN cargo build --release --bin cdk # CONTAINER FOR RUNNING BINARY FROM --platform=${BUILDPLATFORM} debian:bookworm-slim RUN apt-get update && apt-get install -y ca-certificates sqlite3 procps libssl-dev && rm -rf /var/lib/apt/lists/* -COPY --from=builder /app/target/release/aggkit /usr/local/bin/ -COPY --from=build /go/src/github.com/agglayer/aggkit/target/aggkit /usr/local/bin/ +COPY --from=builder /app/target/release/cdk /usr/local/bin/ +COPY --from=build /go/src/github.com/agglayer/aggkit/target/cdk-node /usr/local/bin/ EXPOSE 5576/tcp -CMD ["/bin/sh", "-c", "aggkit"] +CMD ["/bin/sh", "-c", "cdk"] diff --git a/Makefile b/Makefile index 1c1a3bc2..a0e14560 100644 --- a/Makefile +++ b/Makefile @@ -12,7 +12,7 @@ endif GOBASE := $(shell pwd) GOBIN := $(GOBASE)/target GOENVVARS := GOBIN=$(GOBIN) CGO_ENABLED=1 GOARCH=$(ARCH) -GOBINARY := aggkit +GOBINARY := cdk-node GOCMD := $(GOBASE)/cmd LDFLAGS += -X 'github.com/agglayer/aggkit.Version=$(VERSION)' @@ -55,12 +55,12 @@ stop: check-docker check-docker-compose install-linter: check-go check-curl generate-code-from-proto: check-protoc -.PHONY: build -build: build-rust build-go build-tools## Builds the binaries locally into ./target +.PHONY: build ## Builds the binaries locally into ./target +build: build-rust build-go build-tools .PHONY: build-rust build-rust: - cargo build --release --jobs $(shell nproc) + export BUILD_SCRIPT_DISABLED=1 && cargo build --release .PHONY: build-go build-go: @@ -71,12 +71,12 @@ 
build-tools: ## Builds the tools $(GOENVVARS) go build -o $(GOBIN)/aggsender_find_imported_bridge ./tools/aggsender_find_imported_bridge .PHONY: build-docker -build-docker: ## Builds a docker image with the aggkit binary - docker build -t aggkit -f ./Dockerfile . +build-docker: ## Builds a docker image with the cdk binary + docker build -t cdk -f ./Dockerfile . .PHONY: build-docker-nc -build-docker-nc: ## Builds a docker image with the aggkit binary - but without build cache - docker build --no-cache=true -t aggkit -f ./Dockerfile . +build-docker-nc: ## Builds a docker image with the cdk binary - but without build cache + docker build --no-cache=true -t cdk -f ./Dockerfile . .PHONY: stop stop: ## Stops all services @@ -86,17 +86,12 @@ stop: ## Stops all services test-unit: trap '$(STOP)' EXIT; MallocNanoZone=0 go test -count=1 -short -race -p 1 -covermode=atomic -coverprofile=coverage.out -coverpkg ./... -timeout 15m ./... -.PHONY: test-seq_sender -test-seq_sender: - trap '$(STOP)' EXIT; MallocNanoZone=0 go test -count=1 -short -race -p 1 -covermode=atomic -coverprofile=../coverage.out -timeout 200s ./sequencesender/... - .PHONY: lint lint: ## Runs the linter export "GOROOT=$$(go env GOROOT)" && $$(go env GOPATH)/bin/golangci-lint run --timeout 5m .PHONY: generate-code-from-proto generate-code-from-proto: ## Generates code from proto files - cd proto/src/proto/aggregator/v1 && protoc --proto_path=. --proto_path=../../../../include --go_out=../../../../../aggregator/prover --go-grpc_out=../../../../../aggregator/prover --go-grpc_opt=paths=source_relative --go_opt=paths=source_relative aggregator.proto cd proto/src/proto/datastream/v1 && protoc --proto_path=. 
--proto_path=../../../../include --go_out=../../../../../state/datastream --go-grpc_out=../../../../../state/datastream --go-grpc_opt=paths=source_relative --go_opt=paths=source_relative datastream.proto diff --git a/agglayer/mock_agglayer_client.go b/agglayer/mock_agglayer_client.go index 8b8c8689..6c5a3fbf 100644 --- a/agglayer/mock_agglayer_client.go +++ b/agglayer/mock_agglayer_client.go @@ -81,7 +81,7 @@ func (_c *AgglayerClientMock_GetCertificateHeader_Call) RunAndReturn(run func(co return _c } -// GetEpochConfiguration provides a mock function with no fields +// GetEpochConfiguration provides a mock function with given fields: func (_m *AgglayerClientMock) GetEpochConfiguration() (*ClockConfiguration, error) { ret := _m.Called() diff --git a/aggoracle/chaingersender/evm.go b/aggoracle/chaingersender/evm.go index fd3100c0..98ee9104 100644 --- a/aggoracle/chaingersender/evm.go +++ b/aggoracle/chaingersender/evm.go @@ -9,7 +9,6 @@ import ( "github.com/0xPolygon/cdk-contracts-tooling/contracts/l2-sovereign-chain/globalexitrootmanagerl2sovereignchain" "github.com/0xPolygon/zkevm-ethtx-manager/ethtxmanager" ethtxtypes "github.com/0xPolygon/zkevm-ethtx-manager/types" - aggkitcommon "github.com/agglayer/aggkit/common" cfgtypes "github.com/agglayer/aggkit/config/types" "github.com/agglayer/aggkit/log" "github.com/ethereum/go-ethereum" @@ -43,7 +42,7 @@ type EthTxManager interface { ) (common.Hash, error) } -type L2GERManager interface { +type L2GERManagerContract interface { GlobalExitRootMap(opts *bind.CallOpts, ger [common.HashLength]byte) (*big.Int, error) } @@ -59,7 +58,7 @@ type EVMConfig struct { type EVMChainGERSender struct { logger *log.Logger - l2GERManager L2GERManager + l2GERManager L2GERManagerContract l2GERManagerAddr common.Address l2GERManagerAbi *abi.ABI @@ -99,12 +98,12 @@ func NewEVMChainGERSender( } func (c *EVMChainGERSender) IsGERInjected(ger common.Hash) (bool, error) { - blockHashBigInt, err := 
c.l2GERManager.GlobalExitRootMap(&bind.CallOpts{Pending: false}, ger) + gerIndex, err := c.l2GERManager.GlobalExitRootMap(&bind.CallOpts{Pending: false}, ger) if err != nil { return false, fmt.Errorf("failed to check if global exit root is injected %s: %w", ger, err) } - return common.BigToHash(blockHashBigInt) != aggkitcommon.ZeroHash, nil + return gerIndex.Cmp(common.Big0) == 1, nil } func (c *EVMChainGERSender) InjectGER(ctx context.Context, ger common.Hash) error { diff --git a/aggregator/aggregator.go b/aggregator/aggregator.go deleted file mode 100644 index 37017216..00000000 --- a/aggregator/aggregator.go +++ /dev/null @@ -1,1665 +0,0 @@ -package aggregator - -import ( - "context" - "crypto/ecdsa" - "encoding/json" - "errors" - "fmt" - "math" - "math/big" - "net" - "strings" - "sync" - "sync/atomic" - "time" - "unicode" - - aggkittypes "github.com/0xPolygon/cdk-rpc/types" - "github.com/0xPolygon/zkevm-ethtx-manager/ethtxmanager" - ethtxlog "github.com/0xPolygon/zkevm-ethtx-manager/log" - ethtxtypes "github.com/0xPolygon/zkevm-ethtx-manager/types" - synclog "github.com/0xPolygonHermez/zkevm-synchronizer-l1/log" - "github.com/0xPolygonHermez/zkevm-synchronizer-l1/state/entities" - "github.com/0xPolygonHermez/zkevm-synchronizer-l1/synchronizer" - "github.com/0xPolygonHermez/zkevm-synchronizer-l1/synchronizer/l1_check_block" - "github.com/agglayer/aggkit/agglayer" - "github.com/agglayer/aggkit/aggregator/db/dbstorage" - ethmanTypes "github.com/agglayer/aggkit/aggregator/ethmantypes" - "github.com/agglayer/aggkit/aggregator/prover" - aggkitcommon "github.com/agglayer/aggkit/common" - "github.com/agglayer/aggkit/config/types" - "github.com/agglayer/aggkit/l1infotree" - "github.com/agglayer/aggkit/log" - "github.com/agglayer/aggkit/rpc" - "github.com/agglayer/aggkit/state" - "github.com/ethereum/go-ethereum/common" - "go.uber.org/zap/zapcore" - "google.golang.org/grpc" - grpchealth "google.golang.org/grpc/health/grpc_health_v1" - "google.golang.org/grpc/peer" -) - 
-const ( - mockedStateRoot = "0x090bcaf734c4f06c93954a827b45a6e8c67b8e0fd1e0a35a1c5982d6961828f9" - mockedLocalExitRoot = "0x17c04c3760510b48c6012742c540a81aba4bca2f78b9d14bfd2f123e2e53ea3e" - maxDBBigIntValue = 9223372036854775807 -) - -type finalProofMsg struct { - proverName string - proverID string - recursiveProof *state.Proof - finalProof *prover.FinalProof -} - -// Aggregator represents an aggregator -type Aggregator struct { - prover.UnimplementedAggregatorServiceServer - - cfg Config - logger *log.Logger - - storage StorageInterface - etherman Etherman - ethTxManager EthTxManagerClient - l1Syncr synchronizer.Synchronizer - halted atomic.Bool - accInputHashes map[uint64]common.Hash - accInputHashesMutex *sync.Mutex - - timeSendFinalProof time.Time - timeCleanupLockedProofs types.Duration - storageMutex *sync.Mutex - timeSendFinalProofMutex *sync.RWMutex - - finalProof chan finalProofMsg - verifyingProof bool - - witnessRetrievalChan chan state.DBBatch - - srv *grpc.Server - ctx context.Context - exit context.CancelFunc - - sequencerPrivateKey *ecdsa.PrivateKey - aggLayerClient agglayer.AgglayerClientInterface - - rpcClient RPCInterface -} - -// New creates a new aggregator. 
-func New( - ctx context.Context, - cfg Config, - logger *log.Logger, - etherman Etherman) (*Aggregator, error) { - // Create ethtxmanager client - cfg.EthTxManager.Log = ethtxlog.Config{ - Environment: ethtxlog.LogEnvironment(cfg.Log.Environment), - Level: cfg.Log.Level, - Outputs: cfg.Log.Outputs, - } - ethTxManager, err := ethtxmanager.New(cfg.EthTxManager) - if err != nil { - logger.Fatalf("error creating ethtxmanager client: %v", err) - } - - // Synchonizer logs - syncLogConfig := synclog.Config{ - Environment: synclog.LogEnvironment(cfg.Log.Environment), - Level: cfg.Log.Level, - Outputs: cfg.Log.Outputs, - } - - cfg.Synchronizer.Log = syncLogConfig - - // Create L1 synchronizer client - cfg.Synchronizer.Etherman.L1URL = cfg.EthTxManager.Etherman.URL - logger.Debugf("Creating synchronizer client with config: %+v", cfg.Synchronizer) - l1Syncr, err := synchronizer.NewSynchronizer(ctx, cfg.Synchronizer) - if err != nil { - logger.Fatalf("failed to create synchronizer client, error: %v", err) - } - - var ( - aggLayerClient agglayer.AgglayerClientInterface - sequencerPrivateKey *ecdsa.PrivateKey - ) - - if !cfg.SyncModeOnlyEnabled && cfg.SettlementBackend == AggLayer { - aggLayerClient = agglayer.NewAggLayerClient(cfg.AggLayerURL) - - sequencerPrivateKey, err = aggkitcommon.NewKeyFromKeystore(cfg.SequencerPrivateKey) - if err != nil { - return nil, err - } - } - - storage, err := dbstorage.NewDBStorage(cfg.DBPath) - if err != nil { - return nil, err - } - - a := &Aggregator{ - ctx: ctx, - cfg: cfg, - logger: logger, - storage: storage, - etherman: etherman, - ethTxManager: ethTxManager, - l1Syncr: l1Syncr, - accInputHashes: make(map[uint64]common.Hash), - accInputHashesMutex: &sync.Mutex{}, - storageMutex: &sync.Mutex{}, - timeSendFinalProofMutex: &sync.RWMutex{}, - timeCleanupLockedProofs: cfg.CleanupLockedProofsInterval, - finalProof: make(chan finalProofMsg), - aggLayerClient: aggLayerClient, - sequencerPrivateKey: sequencerPrivateKey, - witnessRetrievalChan: 
make(chan state.DBBatch), - rpcClient: rpc.NewBatchEndpoints(cfg.RPCURL), - } - - if a.ctx == nil { - a.ctx, a.exit = context.WithCancel(a.ctx) - } - - // Set function to handle events on L1 - if !cfg.SyncModeOnlyEnabled { - a.l1Syncr.SetCallbackOnReorgDone(a.handleReorg) - a.l1Syncr.SetCallbackOnRollbackBatches(a.handleRollbackBatches) - } - - return a, nil -} - -func (a *Aggregator) getAccInputHash(batchNumber uint64) common.Hash { - a.accInputHashesMutex.Lock() - defer a.accInputHashesMutex.Unlock() - return a.accInputHashes[batchNumber] -} - -func (a *Aggregator) setAccInputHash(batchNumber uint64, accInputHash common.Hash) { - a.accInputHashesMutex.Lock() - defer a.accInputHashesMutex.Unlock() - a.accInputHashes[batchNumber] = accInputHash -} - -func (a *Aggregator) removeAccInputHashes(firstBatch, lastBatch uint64) { - a.accInputHashesMutex.Lock() - defer a.accInputHashesMutex.Unlock() - for i := firstBatch; i <= lastBatch; i++ { - delete(a.accInputHashes, i) - } -} - -func (a *Aggregator) handleReorg(reorgData synchronizer.ReorgExecutionResult) { - a.logger.Warnf("Reorg detected, reorgData: %+v", reorgData) - - // Get new latest verified batch number - lastVBatchNumber, err := a.l1Syncr.GetLastestVirtualBatchNumber(a.ctx) - if err != nil { - a.logger.Errorf("Error getting last virtual batch number: %v", err) - } else { - // Delete wip proofs - err = a.storage.DeleteUngeneratedProofs(a.ctx, nil) - if err != nil { - a.logger.Errorf("Error deleting ungenerated proofs: %v", err) - } else { - a.logger.Info("Deleted ungenerated proofs") - } - - // Delete any proof for the batches that have been rolled back - err = a.storage.DeleteGeneratedProofs(a.ctx, lastVBatchNumber+1, maxDBBigIntValue, nil) - if err != nil { - a.logger.Errorf("Error deleting generated proofs: %v", err) - } else { - a.logger.Infof("Deleted generated proofs for batches newer than %d", lastVBatchNumber) - } - } - - // Halt the aggregator - a.halted.Store(true) - for { - a.logger.Warnf( - "Halting 
the aggregator due to a L1 reorg. " + - "Reorged data has been deleted, so it is safe to manually restart the aggregator.", - ) - time.Sleep(10 * time.Second) //nolint:mnd - } -} - -func (a *Aggregator) handleRollbackBatches(rollbackData synchronizer.RollbackBatchesData) { - a.logger.Warnf("Rollback batches event, rollbackBatchesData: %+v", rollbackData) - - var err error - var accInputHash *common.Hash - - // Get new last verified batch number from L1 - lastVerifiedBatchNumber, err := a.etherman.GetLatestVerifiedBatchNum() - if err != nil { - a.logger.Errorf("Error getting latest verified batch number: %v", err) - } - - a.logger.Infof("Last Verified Batch Number:%v", lastVerifiedBatchNumber) - - // Check lastVerifiedBatchNumber makes sense - if err == nil && lastVerifiedBatchNumber > rollbackData.LastBatchNumber { - err = fmt.Errorf( - "last verified batch number %d is greater than the last batch number %d in the rollback data", - lastVerifiedBatchNumber, rollbackData.LastBatchNumber, - ) - } - - if err == nil { - accInputHash, err = a.getVerifiedBatchAccInputHash(a.ctx, lastVerifiedBatchNumber) - if err == nil { - a.accInputHashesMutex.Lock() - a.accInputHashes = make(map[uint64]common.Hash) - a.accInputHashesMutex.Unlock() - a.logger.Infof("Starting AccInputHash:%v", accInputHash.String()) - a.setAccInputHash(lastVerifiedBatchNumber, *accInputHash) - } - } - - // Delete wip proofs - if err == nil { - err = a.storage.DeleteUngeneratedProofs(a.ctx, nil) - if err != nil { - a.logger.Errorf("Error deleting ungenerated proofs: %v", err) - } else { - a.logger.Info("Deleted ungenerated proofs") - } - } - - // Delete any proof for the batches that have been rolled back - if err == nil { - err = a.storage.DeleteGeneratedProofs(a.ctx, rollbackData.LastBatchNumber+1, maxDBBigIntValue, nil) - if err != nil { - a.logger.Errorf("Error deleting generated proofs: %v", err) - } else { - a.logger.Infof("Deleted generated proofs for batches newer than %d", 
rollbackData.LastBatchNumber) - } - } - - if err == nil { - a.logger.Info("Handling rollback batches event finished successfully") - } else { - // Halt the aggregator - a.halted.Store(true) - for { - a.logger.Errorf("Halting the aggregator due to an error handling rollback batches event: %v", err) - time.Sleep(10 * time.Second) //nolint:mnd - } - } -} - -// Start starts the aggregator -func (a *Aggregator) Start() error { - // Initial L1 Sync blocking - err := a.l1Syncr.Sync(true) - if err != nil { - a.logger.Fatalf("Failed to synchronize from L1: %v", err) - return err - } - - // Keep syncing L1 - go func() { - err := a.l1Syncr.Sync(false) - if err != nil { - a.logger.Fatalf("Failed to synchronize from L1: %v", err) - } - }() - - if !a.cfg.SyncModeOnlyEnabled { - address := fmt.Sprintf("%s:%d", a.cfg.Host, a.cfg.Port) - lis, err := net.Listen("tcp", address) - if err != nil { - a.logger.Fatalf("Failed to listen: %v", err) - } - - a.srv = grpc.NewServer() - prover.RegisterAggregatorServiceServer(a.srv, a) - - healthService := newHealthChecker() - grpchealth.RegisterHealthServer(a.srv, healthService) - - // Get last verified batch number to set the starting point for verifications - lastVerifiedBatchNumber, err := a.etherman.GetLatestVerifiedBatchNum() - if err != nil { - return err - } - - a.logger.Infof("Last Verified Batch Number:%v", lastVerifiedBatchNumber) - - accInputHash, err := a.getVerifiedBatchAccInputHash(a.ctx, lastVerifiedBatchNumber) - if err != nil { - return err - } - - a.logger.Infof("Starting AccInputHash:%v", accInputHash.String()) - a.setAccInputHash(lastVerifiedBatchNumber, *accInputHash) - - // Delete existing proofs - err = a.storage.DeleteGeneratedProofs(a.ctx, lastVerifiedBatchNumber, maxDBBigIntValue, nil) - if err != nil { - return fmt.Errorf("failed to delete proofs table %w", err) - } - - a.resetVerifyProofTime() - - go a.cleanupLockedProofs() - go a.sendFinalProof() - go a.ethTxManager.Start() - - // A this point everything is ready, 
so start serving - go func() { - a.logger.Infof("Server listening on port %d", a.cfg.Port) - if err := a.srv.Serve(lis); err != nil { - a.exit() - a.logger.Fatalf("Failed to serve: %v", err) - } - }() - } - - <-a.ctx.Done() - - return a.ctx.Err() -} - -// Stop stops the Aggregator server. -func (a *Aggregator) Stop() { - a.exit() - a.srv.Stop() -} - -// Channel implements the bi-directional communication channel between the -// Prover client and the Aggregator server. -func (a *Aggregator) Channel(stream prover.AggregatorService_ChannelServer) error { - ctx := stream.Context() - var proverAddr net.Addr - p, ok := peer.FromContext(ctx) - if ok { - proverAddr = p.Addr - } - proverLogger := log.WithFields("module", aggkitcommon.PROVER) - prover, err := prover.New(proverLogger, stream, proverAddr, a.cfg.ProofStatePollingInterval) - if err != nil { - return err - } - - tmpLogger := proverLogger.WithFields( - "prover", prover.Name(), - "proverId", prover.ID(), - "proverAddr", prover.Addr(), - ) - tmpLogger.Info("Establishing stream connection with prover") - - // Check if prover supports the required Fork ID - if !prover.SupportsForkID(a.cfg.ForkId) { - err := errors.New("prover does not support required fork ID") - tmpLogger.Warn(FirstToUpper(err.Error())) - - return err - } - - for { - select { - case <-a.ctx.Done(): - // server disconnected - return a.ctx.Err() - case <-ctx.Done(): - // client disconnected - return ctx.Err() - - default: - if !a.halted.Load() { - isIdle, err := prover.IsIdle() - if err != nil { - tmpLogger.Errorf("Failed to check if prover is idle: %v", err) - time.Sleep(a.cfg.RetryTime.Duration) - - continue - } - if !isIdle { - tmpLogger.Debug("Prover is not idle") - time.Sleep(a.cfg.RetryTime.Duration) - - continue - } - - _, err = a.tryBuildFinalProof(ctx, prover, nil) - if err != nil { - tmpLogger.Errorf("Error checking proofs to verify: %v", err) - } - - proofGenerated, err := a.tryAggregateProofs(ctx, prover) - if err != nil { - 
tmpLogger.Errorf("Error trying to aggregate proofs: %v", err) - } - - if !proofGenerated { - proofGenerated, err = a.tryGenerateBatchProof(ctx, prover) - if err != nil { - tmpLogger.Errorf("Error trying to generate proof: %v", err) - } - } - if !proofGenerated { - // if no proof was generated (aggregated or batch) wait some time before retry - time.Sleep(a.cfg.RetryTime.Duration) - } // if proof was generated we retry immediately as probably we have more proofs to process - } - } - } -} - -// This function waits to receive a final proof from a prover. Once it receives -// the proof, it performs these steps in order: -// - send the final proof to L1 -// - wait for the synchronizer to catch up -// - clean up the cache of recursive proofs -func (a *Aggregator) sendFinalProof() { - for { - select { - case <-a.ctx.Done(): - return - case msg := <-a.finalProof: - ctx := a.ctx - proof := msg.recursiveProof - - tmpLogger := a.logger.WithFields( - "proofId", proof.ProofID, - "batches", fmt.Sprintf("%d-%d", proof.BatchNumber, proof.BatchNumberFinal)) - tmpLogger.Info("Verifying final proof with ethereum smart contract") - - a.startProofVerification() - - // Get Batch from RPC - rpcFinalBatch, err := a.rpcClient.GetBatch(proof.BatchNumberFinal) - if err != nil { - a.logger.Errorf("error getting batch %d from RPC: %v.", proof.BatchNumberFinal, err) - a.endProofVerification() - continue - } - - inputs := ethmanTypes.FinalProofInputs{ - FinalProof: msg.finalProof, - NewLocalExitRoot: rpcFinalBatch.LocalExitRoot().Bytes(), - NewStateRoot: rpcFinalBatch.StateRoot().Bytes(), - } - - switch a.cfg.SettlementBackend { - case AggLayer: - if success := a.settleWithAggLayer(ctx, proof, inputs); !success { - continue - } - default: - if success := a.settleDirect(ctx, proof, inputs); !success { - continue - } - } - - a.resetVerifyProofTime() - a.endProofVerification() - } - } -} - -func (a *Aggregator) settleWithAggLayer( - ctx context.Context, - proof *state.Proof, - inputs 
ethmanTypes.FinalProofInputs) bool { - proofStrNo0x := strings.TrimPrefix(inputs.FinalProof.Proof, "0x") - proofBytes := common.Hex2Bytes(proofStrNo0x) - tx := agglayer.Tx{ - LastVerifiedBatch: aggkittypes.ArgUint64(proof.BatchNumber - 1), - NewVerifiedBatch: aggkittypes.ArgUint64(proof.BatchNumberFinal), - ZKP: agglayer.ZKP{ - NewStateRoot: common.BytesToHash(inputs.NewStateRoot), - NewLocalExitRoot: common.BytesToHash(inputs.NewLocalExitRoot), - Proof: aggkittypes.ArgBytes(proofBytes), - }, - RollupID: a.etherman.GetRollupId(), - } - signedTx, err := tx.Sign(a.sequencerPrivateKey) - if err != nil { - a.logger.Errorf("failed to sign tx: %v", err) - a.handleFailureToAddVerifyBatchToBeMonitored(ctx, proof) - - return false - } - - a.logger.Debug("final proof: %+v", tx) - a.logger.Debug("final proof signedTx: ", signedTx.Tx.ZKP.Proof.Hex()) - txHash, err := a.aggLayerClient.SendTx(*signedTx) - if err != nil { - if errors.Is(err, agglayer.ErrAgglayerRateLimitExceeded) { - a.logger.Errorf("%s. Config param VerifyProofInterval should match the agglayer configured rate limit.", err) - } else { - a.logger.Errorf("failed to send tx to the agglayer: %v", err) - } - a.handleFailureToAddVerifyBatchToBeMonitored(ctx, proof) - return false - } - - a.logger.Infof("tx %s sent to agglayer, waiting to be mined", txHash.Hex()) - a.logger.Debugf("Timeout set to %f seconds", a.cfg.AggLayerTxTimeout.Duration.Seconds()) - waitCtx, cancelFunc := context.WithDeadline(ctx, time.Now().Add(a.cfg.AggLayerTxTimeout.Duration)) - defer cancelFunc() - if err := a.aggLayerClient.WaitTxToBeMined(txHash, waitCtx); err != nil { - a.logger.Errorf("agglayer didn't mine the tx: %v", err) - a.handleFailureToAddVerifyBatchToBeMonitored(ctx, proof) - - return false - } - - return true -} - -// settleDirect sends the final proof to the L1 smart contract directly. 
-func (a *Aggregator) settleDirect( - ctx context.Context, - proof *state.Proof, - inputs ethmanTypes.FinalProofInputs) bool { - // add batch verification to be monitored - sender := common.HexToAddress(a.cfg.SenderAddress) - to, data, err := a.etherman.BuildTrustedVerifyBatchesTxData( - proof.BatchNumber-1, proof.BatchNumberFinal, &inputs, sender, - ) - if err != nil { - a.logger.Errorf("Error estimating batch verification to add to eth tx manager: %v", err) - a.handleFailureToAddVerifyBatchToBeMonitored(ctx, proof) - - return false - } - - monitoredTxID, err := a.ethTxManager.Add(ctx, to, big.NewInt(0), data, a.cfg.GasOffset, nil) - if err != nil { - a.logger.Errorf("Error Adding TX to ethTxManager: %v", err) - mTxLogger := ethtxmanager.CreateLogger(monitoredTxID, sender, to) - mTxLogger.Errorf("Error to add batch verification tx to eth tx manager: %v", err) - a.handleFailureToAddVerifyBatchToBeMonitored(ctx, proof) - - return false - } - - // process monitored batch verifications before starting a next cycle - a.ethTxManager.ProcessPendingMonitoredTxs(ctx, func(result ethtxtypes.MonitoredTxResult) { - a.handleMonitoredTxResult(result, proof.BatchNumber, proof.BatchNumberFinal) - }) - - return true -} - -func (a *Aggregator) handleFailureToAddVerifyBatchToBeMonitored(ctx context.Context, proof *state.Proof) { - tmpLogger := a.logger.WithFields( - "proofId", proof.ProofID, - "batches", fmt.Sprintf("%d-%d", proof.BatchNumber, proof.BatchNumberFinal), - ) - proof.GeneratingSince = nil - err := a.storage.UpdateGeneratedProof(ctx, proof, nil) - if err != nil { - tmpLogger.Errorf("Failed updating proof state (false): %v", err) - } - a.endProofVerification() -} - -// buildFinalProof builds and return the final proof for an aggregated/batch proof. 
-func (a *Aggregator) buildFinalProof( - ctx context.Context, prover ProverInterface, proof *state.Proof) (*prover.FinalProof, error) { - tmpLogger := a.logger.WithFields( - "prover", prover.Name(), - "proverId", prover.ID(), - "proverAddr", prover.Addr(), - "recursiveProofId", *proof.ProofID, - "batches", fmt.Sprintf("%d-%d", proof.BatchNumber, proof.BatchNumberFinal), - ) - - finalProofID, err := prover.FinalProof(proof.Proof, a.cfg.SenderAddress) - if err != nil { - return nil, fmt.Errorf("failed to get final proof id: %w", err) - } - proof.ProofID = finalProofID - - tmpLogger.Infof("Final proof ID for batches [%d-%d]: %s", proof.BatchNumber, proof.BatchNumberFinal, *proof.ProofID) - tmpLogger = tmpLogger.WithFields("finalProofId", finalProofID) - - finalProof, err := prover.WaitFinalProof(ctx, *proof.ProofID) - if err != nil { - return nil, fmt.Errorf("failed to get final proof from prover: %w", err) - } - - // mock prover sanity check - if string(finalProof.Public.NewStateRoot) == mockedStateRoot && - string(finalProof.Public.NewLocalExitRoot) == mockedLocalExitRoot { - // This local exit root and state root come from the mock - // prover, use the one captured by the executor instead - rpcFinalBatch, err := a.rpcClient.GetBatch(proof.BatchNumberFinal) - if err != nil { - return nil, fmt.Errorf("error getting batch %d from RPC: %w", proof.BatchNumberFinal, err) - } - - tmpLogger.Warnf( - "NewLocalExitRoot and NewStateRoot look like a mock values, using values from executor instead: LER: %v, SR: %v", - rpcFinalBatch.LocalExitRoot().TerminalString(), rpcFinalBatch.StateRoot().TerminalString()) - finalProof.Public.NewStateRoot = rpcFinalBatch.StateRoot().Bytes() - finalProof.Public.NewLocalExitRoot = rpcFinalBatch.LocalExitRoot().Bytes() - } - - return finalProof, nil -} - -// tryBuildFinalProof checks if the provided proof is eligible to be used to -// build the final proof. If no proof is provided it looks for a previously -// generated proof. 
If the proof is eligible, then the final proof generation -// is triggered. -func (a *Aggregator) tryBuildFinalProof(ctx context.Context, prover ProverInterface, proof *state.Proof) (bool, error) { - proverName := prover.Name() - proverID := prover.ID() - - tmpLogger := a.logger.WithFields( - "prover", proverName, - "proverId", proverID, - "proverAddr", prover.Addr(), - ) - tmpLogger.Debug("tryBuildFinalProof start") - - if !a.canVerifyProof() { - tmpLogger.Debug("Time to verify proof not reached or proof verification in progress") - return false, nil - } - tmpLogger.Debug("Send final proof time reached") - - lastVerifiedBatchNumber, err := a.etherman.GetLatestVerifiedBatchNum() - if err != nil { - return false, err - } - - if proof == nil { - // we don't have a proof generating at the moment, check if we - // have a proof ready to verify - proof, err = a.getAndLockProofReadyToVerify(ctx, lastVerifiedBatchNumber) - if errors.Is(err, state.ErrNotFound) { - // nothing to verify, swallow the error - tmpLogger.Debug("No proof ready to verify") - return false, nil - } - if err != nil { - return false, err - } - - defer func() { - if err != nil { - // Set the generating state to false for the proof ("unlock" it) - proof.GeneratingSince = nil - err2 := a.storage.UpdateGeneratedProof(a.ctx, proof, nil) - if err2 != nil { - tmpLogger.Errorf("Failed to unlock proof: %v", err2) - } - } - }() - } else { - // we do have a proof generating at the moment, check if it is - // eligible to be verified - eligible, err := a.validateEligibleFinalProof(ctx, proof, lastVerifiedBatchNumber) - if err != nil { - return false, fmt.Errorf("failed to validate eligible final proof, %w", err) - } - if !eligible { - return false, nil - } - } - - tmpLogger = tmpLogger.WithFields( - "proofId", *proof.ProofID, - "batches", fmt.Sprintf("%d-%d", proof.BatchNumber, proof.BatchNumberFinal), - ) - - // at this point we have an eligible proof, build the final one using it - finalProof, err := 
a.buildFinalProof(ctx, prover, proof) - if err != nil { - err = fmt.Errorf("failed to build final proof, %w", err) - tmpLogger.Error(FirstToUpper(err.Error())) - return false, err - } - - msg := finalProofMsg{ - proverName: proverName, - proverID: proverID, - recursiveProof: proof, - finalProof: finalProof, - } - - select { - case <-a.ctx.Done(): - return false, a.ctx.Err() - case a.finalProof <- msg: - } - - tmpLogger.Debug("tryBuildFinalProof end") - return true, nil -} - -func (a *Aggregator) validateEligibleFinalProof( - ctx context.Context, proof *state.Proof, lastVerifiedBatchNum uint64, -) (bool, error) { - batchNumberToVerify := lastVerifiedBatchNum + 1 - - if proof.BatchNumber != batchNumberToVerify { - if proof.BatchNumber < batchNumberToVerify && - proof.BatchNumberFinal >= batchNumberToVerify { - // We have a proof that contains some batches below the last batch verified, anyway can be eligible as final proof - a.logger.Warnf("Proof %d-%d contains some batches lower than last batch verified %d. Check anyway if it is eligible", - proof.BatchNumber, proof.BatchNumberFinal, lastVerifiedBatchNum) - } else if proof.BatchNumberFinal < batchNumberToVerify { - // We have a proof that contains batches below that the last batch verified, we need to delete this proof - a.logger.Warnf("Proof %d-%d lower than next batch to verify %d. 
Deleting it", - proof.BatchNumber, proof.BatchNumberFinal, batchNumberToVerify) - err := a.storage.DeleteGeneratedProofs(ctx, proof.BatchNumber, proof.BatchNumberFinal, nil) - if err != nil { - return false, fmt.Errorf("failed to delete discarded proof, err: %w", err) - } - - return false, nil - } else { - a.logger.Debugf("Proof batch number %d is not the following to last verfied batch number %d", - proof.BatchNumber, lastVerifiedBatchNum) - return false, nil - } - } - - bComplete, err := a.storage.CheckProofContainsCompleteSequences(ctx, proof, nil) - if err != nil { - return false, fmt.Errorf("failed to check if proof contains complete sequences, %w", err) - } - if !bComplete { - a.logger.Infof("Recursive proof %d-%d not eligible to be verified: not containing complete sequences", - proof.BatchNumber, proof.BatchNumberFinal) - return false, nil - } - - return true, nil -} - -func (a *Aggregator) getAndLockProofReadyToVerify( - ctx context.Context, lastVerifiedBatchNum uint64, -) (*state.Proof, error) { - a.storageMutex.Lock() - defer a.storageMutex.Unlock() - - // Get proof ready to be verified - proofToVerify, err := a.storage.GetProofReadyToVerify(ctx, lastVerifiedBatchNum, nil) - if err != nil { - return nil, err - } - - now := time.Now().Round(time.Microsecond) - proofToVerify.GeneratingSince = &now - - err = a.storage.UpdateGeneratedProof(ctx, proofToVerify, nil) - if err != nil { - return nil, err - } - - return proofToVerify, nil -} - -func (a *Aggregator) unlockProofsToAggregate(ctx context.Context, proof1 *state.Proof, proof2 *state.Proof) error { - // Release proofs from generating state in a single transaction - dbTx, err := a.storage.BeginTx(ctx, nil) - if err != nil { - a.logger.Warnf("Failed to begin transaction to release proof aggregation state, err: %v", err) - return err - } - - proof1.GeneratingSince = nil - err = a.storage.UpdateGeneratedProof(ctx, proof1, dbTx) - if err == nil { - proof2.GeneratingSince = nil - err = 
a.storage.UpdateGeneratedProof(ctx, proof2, dbTx) - } - - if err != nil { - if err := dbTx.Rollback(); err != nil { - err := fmt.Errorf("failed to rollback proof aggregation state: %w", err) - a.logger.Error(FirstToUpper(err.Error())) - return err - } - - return fmt.Errorf("failed to release proof aggregation state: %w", err) - } - - err = dbTx.Commit() - if err != nil { - return fmt.Errorf("failed to release proof aggregation state %w", err) - } - - return nil -} - -func (a *Aggregator) getAndLockProofsToAggregate( - ctx context.Context, prover ProverInterface) (*state.Proof, *state.Proof, error) { - tmpLogger := a.logger.WithFields( - "prover", prover.Name(), - "proverId", prover.ID(), - "proverAddr", prover.Addr(), - ) - - a.storageMutex.Lock() - defer a.storageMutex.Unlock() - - proof1, proof2, err := a.storage.GetProofsToAggregate(ctx, nil) - if err != nil { - return nil, nil, err - } - - // Set proofs in generating state in a single transaction - dbTx, err := a.storage.BeginTx(ctx, nil) - if err != nil { - tmpLogger.Errorf("Failed to begin transaction to set proof aggregation state, err: %v", err) - return nil, nil, err - } - - now := time.Now().Round(time.Microsecond) - proof1.GeneratingSince = &now - err = a.storage.UpdateGeneratedProof(ctx, proof1, dbTx) - if err == nil { - proof2.GeneratingSince = &now - err = a.storage.UpdateGeneratedProof(ctx, proof2, dbTx) - } - - if err != nil { - if err := dbTx.Rollback(); err != nil { - err := fmt.Errorf("failed to rollback proof aggregation state %w", err) - tmpLogger.Error(FirstToUpper(err.Error())) - return nil, nil, err - } - - return nil, nil, fmt.Errorf("failed to set proof aggregation state %w", err) - } - - err = dbTx.Commit() - if err != nil { - return nil, nil, fmt.Errorf("failed to set proof aggregation state %w", err) - } - - return proof1, proof2, nil -} - -func (a *Aggregator) tryAggregateProofs(ctx context.Context, prover ProverInterface) (bool, error) { - proverName := prover.Name() - proverID := 
prover.ID() - - tmpLogger := a.logger.WithFields( - "prover", proverName, - "proverId", proverID, - "proverAddr", prover.Addr(), - ) - tmpLogger.Debug("tryAggregateProofs start") - - proof1, proof2, err0 := a.getAndLockProofsToAggregate(ctx, prover) - if errors.Is(err0, state.ErrNotFound) { - // nothing to aggregate, swallow the error - tmpLogger.Debug("Nothing to aggregate") - return false, nil - } - if err0 != nil { - return false, err0 - } - - var ( - aggrProofID *string - err error - ) - - defer func() { - if err != nil { - err2 := a.unlockProofsToAggregate(a.ctx, proof1, proof2) - if err2 != nil { - tmpLogger.Errorf("Failed to release aggregated proofs, err: %v", err2) - } - } - tmpLogger.Debug("tryAggregateProofs end") - }() - - tmpLogger.Infof("Aggregating proofs: %d-%d and %d-%d", - proof1.BatchNumber, proof1.BatchNumberFinal, proof2.BatchNumber, proof2.BatchNumberFinal) - - batches := fmt.Sprintf("%d-%d", proof1.BatchNumber, proof2.BatchNumberFinal) - tmpLogger = tmpLogger.WithFields("batches", batches) - - inputProver := map[string]interface{}{ - "recursive_proof_1": proof1.Proof, - "recursive_proof_2": proof2.Proof, - } - b, err := json.Marshal(inputProver) - if err != nil { - err = fmt.Errorf("failed to serialize input prover, %w", err) - tmpLogger.Error(FirstToUpper(err.Error())) - return false, err - } - - proof := &state.Proof{ - BatchNumber: proof1.BatchNumber, - BatchNumberFinal: proof2.BatchNumberFinal, - Prover: &proverName, - ProverID: &proverID, - InputProver: string(b), - } - - aggrProofID, err = prover.AggregatedProof(proof1.Proof, proof2.Proof) - if err != nil { - err = fmt.Errorf("failed to get aggregated proof id, %w", err) - tmpLogger.Error(FirstToUpper(err.Error())) - return false, err - } - - proof.ProofID = aggrProofID - - tmpLogger.Infof("Proof ID for aggregated proof: %v", *proof.ProofID) - tmpLogger = tmpLogger.WithFields("proofId", *proof.ProofID) - - recursiveProof, _, _, err := prover.WaitRecursiveProof(ctx, *proof.ProofID) - if 
err != nil { - err = fmt.Errorf("failed to get aggregated proof from prover, %w", err) - tmpLogger.Error(FirstToUpper(err.Error())) - return false, err - } - - tmpLogger.Info("Aggregated proof generated") - - proof.Proof = recursiveProof - - // update the state by removing the 2 aggregated proofs and storing the - // newly generated recursive proof - dbTx, err := a.storage.BeginTx(ctx, nil) - if err != nil { - err = fmt.Errorf("failed to begin transaction to update proof aggregation state, %w", err) - tmpLogger.Error(FirstToUpper(err.Error())) - return false, err - } - - err = a.storage.DeleteGeneratedProofs(ctx, proof1.BatchNumber, proof2.BatchNumberFinal, dbTx) - if err != nil { - if err := dbTx.Rollback(); err != nil { - err := fmt.Errorf("failed to rollback proof aggregation state, %w", err) - tmpLogger.Error(FirstToUpper(err.Error())) - return false, err - } - err = fmt.Errorf("failed to delete previously aggregated proofs, %w", err) - tmpLogger.Error(FirstToUpper(err.Error())) - return false, err - } - - now := time.Now().Round(time.Microsecond) - proof.GeneratingSince = &now - - err = a.storage.AddGeneratedProof(ctx, proof, dbTx) - if err != nil { - if err := dbTx.Rollback(); err != nil { - err := fmt.Errorf("failed to rollback proof aggregation state, %w", err) - tmpLogger.Error(FirstToUpper(err.Error())) - return false, err - } - err = fmt.Errorf("failed to store the recursive proof, %w", err) - tmpLogger.Error(FirstToUpper(err.Error())) - return false, err - } - - err = dbTx.Commit() - if err != nil { - err = fmt.Errorf("failed to store the recursive proof, %w", err) - tmpLogger.Error(FirstToUpper(err.Error())) - return false, err - } - - // NOTE(pg): the defer func is useless from now on, use a different variable - // name for errors (or shadow err in inner scopes) to not trigger it. - - // state is up to date, check if we can send the final proof using the - // one just crafted. 
- finalProofBuilt, finalProofErr := a.tryBuildFinalProof(ctx, prover, proof) - if finalProofErr != nil { - // just log the error and continue to handle the aggregated proof - tmpLogger.Errorf("Failed trying to check if recursive proof can be verified: %v", finalProofErr) - } - - // NOTE(pg): prover is done, use a.ctx from now on - - if !finalProofBuilt { - proof.GeneratingSince = nil - - // final proof has not been generated, update the recursive proof - err := a.storage.UpdateGeneratedProof(a.ctx, proof, nil) - if err != nil { - err = fmt.Errorf("failed to store batch proof result, %w", err) - tmpLogger.Error(FirstToUpper(err.Error())) - return false, err - } - } - - return true, nil -} - -func (a *Aggregator) getVerifiedBatchAccInputHash(ctx context.Context, batchNumber uint64) (*common.Hash, error) { - accInputHash, err := a.etherman.GetBatchAccInputHash(ctx, batchNumber) - if err != nil { - return nil, err - } - - return &accInputHash, nil -} - -func (a *Aggregator) getAndLockBatchToProve( - ctx context.Context, prover ProverInterface, -) (*state.Batch, []byte, *state.Proof, error) { - proverID := prover.ID() - proverName := prover.Name() - - tmpLogger := a.logger.WithFields( - "prover", proverName, - "proverId", proverID, - "proverAddr", prover.Addr(), - ) - - a.storageMutex.Lock() - defer a.storageMutex.Unlock() - - // Get last virtual batch number from L1 - lastVerifiedBatchNumber, err := a.etherman.GetLatestVerifiedBatchNum() - if err != nil { - return nil, nil, nil, err - } - - proofExists := true - batchNumberToVerify := lastVerifiedBatchNumber - - // Look for the batch number to verify - for proofExists { - batchNumberToVerify++ - proofExists, err = a.storage.CheckProofExistsForBatch(ctx, batchNumberToVerify, nil) - if err != nil { - tmpLogger.Infof("Error checking proof exists for batch %d", batchNumberToVerify) - - return nil, nil, nil, err - } - - if proofExists { - accInputHash := a.getAccInputHash(batchNumberToVerify - 1) - if accInputHash == 
(common.Hash{}) && batchNumberToVerify > 1 { - tmpLogger.Warnf("AccInputHash for batch %d is not in memory, "+ - "deleting proofs to regenerate acc input hash chain in memory", batchNumberToVerify) - - err := a.storage.CleanupGeneratedProofs(ctx, math.MaxInt, nil) - if err != nil { - tmpLogger.Infof("Error cleaning up generated proofs for batch %d", batchNumberToVerify) - return nil, nil, nil, err - } - batchNumberToVerify-- - break - } - } - } - - // Check if the batch has been sequenced - sequence, err := a.l1Syncr.GetSequenceByBatchNumber(ctx, batchNumberToVerify) - if err != nil && !errors.Is(err, entities.ErrNotFound) { - return nil, nil, nil, err - } - - // Not found, so it it not possible to verify the batch yet - if sequence == nil || errors.Is(err, entities.ErrNotFound) { - tmpLogger.Infof("Sequencing event for batch %d has not been synced yet, "+ - "so it is not possible to verify it yet. Waiting ...", batchNumberToVerify) - - return nil, nil, nil, state.ErrNotFound - } - - stateSequence := state.Sequence{ - FromBatchNumber: sequence.FromBatchNumber, - ToBatchNumber: sequence.ToBatchNumber, - } - - // Get Batch from L1 Syncer - virtualBatch, err := a.l1Syncr.GetVirtualBatchByBatchNumber(a.ctx, batchNumberToVerify) - if err != nil && !errors.Is(err, entities.ErrNotFound) { - a.logger.Errorf("Error getting virtual batch: %v", err) - return nil, nil, nil, err - } else if errors.Is(err, entities.ErrNotFound) { - a.logger.Infof("Virtual batch %d has not been synced yet, "+ - "so it is not possible to verify it yet. 
Waiting ...", batchNumberToVerify) - return nil, nil, nil, state.ErrNotFound - } - - // Get Batch from RPC - rpcBatch, err := a.rpcClient.GetBatch(batchNumberToVerify) - if err != nil { - a.logger.Errorf("error getting batch %d from RPC: %v.", batchNumberToVerify, err) - return nil, nil, nil, err - } - - // Compare BatchL2Data from virtual batch and rpcBatch (skipping injected batch (1)) - if batchNumberToVerify != 1 && (common.Bytes2Hex(virtualBatch.BatchL2Data) != common.Bytes2Hex(rpcBatch.L2Data())) { - a.logger.Warnf("BatchL2Data from virtual batch %d does not match the one from RPC", batchNumberToVerify) - a.logger.Warnf("VirtualBatch BatchL2Data:%v", common.Bytes2Hex(virtualBatch.BatchL2Data)) - a.logger.Warnf("RPC BatchL2Data:%v", common.Bytes2Hex(rpcBatch.L2Data())) - } - - l1InfoRoot := common.Hash{} - - if virtualBatch.L1InfoRoot == nil { - log.Debugf("L1InfoRoot is nil for batch %d", batchNumberToVerify) - virtualBatch.L1InfoRoot = &l1InfoRoot - } - - // Ensure the old acc input hash is in memory - oldAccInputHash := a.getAccInputHash(batchNumberToVerify - 1) - if oldAccInputHash == (common.Hash{}) && batchNumberToVerify > 1 { - tmpLogger.Warnf("AccInputHash for previous batch (%d) is not in memory. 
Waiting ...", batchNumberToVerify-1) - return nil, nil, nil, state.ErrNotFound - } - - forcedBlockHashL1 := rpcBatch.ForcedBlockHashL1() - l1InfoRoot = *virtualBatch.L1InfoRoot - - if batchNumberToVerify == 1 { - l1Block, err := a.l1Syncr.GetL1BlockByNumber(ctx, virtualBatch.BlockNumber) - if err != nil { - a.logger.Errorf("Error getting l1 block: %v", err) - return nil, nil, nil, err - } - - forcedBlockHashL1 = l1Block.ParentHash - l1InfoRoot = rpcBatch.GlobalExitRoot() - } - - // Calculate acc input hash as the RPC is not returning the correct one at the moment - accInputHash := aggkitcommon.CalculateAccInputHash( - a.logger, - oldAccInputHash, - virtualBatch.BatchL2Data, - l1InfoRoot, - uint64(sequence.Timestamp.Unix()), - rpcBatch.LastCoinbase(), - forcedBlockHashL1, - ) - // Store the acc input hash - a.setAccInputHash(batchNumberToVerify, accInputHash) - - // Log params to calculate acc input hash - a.logger.Debugf("Calculated acc input hash for batch %d: %v", batchNumberToVerify, accInputHash) - a.logger.Debugf("OldAccInputHash: %v", oldAccInputHash) - a.logger.Debugf("L1InfoRoot: %v", virtualBatch.L1InfoRoot) - a.logger.Debugf("TimestampLimit: %v", uint64(sequence.Timestamp.Unix())) - a.logger.Debugf("LastCoinbase: %v", rpcBatch.LastCoinbase()) - a.logger.Debugf("ForcedBlockHashL1: %v", rpcBatch.ForcedBlockHashL1()) - - // Create state batch - stateBatch := &state.Batch{ - BatchNumber: rpcBatch.BatchNumber(), - Coinbase: rpcBatch.LastCoinbase(), - // Use L1 batch data - BatchL2Data: virtualBatch.BatchL2Data, - StateRoot: rpcBatch.StateRoot(), - LocalExitRoot: rpcBatch.LocalExitRoot(), - // Use calculated acc input - AccInputHash: accInputHash, - L1InfoTreeIndex: rpcBatch.L1InfoTreeIndex(), - L1InfoRoot: *virtualBatch.L1InfoRoot, - Timestamp: sequence.Timestamp, - GlobalExitRoot: rpcBatch.GlobalExitRoot(), - ChainID: a.cfg.ChainID, - ForkID: a.cfg.ForkId, - } - - // Request the witness from the server, if it is busy just keep looping until it is available - 
start := time.Now() - witness, err := a.rpcClient.GetWitness(batchNumberToVerify, a.cfg.UseFullWitness) - for err != nil { - if errors.Is(err, rpc.ErrBusy) { - a.logger.Debugf( - "Witness server is busy, retrying get witness for batch %d in %v", - batchNumberToVerify, a.cfg.RetryTime.Duration, - ) - } else { - a.logger.Errorf("Failed to get witness for batch %d, err: %v", batchNumberToVerify, err) - } - time.Sleep(a.cfg.RetryTime.Duration) - } - end := time.Now() - a.logger.Debugf("Time to get witness for batch %d: %v", batchNumberToVerify, end.Sub(start)) - - // Store the sequence in aggregator DB - err = a.storage.AddSequence(ctx, stateSequence, nil) - if err != nil { - tmpLogger.Infof("Error storing sequence for batch %d", batchNumberToVerify) - - return nil, nil, nil, err - } - - // All the data required to generate a proof is ready - tmpLogger.Infof("All information to generate proof for batch %d is ready", virtualBatch.BatchNumber) - tmpLogger = tmpLogger.WithFields("batch", virtualBatch.BatchNumber) - - now := time.Now().Round(time.Microsecond) - proof := &state.Proof{ - BatchNumber: virtualBatch.BatchNumber, - BatchNumberFinal: virtualBatch.BatchNumber, - Prover: &proverName, - ProverID: &proverID, - GeneratingSince: &now, - } - - // Avoid other prover to process the same batch - err = a.storage.AddGeneratedProof(ctx, proof, nil) - if err != nil { - tmpLogger.Errorf("Failed to add batch proof to DB for batch %d, err: %v", virtualBatch.BatchNumber, err) - - return nil, nil, nil, err - } - - return stateBatch, witness, proof, nil -} - -func (a *Aggregator) tryGenerateBatchProof(ctx context.Context, prover ProverInterface) (bool, error) { - tmpLogger := a.logger.WithFields( - "prover", prover.Name(), - "proverId", prover.ID(), - "proverAddr", prover.Addr(), - ) - tmpLogger.Debug("tryGenerateBatchProof start") - - batchToProve, witness, proof, err0 := a.getAndLockBatchToProve(ctx, prover) - if errors.Is(err0, state.ErrNotFound) || errors.Is(err0, 
entities.ErrNotFound) { - // nothing to proof, swallow the error - tmpLogger.Debug("Nothing to generate proof") - return false, nil - } - if err0 != nil { - return false, err0 - } - - tmpLogger = tmpLogger.WithFields("batch", batchToProve.BatchNumber) - - var ( - genProofID *string - err error - ) - - defer func() { - if err != nil { - tmpLogger.Debug("Deleting proof in progress") - err2 := a.storage.DeleteGeneratedProofs(a.ctx, proof.BatchNumber, proof.BatchNumberFinal, nil) - if err2 != nil { - tmpLogger.Errorf("Failed to delete proof in progress, err: %v", err2) - } - } - tmpLogger.Debug("tryGenerateBatchProof end") - }() - - tmpLogger.Infof("Sending zki + batch to the prover, batchNumber [%d]", batchToProve.BatchNumber) - inputProver, err := a.buildInputProver(ctx, batchToProve, witness) - if err != nil { - err = fmt.Errorf("failed to build input prover, %w", err) - tmpLogger.Error(FirstToUpper(err.Error())) - return false, err - } - - tmpLogger.Infof("Sending a batch to the prover. 
OldAccInputHash [%#x], L1InfoRoot [%#x]", - inputProver.PublicInputs.OldAccInputHash, inputProver.PublicInputs.L1InfoRoot) - - genProofID, err = prover.BatchProof(inputProver) - if err != nil { - err = fmt.Errorf("failed to get batch proof id, %w", err) - tmpLogger.Error(FirstToUpper(err.Error())) - return false, err - } - - proof.ProofID = genProofID - - tmpLogger = tmpLogger.WithFields("proofId", *proof.ProofID) - - resGetProof, stateRoot, accInputHash, err := prover.WaitRecursiveProof(ctx, *proof.ProofID) - if err != nil { - err = fmt.Errorf("failed to get proof from prover, %w", err) - tmpLogger.Error(FirstToUpper(err.Error())) - return false, err - } - - tmpLogger.Info("Batch proof generated") - - // Sanity Check: state root from the proof must match the one from the batch - if a.cfg.BatchProofSanityCheckEnabled { - a.performSanityChecks(tmpLogger, stateRoot, accInputHash, batchToProve) - } - proof.Proof = resGetProof - - // NOTE(pg): the defer func is useless from now on, use a different variable - // name for errors (or shadow err in inner scopes) to not trigger it. 
- - finalProofBuilt, finalProofErr := a.tryBuildFinalProof(ctx, prover, proof) - if finalProofErr != nil { - // just log the error and continue to handle the generated proof - tmpLogger.Errorf("Error trying to build final proof: %v", finalProofErr) - } - - // NOTE(pg): prover is done, use a.ctx from now on - - if !finalProofBuilt { - proof.GeneratingSince = nil - - // final proof has not been generated, update the batch proof - err := a.storage.UpdateGeneratedProof(a.ctx, proof, nil) - if err != nil { - err = fmt.Errorf("failed to store batch proof result, %w", err) - tmpLogger.Error(FirstToUpper(err.Error())) - return false, err - } - } - - return true, nil -} - -func (a *Aggregator) performSanityChecks(tmpLogger *log.Logger, stateRoot, accInputHash common.Hash, - batchToProve *state.Batch) { - // Sanity Check: state root from the proof must match the one from the batch - if (stateRoot != common.Hash{}) && (stateRoot != batchToProve.StateRoot) { - for { - tmpLogger.Errorf("HALTING: "+ - "State root from the proof does not match the expected for batch %d: Proof = [%s] Expected = [%s]", - batchToProve.BatchNumber, stateRoot.String(), batchToProve.StateRoot.String(), - ) - time.Sleep(a.cfg.RetryTime.Duration) - } - } else { - tmpLogger.Infof("State root sanity check for batch %d passed", batchToProve.BatchNumber) - } - - // Sanity Check: acc input hash from the proof must match the one from the batch - if (accInputHash != common.Hash{}) && (accInputHash != batchToProve.AccInputHash) { - for { - tmpLogger.Errorf("HALTING: Acc input hash from the proof does not match the expected for "+ - "batch %d: Proof = [%s] Expected = [%s]", - batchToProve.BatchNumber, accInputHash.String(), batchToProve.AccInputHash.String(), - ) - time.Sleep(a.cfg.RetryTime.Duration) - } - } else { - tmpLogger.Infof("Acc input hash sanity check for batch %d passed", batchToProve.BatchNumber) - } -} - -// canVerifyProof returns true if we have reached the timeout to verify a proof -// and no 
other prover is verifying a proof (verifyingProof = false). -func (a *Aggregator) canVerifyProof() bool { - a.timeSendFinalProofMutex.RLock() - defer a.timeSendFinalProofMutex.RUnlock() - - return a.timeSendFinalProof.Before(time.Now()) && !a.verifyingProof -} - -// startProofVerification sets the verifyingProof variable to true -// to indicate that there is a proof verification in progress. -func (a *Aggregator) startProofVerification() { - a.timeSendFinalProofMutex.Lock() - defer a.timeSendFinalProofMutex.Unlock() - a.verifyingProof = true -} - -// endProofVerification set verifyingProof to false to indicate that there is not proof verification in progress -func (a *Aggregator) endProofVerification() { - a.timeSendFinalProofMutex.Lock() - defer a.timeSendFinalProofMutex.Unlock() - a.verifyingProof = false -} - -// resetVerifyProofTime updates the timeout to verify a proof. -func (a *Aggregator) resetVerifyProofTime() { - a.timeSendFinalProofMutex.Lock() - defer a.timeSendFinalProofMutex.Unlock() - a.timeSendFinalProof = time.Now().Add(a.cfg.VerifyProofInterval.Duration) -} - -func (a *Aggregator) buildInputProver( - ctx context.Context, batchToVerify *state.Batch, witness []byte, -) (*prover.StatelessInputProver, error) { - isForcedBatch := false - batchRawData := &state.BatchRawV2{} - var err error - - if batchToVerify.BatchNumber == 1 || batchToVerify.ForcedBatchNum != nil { - isForcedBatch = true - } else { - batchRawData, err = state.DecodeBatchV2(batchToVerify.BatchL2Data) - if err != nil { - a.logger.Errorf("Failed to decode batch data, err: %v", err) - return nil, err - } - } - - l1InfoTreeData := map[uint32]*prover.L1Data{} - forcedBlockhashL1 := common.Hash{} - l1InfoRoot := batchToVerify.L1InfoRoot.Bytes() - //nolint:gocritic - if !isForcedBatch { - tree, err := l1infotree.NewL1InfoTree(a.logger, 32, [][32]byte{}) //nolint:mnd - if err != nil { - return nil, err - } - - leaves, err := a.l1Syncr.GetLeafsByL1InfoRoot(ctx, batchToVerify.L1InfoRoot) - if 
err != nil && !errors.Is(err, entities.ErrNotFound) { - return nil, err - } - - aLeaves := make([][32]byte, len(leaves)) - for i, leaf := range leaves { - aLeaves[i] = l1infotree.HashLeafData( - leaf.GlobalExitRoot, - leaf.PreviousBlockHash, - uint64(leaf.Timestamp.Unix())) - } - - for _, l2blockRaw := range batchRawData.Blocks { - _, contained := l1InfoTreeData[l2blockRaw.IndexL1InfoTree] - if !contained && l2blockRaw.IndexL1InfoTree != 0 { - leaves, err := a.l1Syncr.GetL1InfoTreeLeaves(ctx, []uint32{l2blockRaw.IndexL1InfoTree}) - if err != nil { - a.logger.Errorf("Error getting l1InfoTreeLeaf: %v", err) - return nil, err - } - - l1InfoTreeLeaf := leaves[l2blockRaw.IndexL1InfoTree] - - // Calculate smt proof - a.logger.Infof("Calling tree.ComputeMerkleProof") - smtProof, calculatedL1InfoRoot, err := tree.ComputeMerkleProof(l2blockRaw.IndexL1InfoTree, aLeaves) - if err != nil { - a.logger.Errorf("Error computing merkle proof: %v", err) - return nil, err - } - - if batchToVerify.L1InfoRoot != calculatedL1InfoRoot { - return nil, fmt.Errorf( - "error: l1InfoRoot mismatch. L1InfoRoot: %s, calculatedL1InfoRoot: %s. 
l1InfoTreeIndex: %d", - batchToVerify.L1InfoRoot.String(), calculatedL1InfoRoot.String(), l2blockRaw.IndexL1InfoTree, - ) - } - - protoProof := make([][]byte, len(smtProof)) - - for i, proof := range smtProof { - tmpProof := proof - protoProof[i] = tmpProof[:] - } - - l1InfoTreeData[l2blockRaw.IndexL1InfoTree] = &prover.L1Data{ - GlobalExitRoot: l1InfoTreeLeaf.GlobalExitRoot.Bytes(), - BlockhashL1: l1InfoTreeLeaf.PreviousBlockHash.Bytes(), - MinTimestamp: uint32(l1InfoTreeLeaf.Timestamp.Unix()), - SmtProof: protoProof, - } - } - } - } else { - // Initial batch must be handled differently - if batchToVerify.BatchNumber == 1 { - virtualBatch, err := a.l1Syncr.GetVirtualBatchByBatchNumber(ctx, batchToVerify.BatchNumber) - if err != nil { - a.logger.Errorf("Error getting virtual batch: %v", err) - return nil, err - } - l1Block, err := a.l1Syncr.GetL1BlockByNumber(ctx, virtualBatch.BlockNumber) - if err != nil { - a.logger.Errorf("Error getting l1 block: %v", err) - return nil, err - } - - forcedBlockhashL1 = l1Block.ParentHash - l1InfoRoot = batchToVerify.GlobalExitRoot.Bytes() - } - } - - // Ensure the old acc input hash is in memory - oldAccInputHash := a.getAccInputHash(batchToVerify.BatchNumber - 1) - if oldAccInputHash == (common.Hash{}) && batchToVerify.BatchNumber > 1 { - a.logger.Warnf("AccInputHash for previous batch (%d) is not in memory. 
Waiting ...", batchToVerify.BatchNumber-1) - return nil, fmt.Errorf("acc input hash for previous batch (%d) is not in memory", batchToVerify.BatchNumber-1) - } - - inputProver := &prover.StatelessInputProver{ - PublicInputs: &prover.StatelessPublicInputs{ - Witness: witness, - OldAccInputHash: oldAccInputHash.Bytes(), - OldBatchNum: batchToVerify.BatchNumber - 1, - ChainId: batchToVerify.ChainID, - ForkId: batchToVerify.ForkID, - BatchL2Data: batchToVerify.BatchL2Data, - L1InfoRoot: l1InfoRoot, - TimestampLimit: uint64(batchToVerify.Timestamp.Unix()), - SequencerAddr: batchToVerify.Coinbase.String(), - AggregatorAddr: a.cfg.SenderAddress, - L1InfoTreeData: l1InfoTreeData, - ForcedBlockhashL1: forcedBlockhashL1.Bytes(), - }, - } - - printInputProver(a.logger, inputProver) - return inputProver, nil -} - -func printInputProver(logger *log.Logger, inputProver *prover.StatelessInputProver) { - if !logger.IsEnabledLogLevel(zapcore.DebugLevel) { - return - } - - logger.Debugf("Witness length: %v", len(inputProver.PublicInputs.Witness)) - logger.Debugf("BatchL2Data length: %v", len(inputProver.PublicInputs.BatchL2Data)) - logger.Debugf("OldAccInputHash: %v", common.BytesToHash(inputProver.PublicInputs.OldAccInputHash)) - logger.Debugf("L1InfoRoot: %v", common.BytesToHash(inputProver.PublicInputs.L1InfoRoot)) - logger.Debugf("TimestampLimit: %v", inputProver.PublicInputs.TimestampLimit) - logger.Debugf("SequencerAddr: %v", inputProver.PublicInputs.SequencerAddr) - logger.Debugf("AggregatorAddr: %v", inputProver.PublicInputs.AggregatorAddr) - logger.Debugf("L1InfoTreeData: %+v", inputProver.PublicInputs.L1InfoTreeData) - logger.Debugf("ForcedBlockhashL1: %v", common.BytesToHash(inputProver.PublicInputs.ForcedBlockhashL1)) -} - -// healthChecker will provide an implementation of the HealthCheck interface. -type healthChecker struct{} - -// newHealthChecker returns a health checker according to standard package -// grpc.health.v1. 
-func newHealthChecker() *healthChecker { - return &healthChecker{} -} - -// HealthCheck interface implementation. - -// Check returns the current status of the server for unary gRPC health requests, -// for now if the server is up and able to respond we will always return SERVING. -func (hc *healthChecker) Check( - ctx context.Context, req *grpchealth.HealthCheckRequest, -) (*grpchealth.HealthCheckResponse, error) { - log.Info("Serving the Check request for health check") - - return &grpchealth.HealthCheckResponse{ - Status: grpchealth.HealthCheckResponse_SERVING, - }, nil -} - -// Watch returns the current status of the server for stream gRPC health requests, -// for now if the server is up and able to respond we will always return SERVING. -func (hc *healthChecker) Watch(req *grpchealth.HealthCheckRequest, server grpchealth.Health_WatchServer) error { - log.Info("Serving the Watch request for health check") - - return server.Send(&grpchealth.HealthCheckResponse{ - Status: grpchealth.HealthCheckResponse_SERVING, - }) -} - -func (a *Aggregator) handleMonitoredTxResult(result ethtxtypes.MonitoredTxResult, firstBatch, lastBatch uint64) { - mTxResultLogger := ethtxmanager.CreateMonitoredTxResultLogger(result) - if result.Status == ethtxtypes.MonitoredTxStatusFailed { - mTxResultLogger.Fatal("failed to send batch verification, TODO: review this fatal and define what to do in this case") - } - - // Wait for the transaction to be finalized, then we can safely delete all recursive - // proofs up to the last batch in this proof - - finaLizedBlockNumber, err := l1_check_block.L1FinalizedFetch.BlockNumber(a.ctx, a.etherman) - if err != nil { - mTxResultLogger.Errorf("failed to get finalized block number: %v", err) - } - - for result.MinedAtBlockNumber.Uint64() > finaLizedBlockNumber { - select { - case <-a.ctx.Done(): - return - case <-time.After(a.cfg.RetryTime.Duration): - finaLizedBlockNumber, err = l1_check_block.L1FinalizedFetch.BlockNumber(a.ctx, a.etherman) - if err 
!= nil { - mTxResultLogger.Errorf("failed to get finalized block number: %v", err) - } - } - } - - err = a.storage.DeleteGeneratedProofs(a.ctx, firstBatch, lastBatch, nil) - if err != nil { - mTxResultLogger.Errorf("failed to delete generated proofs from %d to %d: %v", firstBatch, lastBatch, err) - } - - mTxResultLogger.Debugf("deleted generated proofs from %d to %d", firstBatch, lastBatch) - - // Remove the acc input hashes from the map - // leaving the last batch acc input hash as it will be used as old acc input hash - a.removeAccInputHashes(firstBatch, lastBatch-1) -} - -func (a *Aggregator) cleanupLockedProofs() { - for { - select { - case <-a.ctx.Done(): - return - case <-time.After(a.timeCleanupLockedProofs.Duration): - n, err := a.storage.CleanupLockedProofs(a.ctx, a.cfg.GeneratingProofCleanupThreshold, nil) - if err != nil { - a.logger.Errorf("Failed to cleanup locked proofs: %v", err) - } - if n == 1 { - a.logger.Warn("Found a stale proof and removed from cache") - } else if n > 1 { - a.logger.Warnf("Found %d stale proofs and removed from cache", n) - } - } - } -} - -// FirstToUpper returns the string passed as argument with the first letter in -// uppercase. 
-func FirstToUpper(s string) string { - runes := []rune(s) - runes[0] = unicode.ToUpper(runes[0]) - - return string(runes) -} diff --git a/aggregator/aggregator_test.go b/aggregator/aggregator_test.go deleted file mode 100644 index 8b2aa808..00000000 --- a/aggregator/aggregator_test.go +++ /dev/null @@ -1,1921 +0,0 @@ -package aggregator - -import ( - "bytes" - "context" - "crypto/ecdsa" - "crypto/elliptic" - "crypto/rand" - "database/sql" - "encoding/hex" - "encoding/json" - "errors" - "fmt" - "math/big" - "sync" - "sync/atomic" - "testing" - "time" - - "github.com/0xPolygonHermez/zkevm-synchronizer-l1/synchronizer" - "github.com/agglayer/aggkit/agglayer" - mocks "github.com/agglayer/aggkit/aggregator/mocks" - "github.com/agglayer/aggkit/aggregator/prover" - "github.com/agglayer/aggkit/config/types" - "github.com/agglayer/aggkit/log" - rpctypes "github.com/agglayer/aggkit/rpc/types" - "github.com/agglayer/aggkit/state" - "github.com/ethereum/go-ethereum/common" - ethTypes "github.com/ethereum/go-ethereum/core/types" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/mock" - "github.com/stretchr/testify/require" -) - -var ( - proofID = "proofId" - proof = "proof" - proverName = "proverName" - proverID = "proverID" -) - -const ( - ownerProver = "prover" - ownerAggregator = "aggregator" - - // changeL2Block + deltaTimeStamp + indexL1InfoTree - codedL2BlockHeader = "0b73e6af6f00000001" - // 2 x [ tx coded in RLP + r,s,v,efficiencyPercentage] - codedRLP2Txs1 = "ee02843b9aca00830186a0944d5cf5032b2a844602278b01199ed191a86c93ff88016345785d8a0000808203e88080bff0e780ba7db409339fd3f71969fa2cbf1b8535f6c725a1499d3318d3ef9c2b6340ddfab84add2c188f9efddb99771db1fe621c981846394ea4f035c85bcdd51bffee03843b9aca00830186a0944d5cf5032b2a844602278b01199ed191a86c93ff88016345785d8a0000808203e880805b346aa02230b22e62f73608de9ff39a162a6c24be9822209c770e3685b92d0756d5316ef954eefc58b068231ccea001fb7ac763ebe03afd009ad71cab36861e1bff" - codedL2Block1 = codedL2BlockHeader + 
codedRLP2Txs1 -) - -type mox struct { - storageMock *mocks.StorageInterfaceMock - ethTxManager *mocks.EthTxManagerClientMock - etherman *mocks.EthermanMock - proverMock *mocks.ProverInterfaceMock - aggLayerClientMock *agglayer.AgglayerClientMock - synchronizerMock *mocks.SynchronizerInterfaceMock - rpcMock *mocks.RPCInterfaceMock - txerMock *mocks.TxerMock -} - -func WaitUntil(t *testing.T, wg *sync.WaitGroup, timeout time.Duration) { - t.Helper() - - done := make(chan struct{}) - go func() { - wg.Wait() - close(done) - }() - - select { - case <-done: - case <-time.After(timeout): - t.Fatalf("WaitGroup not done, test time expired after %s", timeout) - } -} - -func Test_Start(t *testing.T) { - mockStorage := new(mocks.StorageInterfaceMock) - mockL1Syncr := new(mocks.SynchronizerInterfaceMock) - mockEtherman := new(mocks.EthermanMock) - mockEthTxManager := new(mocks.EthTxManagerClientMock) - - mockL1Syncr.On("Sync", mock.Anything).Return(nil) - mockEtherman.On("GetLatestVerifiedBatchNum").Return(uint64(90), nil).Once() - mockEtherman.On("GetBatchAccInputHash", mock.Anything, uint64(90)).Return(common.Hash{}, nil).Once() - mockStorage.On("DeleteGeneratedProofs", mock.Anything, uint64(90), mock.Anything, nil).Return(nil).Once() - mockStorage.On("CleanupLockedProofs", mock.Anything, "", nil).Return(int64(0), nil) - - mockEthTxManager.On("Start").Return(nil) - - ctx := context.Background() - a := &Aggregator{ - storage: mockStorage, - logger: log.GetDefaultLogger(), - halted: atomic.Bool{}, - l1Syncr: mockL1Syncr, - etherman: mockEtherman, - ethTxManager: mockEthTxManager, - ctx: ctx, - storageMutex: &sync.Mutex{}, - timeSendFinalProofMutex: &sync.RWMutex{}, - timeCleanupLockedProofs: types.Duration{Duration: 5 * time.Second}, - accInputHashes: make(map[uint64]common.Hash), - accInputHashesMutex: &sync.Mutex{}, - } - go func() { - err := a.Start() - require.NoError(t, err) - }() - time.Sleep(time.Second) - a.ctx.Done() - time.Sleep(time.Second) -} - -func 
Test_handleReorg(t *testing.T) { - t.Parallel() - - mockL1Syncr := new(mocks.SynchronizerInterfaceMock) - mockStorage := new(mocks.StorageInterfaceMock) - reorgData := synchronizer.ReorgExecutionResult{} - - a := &Aggregator{ - l1Syncr: mockL1Syncr, - storage: mockStorage, - logger: log.GetDefaultLogger(), - halted: atomic.Bool{}, - ctx: context.Background(), - } - - mockL1Syncr.On("GetLastestVirtualBatchNumber", mock.Anything).Return(uint64(100), nil).Once() - mockStorage.On("DeleteGeneratedProofs", mock.Anything, mock.Anything, mock.Anything, nil).Return(nil).Once() - mockStorage.On("DeleteUngeneratedProofs", mock.Anything, nil).Return(nil).Once() - - go a.handleReorg(reorgData) - time.Sleep(3 * time.Second) - - assert.True(t, a.halted.Load()) - mockStorage.AssertExpectations(t) - mockL1Syncr.AssertExpectations(t) -} - -func Test_handleRollbackBatches(t *testing.T) { - t.Parallel() - - mockEtherman := new(mocks.EthermanMock) - mockStorage := new(mocks.StorageInterfaceMock) - - // Test data - rollbackData := synchronizer.RollbackBatchesData{ - LastBatchNumber: 100, - } - - mockEtherman.On("GetLatestVerifiedBatchNum").Return(uint64(90), nil).Once() - mockEtherman.On("GetBatchAccInputHash", mock.Anything, uint64(90)).Return(common.Hash{}, nil).Once() - mockStorage.On("DeleteUngeneratedProofs", mock.Anything, mock.Anything).Return(nil).Once() - mockStorage.On("DeleteGeneratedProofs", mock.Anything, mock.Anything, mock.Anything, mock.Anything).Return(nil).Once() - - a := Aggregator{ - ctx: context.Background(), - etherman: mockEtherman, - storage: mockStorage, - logger: log.GetDefaultLogger(), - halted: atomic.Bool{}, - accInputHashes: make(map[uint64]common.Hash), - accInputHashesMutex: &sync.Mutex{}, - } - - a.halted.Store(false) - a.handleRollbackBatches(rollbackData) - - assert.False(t, a.halted.Load()) - mockEtherman.AssertExpectations(t) - mockStorage.AssertExpectations(t) -} - -func Test_handleRollbackBatchesHalt(t *testing.T) { - t.Parallel() - - mockEtherman 
:= new(mocks.EthermanMock) - mockStorage := new(mocks.StorageInterfaceMock) - - mockEtherman.On("GetLatestVerifiedBatchNum").Return(uint64(110), nil).Once() - mockStorage.On("DeleteUngeneratedProofs", mock.Anything, mock.Anything).Return(nil).Once() - mockStorage.On("DeleteGeneratedProofs", mock.Anything, mock.Anything, mock.Anything, mock.Anything).Return(nil).Once() - - // Test data - rollbackData := synchronizer.RollbackBatchesData{ - LastBatchNumber: 100, - } - - a := Aggregator{ - ctx: context.Background(), - etherman: mockEtherman, - storage: mockStorage, - logger: log.GetDefaultLogger(), - halted: atomic.Bool{}, - accInputHashes: make(map[uint64]common.Hash), - accInputHashesMutex: &sync.Mutex{}, - } - - a.halted.Store(false) - go a.handleRollbackBatches(rollbackData) - time.Sleep(3 * time.Second) - - assert.True(t, a.halted.Load()) - mockEtherman.AssertExpectations(t) -} - -func Test_handleRollbackBatchesError(t *testing.T) { - t.Parallel() - - mockEtherman := new(mocks.EthermanMock) - mockStorage := new(mocks.StorageInterfaceMock) - - mockEtherman.On("GetLatestVerifiedBatchNum").Return(uint64(110), fmt.Errorf("error")).Once() - - // Test data - rollbackData := synchronizer.RollbackBatchesData{ - LastBatchNumber: 100, - } - - a := Aggregator{ - ctx: context.Background(), - etherman: mockEtherman, - storage: mockStorage, - logger: log.GetDefaultLogger(), - halted: atomic.Bool{}, - accInputHashes: make(map[uint64]common.Hash), - accInputHashesMutex: &sync.Mutex{}, - } - - a.halted.Store(false) - go a.handleRollbackBatches(rollbackData) - time.Sleep(3 * time.Second) - - assert.True(t, a.halted.Load()) - mockEtherman.AssertExpectations(t) -} - -func Test_sendFinalProofSuccess(t *testing.T) { - require := require.New(t) - assert := assert.New(t) - batchNum := uint64(23) - batchNumFinal := uint64(42) - - recursiveProof := &state.Proof{ - Prover: &proverName, - ProverID: &proverID, - ProofID: &proofID, - BatchNumber: batchNum, - BatchNumberFinal: batchNumFinal, - 
} - finalProof := &prover.FinalProof{} - - testCases := []struct { - name string - setup func(m mox, a *Aggregator) - asserts func(a *Aggregator) - }{ - { - name: "Successfully settled on Agglayer", - setup: func(m mox, a *Aggregator) { - cfg := Config{ - SettlementBackend: AggLayer, - AggLayerTxTimeout: types.Duration{Duration: time.Millisecond * 1}, - } - a.cfg = cfg - - batch := rpctypes.NewRPCBatch(batchNumFinal, common.Hash{}, []string{}, []byte{}, common.Hash{}, common.Hash{}, common.Hash{}, common.Address{}, false) - m.rpcMock.On("GetBatch", batchNumFinal).Return(batch, nil) - - m.etherman.On("GetRollupId").Return(uint32(1)).Once() - testHash := common.BytesToHash([]byte("test hash")) - m.aggLayerClientMock.On("SendTx", mock.Anything).Return(testHash, nil) - m.aggLayerClientMock.On("WaitTxToBeMined", testHash, mock.Anything).Return(nil) - }, - asserts: func(a *Aggregator) { - assert.False(a.verifyingProof) - }, - }, - { - name: "Successfully settled on L1 (Direct)", - setup: func(m mox, a *Aggregator) { - senderAddr := common.BytesToAddress([]byte("sender address")).Hex() - toAddr := common.BytesToAddress([]byte("to address")) - data := []byte("data") - cfg := Config{ - SettlementBackend: L1, - SenderAddress: senderAddr, - GasOffset: uint64(10), - } - a.cfg = cfg - - batch := rpctypes.NewRPCBatch(batchNumFinal, common.Hash{}, []string{}, []byte{}, common.Hash{}, common.Hash{}, common.Hash{}, common.Address{}, false) - m.rpcMock.On("GetBatch", batchNumFinal).Return(batch, nil) - - m.etherman.On("BuildTrustedVerifyBatchesTxData", batchNum-1, batchNumFinal, mock.Anything, common.HexToAddress(senderAddr)).Return(&toAddr, data, nil).Once() - m.ethTxManager.On("Add", mock.Anything, &toAddr, big.NewInt(0), data, a.cfg.GasOffset, (*ethTypes.BlobTxSidecar)(nil)).Return(nil, nil).Once() - m.ethTxManager.On("ProcessPendingMonitoredTxs", mock.Anything, mock.Anything).Once() - }, - asserts: func(a *Aggregator) { - assert.False(a.verifyingProof) - }, - }, - } - - for _, 
tc := range testCases { - tc := tc - t.Run(tc.name, func(t *testing.T) { - storageMock := mocks.NewStorageInterfaceMock(t) - ethTxManager := mocks.NewEthTxManagerClientMock(t) - etherman := mocks.NewEthermanMock(t) - aggLayerClient := agglayer.NewAgglayerClientMock(t) - rpcMock := mocks.NewRPCInterfaceMock(t) - - curve := elliptic.P256() - privateKey, err := ecdsa.GenerateKey(curve, rand.Reader) - require.NoError(err, "error generating key") - - a := Aggregator{ - storage: storageMock, - etherman: etherman, - ethTxManager: ethTxManager, - aggLayerClient: aggLayerClient, - finalProof: make(chan finalProofMsg), - logger: log.GetDefaultLogger(), - verifyingProof: false, - storageMutex: &sync.Mutex{}, - timeSendFinalProofMutex: &sync.RWMutex{}, - sequencerPrivateKey: privateKey, - rpcClient: rpcMock, - accInputHashes: make(map[uint64]common.Hash), - accInputHashesMutex: &sync.Mutex{}, - } - a.ctx, a.exit = context.WithCancel(context.Background()) - - m := mox{ - storageMock: storageMock, - ethTxManager: ethTxManager, - etherman: etherman, - aggLayerClientMock: aggLayerClient, - rpcMock: rpcMock, - } - if tc.setup != nil { - tc.setup(m, &a) - } - // send a final proof over the channel - go func() { - finalMsg := finalProofMsg{ - proverID: proverID, - recursiveProof: recursiveProof, - finalProof: finalProof, - } - a.finalProof <- finalMsg - time.Sleep(1 * time.Second) - a.exit() - }() - - a.sendFinalProof() - if tc.asserts != nil { - tc.asserts(&a) - } - }) - } -} - -func Test_sendFinalProofError(t *testing.T) { - require := require.New(t) - assert := assert.New(t) - errTest := errors.New("test error") - batchNum := uint64(23) - batchNumFinal := uint64(42) - sender := common.BytesToAddress([]byte("SenderAddress")) - senderAddr := sender.Hex() - - recursiveProof := &state.Proof{ - Prover: &proverName, - ProverID: &proverID, - ProofID: &proofID, - BatchNumber: batchNum, - BatchNumberFinal: batchNumFinal, - } - finalProof := &prover.FinalProof{} - - testCases := []struct { 
- name string - setup func(mox, *Aggregator) - asserts func(*Aggregator) - }{ - { - name: "Failed to settle on Agglayer: GetBatch error", - setup: func(m mox, a *Aggregator) { - m.rpcMock.On("GetBatch", batchNumFinal).Run(func(args mock.Arguments) { - // test is done, stop the sendFinalProof method - fmt.Println("Stopping sendFinalProof") - a.exit() - }).Return(nil, errTest).Once() - }, - asserts: func(a *Aggregator) { - assert.False(a.verifyingProof) - }, - }, - { - name: "Failed to settle on Agglayer: SendTx error", - setup: func(m mox, a *Aggregator) { - cfg := Config{ - SettlementBackend: AggLayer, - } - a.cfg = cfg - - batch := rpctypes.NewRPCBatch(batchNumFinal, common.Hash{}, []string{}, []byte{}, common.Hash{}, common.Hash{}, common.Hash{}, common.Address{}, false) - m.rpcMock.On("GetBatch", batchNumFinal).Return(batch, nil) - - m.etherman.On("GetRollupId").Return(uint32(1)).Once() - m.aggLayerClientMock.On("SendTx", mock.Anything).Run(func(args mock.Arguments) { - // test is done, stop the sendFinalProof method - fmt.Println("Stopping sendFinalProof") - a.exit() - }).Return(nil, errTest).Once() - m.storageMock.On("UpdateGeneratedProof", mock.Anything, mock.Anything, nil).Return(nil).Once() - }, - asserts: func(a *Aggregator) { - assert.False(a.verifyingProof) - }, - }, - { - name: "Failed to settle on Agglayer: WaitTxToBeMined error", - setup: func(m mox, a *Aggregator) { - cfg := Config{ - SettlementBackend: AggLayer, - AggLayerTxTimeout: types.Duration{Duration: time.Millisecond * 1}, - } - a.cfg = cfg - - batch := rpctypes.NewRPCBatch(batchNumFinal, common.Hash{}, []string{}, []byte{}, common.Hash{}, common.Hash{}, common.Hash{}, common.Address{}, false) - m.rpcMock.On("GetBatch", batchNumFinal).Return(batch, nil) - - m.etherman.On("GetRollupId").Return(uint32(1)).Once() - m.aggLayerClientMock.On("SendTx", mock.Anything).Return(common.Hash{}, nil).Once() - m.aggLayerClientMock.On("WaitTxToBeMined", mock.Anything, mock.Anything).Run(func(args 
mock.Arguments) { - fmt.Println("Stopping sendFinalProof") - a.exit() - }).Return(errTest) - m.storageMock.On("UpdateGeneratedProof", mock.Anything, mock.Anything, nil).Return(nil).Once() - }, - asserts: func(a *Aggregator) { - assert.False(a.verifyingProof) - }, - }, - { - name: "Failed to settle on L1 (Direct): BuildTrustedVerifyBatchesTxData error", - setup: func(m mox, a *Aggregator) { - cfg := Config{ - SettlementBackend: L1, - SenderAddress: senderAddr, - } - a.cfg = cfg - - batch := rpctypes.NewRPCBatch(batchNumFinal, common.Hash{}, []string{}, []byte{}, common.Hash{}, common.Hash{}, common.Hash{}, common.Address{}, false) - m.rpcMock.On("GetBatch", batchNumFinal).Return(batch, nil) - - m.etherman.On("BuildTrustedVerifyBatchesTxData", batchNum-1, batchNumFinal, mock.Anything, sender).Run(func(args mock.Arguments) { - fmt.Println("Stopping sendFinalProof") - a.exit() - }).Return(nil, nil, errTest) - m.storageMock.On("UpdateGeneratedProof", mock.Anything, recursiveProof, nil).Return(nil).Once() - }, - asserts: func(a *Aggregator) { - assert.False(a.verifyingProof) - }, - }, - { - name: "Failed to settle on L1 (Direct): Error Adding TX to ethTxManager", - setup: func(m mox, a *Aggregator) { - cfg := Config{ - SettlementBackend: L1, - SenderAddress: senderAddr, - GasOffset: uint64(10), - } - a.cfg = cfg - - batch := rpctypes.NewRPCBatch(batchNumFinal, common.Hash{}, []string{}, []byte{}, common.Hash{}, common.Hash{}, common.Hash{}, common.Address{}, false) - m.rpcMock.On("GetBatch", batchNumFinal).Return(batch, nil) - - m.etherman.On("BuildTrustedVerifyBatchesTxData", batchNum-1, batchNumFinal, mock.Anything, sender).Return(nil, nil, nil).Once() - m.ethTxManager.On("Add", mock.Anything, mock.Anything, big.NewInt(0), mock.Anything, a.cfg.GasOffset, (*ethTypes.BlobTxSidecar)(nil)).Run(func(args mock.Arguments) { - fmt.Println("Stopping sendFinalProof") - a.exit() - }).Return(nil, errTest).Once() - m.storageMock.On("UpdateGeneratedProof", mock.Anything, 
recursiveProof, nil).Return(nil).Once() - }, - asserts: func(a *Aggregator) { - assert.False(a.verifyingProof) - }, - }, - } - - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(t *testing.T) { - storageMock := mocks.NewStorageInterfaceMock(t) - ethTxManager := mocks.NewEthTxManagerClientMock(t) - etherman := mocks.NewEthermanMock(t) - aggLayerClient := agglayer.NewAgglayerClientMock(t) - rpcMock := mocks.NewRPCInterfaceMock(t) - - curve := elliptic.P256() - privateKey, err := ecdsa.GenerateKey(curve, rand.Reader) - require.NoError(err, "error generating key") - - a := Aggregator{ - storage: storageMock, - etherman: etherman, - ethTxManager: ethTxManager, - aggLayerClient: aggLayerClient, - finalProof: make(chan finalProofMsg), - logger: log.GetDefaultLogger(), - verifyingProof: false, - storageMutex: &sync.Mutex{}, - timeSendFinalProofMutex: &sync.RWMutex{}, - sequencerPrivateKey: privateKey, - rpcClient: rpcMock, - accInputHashes: make(map[uint64]common.Hash), - accInputHashesMutex: &sync.Mutex{}, - } - a.ctx, a.exit = context.WithCancel(context.Background()) - - m := mox{ - storageMock: storageMock, - ethTxManager: ethTxManager, - etherman: etherman, - aggLayerClientMock: aggLayerClient, - rpcMock: rpcMock, - } - if tc.setup != nil { - tc.setup(m, &a) - } - // send a final proof over the channel - go func() { - finalMsg := finalProofMsg{ - proverID: proverID, - recursiveProof: recursiveProof, - finalProof: finalProof, - } - a.finalProof <- finalMsg - }() - - a.sendFinalProof() - if tc.asserts != nil { - tc.asserts(&a) - } - }) - } -} - -func Test_buildFinalProof(t *testing.T) { - assert := assert.New(t) - batchNum := uint64(23) - batchNumFinal := uint64(42) - recursiveProof := &state.Proof{ - ProverID: &proverID, - Proof: "test proof", - ProofID: &proofID, - BatchNumber: batchNum, - BatchNumberFinal: batchNumFinal, - } - finalProofID := "finalProofID" - - testCases := []struct { - name string - setup func(mox, *Aggregator) - asserts func(err 
error, fProof *prover.FinalProof) - }{ - { - name: "using real prover", - setup: func(m mox, a *Aggregator) { - finalProof := prover.FinalProof{ - Public: &prover.PublicInputsExtended{ - NewStateRoot: []byte("StateRoot"), - NewLocalExitRoot: []byte("LocalExitRoot"), - }, - } - - m.proverMock.On("Name").Return("name").Once() - m.proverMock.On("ID").Return("id").Once() - m.proverMock.On("Addr").Return("addr").Once() - m.proverMock.On("FinalProof", recursiveProof.Proof, a.cfg.SenderAddress).Return(&finalProofID, nil).Once() - m.proverMock.On("WaitFinalProof", mock.Anything, finalProofID).Return(&finalProof, nil).Once() - }, - asserts: func(err error, fProof *prover.FinalProof) { - assert.NoError(err) - assert.True(bytes.Equal([]byte("StateRoot"), fProof.Public.NewStateRoot), "State roots should be equal") - assert.True(bytes.Equal([]byte("LocalExitRoot"), fProof.Public.NewLocalExitRoot), "LocalExit roots should be equal") - }, - }, - { - name: "using mock prover", - setup: func(m mox, a *Aggregator) { - finalProof := prover.FinalProof{ - Public: &prover.PublicInputsExtended{ - NewStateRoot: []byte(mockedStateRoot), - NewLocalExitRoot: []byte(mockedLocalExitRoot), - }, - } - - m.proverMock.On("Name").Return("name").Once() - m.proverMock.On("ID").Return("id").Once() - m.proverMock.On("Addr").Return("addr").Once() - m.proverMock.On("FinalProof", recursiveProof.Proof, a.cfg.SenderAddress).Return(&finalProofID, nil).Once() - m.proverMock.On("WaitFinalProof", mock.Anything, finalProofID).Return(&finalProof, nil).Once() - finalBatch := rpctypes.NewRPCBatch(batchNumFinal, common.Hash{}, []string{}, []byte{}, common.Hash{}, common.BytesToHash([]byte("mock LocalExitRoot")), common.BytesToHash([]byte("mock StateRoot")), common.Address{}, false) - m.rpcMock.On("GetBatch", batchNumFinal).Return(finalBatch, nil).Once() - }, - asserts: func(err error, fProof *prover.FinalProof) { - assert.NoError(err) - expStateRoot := common.BytesToHash([]byte("mock StateRoot")) - expLocalExitRoot 
:= common.BytesToHash([]byte("mock LocalExitRoot")) - assert.True(bytes.Equal(expStateRoot.Bytes(), fProof.Public.NewStateRoot), "State roots should be equal") - assert.True(bytes.Equal(expLocalExitRoot.Bytes(), fProof.Public.NewLocalExitRoot), "LocalExit roots should be equal") - }, - }, - } - - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(t *testing.T) { - proverMock := mocks.NewProverInterfaceMock(t) - storageMock := mocks.NewStorageInterfaceMock(t) - rpcMock := mocks.NewRPCInterfaceMock(t) - m := mox{ - proverMock: proverMock, - storageMock: storageMock, - rpcMock: rpcMock, - } - a := Aggregator{ - storage: storageMock, - logger: log.GetDefaultLogger(), - cfg: Config{ - SenderAddress: common.BytesToAddress([]byte("from")).Hex(), - }, - rpcClient: rpcMock, - accInputHashes: make(map[uint64]common.Hash), - accInputHashesMutex: &sync.Mutex{}, - } - - tc.setup(m, &a) - fProof, err := a.buildFinalProof(context.Background(), proverMock, recursiveProof) - tc.asserts(err, fProof) - }) - } -} - -func Test_tryBuildFinalProof(t *testing.T) { - assert := assert.New(t) - errTest := errors.New("test error") - from := common.BytesToAddress([]byte("from")) - cfg := Config{ - VerifyProofInterval: types.Duration{Duration: time.Millisecond * 1}, - SenderAddress: from.Hex(), - } - latestVerifiedBatchNum := uint64(22) - batchNum := uint64(23) - batchNumFinal := uint64(42) - finalProofID := "finalProofID" - finalProof := prover.FinalProof{ - Proof: "", - Public: &prover.PublicInputsExtended{ - NewStateRoot: []byte("newStateRoot"), - NewLocalExitRoot: []byte("newLocalExitRoot"), - }, - } - proofToVerify := state.Proof{ - ProofID: &proofID, - Proof: proof, - BatchNumber: batchNum, - BatchNumberFinal: batchNumFinal, - } - invalidProof := state.Proof{ - ProofID: &proofID, - Proof: proof, - BatchNumber: uint64(123), - BatchNumberFinal: uint64(456), - } - - proverCtx := context.WithValue(context.Background(), "owner", ownerProver) //nolint:staticcheck - matchProverCtxFn 
:= func(ctx context.Context) bool { return ctx.Value("owner") == ownerProver } - matchAggregatorCtxFn := func(ctx context.Context) bool { return ctx.Value("owner") == ownerAggregator } - testCases := []struct { - name string - proof *state.Proof - setup func(mox, *Aggregator) - asserts func(bool, *Aggregator, error) - assertFinalMsg func(*finalProofMsg) - }{ - { - name: "can't verify proof (verifyingProof = true)", - setup: func(m mox, a *Aggregator) { - m.proverMock.On("Name").Return(proverName).Once() - m.proverMock.On("ID").Return(proverID).Once() - m.proverMock.On("Addr").Return("addr").Once() - a.verifyingProof = true - }, - asserts: func(result bool, a *Aggregator, err error) { - a.verifyingProof = false // reset - assert.False(result) - assert.NoError(err) - }, - }, - { - name: "can't verify proof (veryfy time not reached yet)", - setup: func(m mox, a *Aggregator) { - a.timeSendFinalProof = time.Now().Add(10 * time.Second) - m.proverMock.On("Name").Return(proverName).Once() - m.proverMock.On("ID").Return(proverID).Once() - m.proverMock.On("Addr").Return("addr").Once() - }, - asserts: func(result bool, a *Aggregator, err error) { - assert.False(result) - assert.NoError(err) - }, - }, - { - name: "nil proof, error requesting the proof triggers defer", - setup: func(m mox, a *Aggregator) { - m.proverMock.On("Name").Return(proverName).Twice() - m.proverMock.On("ID").Return(proverID).Twice() - m.proverMock.On("Addr").Return("addr").Twice() - m.etherman.On("GetLatestVerifiedBatchNum").Return(latestVerifiedBatchNum, nil).Once() - m.storageMock.On("GetProofReadyToVerify", mock.MatchedBy(matchProverCtxFn), latestVerifiedBatchNum, nil).Return(&proofToVerify, nil).Once() - proofGeneratingTrueCall := m.storageMock.On("UpdateGeneratedProof", mock.MatchedBy(matchProverCtxFn), &proofToVerify, nil).Return(nil).Once() - m.proverMock.On("FinalProof", proofToVerify.Proof, from.String()).Return(nil, errTest).Once() - m.storageMock. 
- On("UpdateGeneratedProof", mock.MatchedBy(matchAggregatorCtxFn), &proofToVerify, nil). - Return(nil). - Once(). - NotBefore(proofGeneratingTrueCall) - }, - asserts: func(result bool, a *Aggregator, err error) { - assert.False(result) - assert.ErrorIs(err, errTest) - }, - }, - { - name: "nil proof, error building the proof triggers defer", - setup: func(m mox, a *Aggregator) { - m.proverMock.On("Name").Return(proverName).Twice() - m.proverMock.On("ID").Return(proverID).Twice() - m.proverMock.On("Addr").Return("addr").Twice() - m.etherman.On("GetLatestVerifiedBatchNum").Return(latestVerifiedBatchNum, nil).Once() - m.storageMock.On("GetProofReadyToVerify", mock.MatchedBy(matchProverCtxFn), latestVerifiedBatchNum, nil).Return(&proofToVerify, nil).Once() - proofGeneratingTrueCall := m.storageMock.On("UpdateGeneratedProof", mock.MatchedBy(matchProverCtxFn), &proofToVerify, nil).Return(nil).Once() - m.proverMock.On("FinalProof", proofToVerify.Proof, from.String()).Return(&finalProofID, nil).Once() - m.proverMock.On("WaitFinalProof", mock.MatchedBy(matchProverCtxFn), finalProofID).Return(nil, errTest).Once() - m.storageMock. - On("UpdateGeneratedProof", mock.MatchedBy(matchAggregatorCtxFn), &proofToVerify, nil). - Return(nil). - Once(). 
- NotBefore(proofGeneratingTrueCall) - }, - asserts: func(result bool, a *Aggregator, err error) { - assert.False(result) - assert.ErrorIs(err, errTest) - }, - }, - { - name: "nil proof, generic error from GetProofReadyToVerify", - setup: func(m mox, a *Aggregator) { - m.proverMock.On("Name").Return(proverName).Once() - m.proverMock.On("ID").Return(proverID).Once() - m.proverMock.On("Addr").Return(proverID).Once() - m.etherman.On("GetLatestVerifiedBatchNum").Return(latestVerifiedBatchNum, nil).Once() - m.storageMock.On("GetProofReadyToVerify", mock.MatchedBy(matchProverCtxFn), latestVerifiedBatchNum, nil).Return(nil, errTest).Once() - }, - asserts: func(result bool, a *Aggregator, err error) { - assert.False(result) - assert.ErrorIs(err, errTest) - }, - }, - { - name: "nil proof, ErrNotFound from GetProofReadyToVerify", - setup: func(m mox, a *Aggregator) { - m.proverMock.On("Name").Return(proverName).Once() - m.proverMock.On("ID").Return(proverID).Once() - m.proverMock.On("Addr").Return(proverID).Once() - m.etherman.On("GetLatestVerifiedBatchNum").Return(latestVerifiedBatchNum, nil).Once() - m.storageMock.On("GetProofReadyToVerify", mock.MatchedBy(matchProverCtxFn), latestVerifiedBatchNum, nil).Return(nil, state.ErrNotFound).Once() - }, - asserts: func(result bool, a *Aggregator, err error) { - assert.False(result) - assert.NoError(err) - }, - }, - { - name: "nil proof gets a proof ready to verify", - setup: func(m mox, a *Aggregator) { - m.proverMock.On("Name").Return(proverName).Twice() - m.proverMock.On("ID").Return(proverID).Twice() - m.proverMock.On("Addr").Return(proverID).Twice() - m.etherman.On("GetLatestVerifiedBatchNum").Return(latestVerifiedBatchNum, nil).Once() - m.storageMock.On("GetProofReadyToVerify", mock.MatchedBy(matchProverCtxFn), latestVerifiedBatchNum, nil).Return(&proofToVerify, nil).Once() - m.storageMock.On("UpdateGeneratedProof", mock.MatchedBy(matchProverCtxFn), &proofToVerify, nil).Return(nil).Once() - m.proverMock.On("FinalProof", 
proofToVerify.Proof, from.String()).Return(&finalProofID, nil).Once() - m.proverMock.On("WaitFinalProof", mock.MatchedBy(matchProverCtxFn), finalProofID).Return(&finalProof, nil).Once() - }, - asserts: func(result bool, a *Aggregator, err error) { - assert.True(result) - assert.NoError(err) - }, - assertFinalMsg: func(msg *finalProofMsg) { - assert.Equal(finalProof.Proof, msg.finalProof.Proof) - assert.Equal(finalProof.Public.NewStateRoot, msg.finalProof.Public.NewStateRoot) - assert.Equal(finalProof.Public.NewLocalExitRoot, msg.finalProof.Public.NewLocalExitRoot) - }, - }, - { - name: "error checking if proof is a complete sequence", - proof: &proofToVerify, - setup: func(m mox, a *Aggregator) { - m.proverMock.On("Name").Return(proverName).Once() - m.proverMock.On("ID").Return(proverID).Once() - m.proverMock.On("Addr").Return(proverID).Once() - m.etherman.On("GetLatestVerifiedBatchNum").Return(latestVerifiedBatchNum, nil).Once() - m.storageMock.On("CheckProofContainsCompleteSequences", mock.MatchedBy(matchProverCtxFn), &proofToVerify, nil).Return(false, errTest).Once() - }, - asserts: func(result bool, a *Aggregator, err error) { - assert.False(result) - assert.ErrorIs(err, errTest) - }, - }, - { - name: "invalid proof (not consecutive to latest verified batch) rejected", - proof: &invalidProof, - setup: func(m mox, a *Aggregator) { - m.proverMock.On("Name").Return(proverName).Once() - m.proverMock.On("ID").Return(proverID).Once() - m.proverMock.On("Addr").Return(proverID).Once() - m.etherman.On("GetLatestVerifiedBatchNum").Return(latestVerifiedBatchNum, nil).Once() - }, - asserts: func(result bool, a *Aggregator, err error) { - assert.False(result) - assert.NoError(err) - }, - }, - { - name: "invalid proof (not a complete sequence) rejected", - proof: &proofToVerify, - setup: func(m mox, a *Aggregator) { - m.proverMock.On("Name").Return(proverName).Once() - m.proverMock.On("ID").Return(proverID).Once() - m.proverMock.On("Addr").Return(proverID).Once() - 
m.etherman.On("GetLatestVerifiedBatchNum").Return(latestVerifiedBatchNum, nil).Once() - m.storageMock.On("CheckProofContainsCompleteSequences", mock.MatchedBy(matchProverCtxFn), &proofToVerify, nil).Return(false, nil).Once() - }, - asserts: func(result bool, a *Aggregator, err error) { - assert.False(result) - assert.NoError(err) - }, - }, - { - name: "valid proof", - proof: &proofToVerify, - setup: func(m mox, a *Aggregator) { - m.proverMock.On("Name").Return(proverName).Twice() - m.proverMock.On("ID").Return(proverID).Twice() - m.proverMock.On("Addr").Return(proverID).Twice() - m.etherman.On("GetLatestVerifiedBatchNum").Return(latestVerifiedBatchNum, nil).Once() - m.storageMock.On("CheckProofContainsCompleteSequences", mock.MatchedBy(matchProverCtxFn), &proofToVerify, nil).Return(true, nil).Once() - m.proverMock.On("FinalProof", proofToVerify.Proof, from.String()).Return(&finalProofID, nil).Once() - m.proverMock.On("WaitFinalProof", mock.MatchedBy(matchProverCtxFn), finalProofID).Return(&finalProof, nil).Once() - }, - asserts: func(result bool, a *Aggregator, err error) { - assert.True(result) - assert.NoError(err) - }, - assertFinalMsg: func(msg *finalProofMsg) { - assert.Equal(finalProof.Proof, msg.finalProof.Proof) - assert.Equal(finalProof.Public.NewStateRoot, msg.finalProof.Public.NewStateRoot) - assert.Equal(finalProof.Public.NewLocalExitRoot, msg.finalProof.Public.NewLocalExitRoot) - }, - }, - } - - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(t *testing.T) { - storageMock := mocks.NewStorageInterfaceMock(t) - ethTxManager := mocks.NewEthTxManagerClientMock(t) - etherman := mocks.NewEthermanMock(t) - proverMock := mocks.NewProverInterfaceMock(t) - - a := Aggregator{ - cfg: cfg, - storage: storageMock, - etherman: etherman, - ethTxManager: ethTxManager, - logger: log.GetDefaultLogger(), - storageMutex: &sync.Mutex{}, - timeSendFinalProofMutex: &sync.RWMutex{}, - timeCleanupLockedProofs: cfg.CleanupLockedProofsInterval, - finalProof: 
make(chan finalProofMsg), - accInputHashes: make(map[uint64]common.Hash), - accInputHashesMutex: &sync.Mutex{}, - } - - aggregatorCtx := context.WithValue(context.Background(), "owner", ownerAggregator) //nolint:staticcheck - a.ctx, a.exit = context.WithCancel(aggregatorCtx) - m := mox{ - storageMock: storageMock, - ethTxManager: ethTxManager, - etherman: etherman, - proverMock: proverMock, - } - if tc.setup != nil { - tc.setup(m, &a) - } - - var wg sync.WaitGroup - if tc.assertFinalMsg != nil { - // wait for the final proof over the channel - wg := sync.WaitGroup{} - wg.Add(1) - go func() { - defer wg.Done() - msg := <-a.finalProof - tc.assertFinalMsg(&msg) - }() - } - - result, err := a.tryBuildFinalProof(proverCtx, proverMock, tc.proof) - - if tc.asserts != nil { - tc.asserts(result, &a, err) - } - - if tc.assertFinalMsg != nil { - WaitUntil(t, &wg, time.Second) - } - }) - } -} - -func Test_tryAggregateProofs(t *testing.T) { - require := require.New(t) - assert := assert.New(t) - errTest := errors.New("test error") - cfg := Config{ - VerifyProofInterval: types.Duration{Duration: time.Millisecond * 1}, - } - - recursiveProof := "recursiveProof" - proverCtx := context.WithValue(context.Background(), "owner", ownerProver) //nolint:staticcheck - matchProverCtxFn := func(ctx context.Context) bool { return ctx.Value("owner") == ownerProver } - matchAggregatorCtxFn := func(ctx context.Context) bool { return ctx.Value("owner") == ownerAggregator } - batchNum := uint64(23) - batchNumFinal := uint64(42) - proof1 := state.Proof{ - Proof: "proof1", - BatchNumber: batchNum, - } - proof2 := state.Proof{ - Proof: "proof2", - BatchNumberFinal: batchNumFinal, - } - testCases := []struct { - name string - setup func(mox, *Aggregator) - asserts func(bool, *Aggregator, error) - }{ - { - name: "getAndLockProofsToAggregate returns generic error", - setup: func(m mox, a *Aggregator) { - m.proverMock.On("Name").Return(proverName).Twice() - m.proverMock.On("ID").Return(proverID).Twice() 
- m.proverMock.On("Addr").Return("addr") - m.storageMock.On("GetProofsToAggregate", mock.MatchedBy(matchProverCtxFn), nil).Return(nil, nil, errTest).Once() - }, - asserts: func(result bool, a *Aggregator, err error) { - assert.False(result) - assert.ErrorIs(err, errTest) - }, - }, - { - name: "getAndLockProofsToAggregate returns ErrNotFound", - setup: func(m mox, a *Aggregator) { - m.proverMock.On("Name").Return(proverName).Twice() - m.proverMock.On("ID").Return(proverID).Twice() - m.proverMock.On("Addr").Return("addr") - m.storageMock.On("GetProofsToAggregate", mock.MatchedBy(matchProverCtxFn), nil).Return(nil, nil, state.ErrNotFound).Once() - }, - asserts: func(result bool, a *Aggregator, err error) { - assert.False(result) - assert.NoError(err) - }, - }, - { - name: "getAndLockProofsToAggregate error updating proofs", - setup: func(m mox, a *Aggregator) { - m.proverMock.On("Name").Return(proverName).Twice() - m.proverMock.On("ID").Return(proverID).Twice() - m.proverMock.On("Addr").Return("addr") - m.txerMock.On("Rollback").Return(nil).Once() - m.storageMock.On("BeginTx", mock.Anything, (*sql.TxOptions)(nil)).Return(m.txerMock, nil).Once() - m.storageMock.On("GetProofsToAggregate", mock.MatchedBy(matchProverCtxFn), nil).Return(&proof1, &proof2, nil).Once() - - m.storageMock. - On("UpdateGeneratedProof", mock.MatchedBy(matchProverCtxFn), &proof1, mock.Anything). - Run(func(args mock.Arguments) { - proofArg, ok := args[1].(*state.Proof) - assert.True(ok, "Expected argument of type *state.Proof") - assert.NotNil(proofArg.GeneratingSince, "Expected GeneratingSince to be not nil") - }). - Return(errTest). 
- Once() - }, - asserts: func(result bool, a *Aggregator, err error) { - assert.False(result) - assert.ErrorIs(err, errTest) - }, - }, - - { - name: "AggregatedProof error", - setup: func(m mox, a *Aggregator) { - m.proverMock.On("Name").Return(proverName).Twice() - m.proverMock.On("ID").Return(proverID).Twice() - m.proverMock.On("Addr").Return("addr") - - lockProofsTxBegin := m.storageMock.On("BeginTx", mock.MatchedBy(matchProverCtxFn), (*sql.TxOptions)(nil)).Return(m.txerMock, nil).Once() - // lockProofsTxCommit := m.proverMock.On("Commit").Return(nil).Once() - m.storageMock.On("GetProofsToAggregate", mock.MatchedBy(matchProverCtxFn), nil).Return(&proof1, &proof2, nil).Once() - proof1GeneratingTrueCall := m.storageMock. - On("UpdateGeneratedProof", mock.MatchedBy(matchProverCtxFn), &proof1, mock.Anything). - Run(func(args mock.Arguments) { - proofArg, ok := args[1].(*state.Proof) - assert.True(ok, "Expected argument of type *state.Proof") - assert.NotNil(proofArg.GeneratingSince, "Expected GeneratingSince to be not nil") - }). - Return(nil). - Once() - proof2GeneratingTrueCall := m.storageMock. - On("UpdateGeneratedProof", mock.MatchedBy(matchProverCtxFn), &proof2, mock.Anything). - Run(func(args mock.Arguments) { - // Use a type assertion with a check - proofArg, ok := args[1].(*state.Proof) - if !ok { - assert.Fail("Expected argument of type *state.Proof") - } - assert.NotNil(proofArg.GeneratingSince) - }). - Return(nil). - Once() - m.proverMock.On("AggregatedProof", proof1.Proof, proof2.Proof).Return(nil, errTest).Once() - m.storageMock.On("BeginTx", mock.Anything, (*sql.TxOptions)(nil)).Return(m.txerMock, nil).Once().NotBefore(lockProofsTxBegin) - m.storageMock. - On("UpdateGeneratedProof", mock.MatchedBy(matchAggregatorCtxFn), &proof1, mock.Anything). - Run(func(args mock.Arguments) { - proofArg, ok := args[1].(*state.Proof) - if !ok { - assert.Fail("Expected argument of type *state.Proof") - } - assert.Nil(proofArg.GeneratingSince) - }). - Return(nil). 
- Once(). - NotBefore(proof1GeneratingTrueCall) - m.storageMock. - On("UpdateGeneratedProof", mock.MatchedBy(matchAggregatorCtxFn), &proof2, mock.Anything). - Run(func(args mock.Arguments) { - proofArg, ok := args[1].(*state.Proof) - if !ok { - assert.Fail("Expected argument of type *state.Proof") - } - assert.Nil(proofArg.GeneratingSince, "Expected GeneratingSince to be nil") - }). - Return(nil). - Once(). - NotBefore(proof2GeneratingTrueCall) - m.txerMock.On("Commit").Return(nil) - }, - asserts: func(result bool, a *Aggregator, err error) { - assert.False(result) - assert.ErrorIs(err, errTest) - }, - }, - - { - name: "WaitRecursiveProof prover error", - setup: func(m mox, a *Aggregator) { - m.proverMock.On("Name").Return(proverName).Twice() - m.proverMock.On("ID").Return(proverID).Twice() - m.proverMock.On("Addr").Return("addr") - lockProofsTxBegin := m.storageMock.On("BeginTx", mock.MatchedBy(matchProverCtxFn), (*sql.TxOptions)(nil)).Return(m.txerMock, nil).Once() - // lockProofsTxCommit := dbTx.On("Commit", mock.MatchedBy(matchProverCtxFn)).Return(nil).Once() - m.storageMock.On("GetProofsToAggregate", mock.MatchedBy(matchProverCtxFn), nil).Return(&proof1, &proof2, nil).Once() - proof1GeneratingTrueCall := m.storageMock. - On("UpdateGeneratedProof", mock.MatchedBy(matchProverCtxFn), &proof1, mock.Anything). - Run(func(args mock.Arguments) { - proofArg, ok := args[1].(*state.Proof) - if !ok { - assert.Fail("Expected argument of type *state.Proof") - } - assert.NotNil(proofArg.GeneratingSince, "Expected GeneratingSince to be not nil") - }). - Return(nil). - Once() - proof2GeneratingTrueCall := m.storageMock. - On("UpdateGeneratedProof", mock.MatchedBy(matchProverCtxFn), &proof2, mock.Anything). - Run(func(args mock.Arguments) { - proofArg, ok := args[1].(*state.Proof) - assert.True(ok, "Expected argument of type *state.Proof") - assert.NotNil(proofArg.GeneratingSince, "Expected GeneratingSince to be not nil") - }). - Return(nil). 
- Once() - m.proverMock.On("AggregatedProof", proof1.Proof, proof2.Proof).Return(&proofID, nil).Once() - - m.proverMock.On("WaitRecursiveProof", mock.MatchedBy(matchProverCtxFn), proofID).Return("", common.Hash{}, common.Hash{}, errTest).Once() - m.storageMock.On("BeginTx", mock.MatchedBy(matchAggregatorCtxFn), (*sql.TxOptions)(nil)).Return(m.txerMock, nil).Once().NotBefore(lockProofsTxBegin) - m.storageMock. - On("UpdateGeneratedProof", mock.MatchedBy(matchAggregatorCtxFn), &proof1, mock.Anything). - Run(func(args mock.Arguments) { - proofArg, ok := args[1].(*state.Proof) - assert.True(ok, "Expected argument of type *state.Proof") - assert.Nil(proofArg.GeneratingSince, "Expected GeneratingSince to be not nil") - }). - Return(nil). - Once(). - NotBefore(proof1GeneratingTrueCall) - m.storageMock. - On("UpdateGeneratedProof", mock.MatchedBy(matchAggregatorCtxFn), &proof2, mock.Anything). - Run(func(args mock.Arguments) { - proofArg, ok := args[1].(*state.Proof) - assert.True(ok, "Expected argument of type *state.Proof") - assert.Nil(proofArg.GeneratingSince, "Expected GeneratingSince to be not nil") - }). - Return(nil). - Once(). - NotBefore(proof2GeneratingTrueCall) - m.txerMock.On("Commit").Return(nil) - }, - asserts: func(result bool, a *Aggregator, err error) { - assert.False(result) - assert.ErrorIs(err, errTest) - }, - }, - - { - name: "unlockProofsToAggregate error after WaitRecursiveProof prover error", - setup: func(m mox, a *Aggregator) { - m.proverMock.On("Name").Return(proverName).Twice() - m.proverMock.On("ID").Return(proverID).Twice() - m.proverMock.On("Addr").Return(proverID) - // lockProofsTxBegin := m.storageMock.On("BeginTx", mock.MatchedBy(matchProverCtxFn)).Return(m.txerMock, nil).Once() - m.txerMock.On("Commit").Return(nil) - m.storageMock.On("GetProofsToAggregate", mock.MatchedBy(matchProverCtxFn), nil).Return(&proof1, &proof2, nil).Once() - proof1GeneratingTrueCall := m.storageMock. 
- On("UpdateGeneratedProof", mock.MatchedBy(matchProverCtxFn), &proof1, mock.Anything). - Run(func(args mock.Arguments) { - proofArg, ok := args[1].(*state.Proof) - assert.True(ok, "Expected argument of type *state.Proof") - assert.NotNil(proofArg.GeneratingSince, "Expected GeneratingSince to be not nil") - }). - Return(nil). - Once() - m.storageMock. - On("UpdateGeneratedProof", mock.MatchedBy(matchProverCtxFn), &proof2, mock.Anything). - Run(func(args mock.Arguments) { - proofArg, ok := args[1].(*state.Proof) - assert.True(ok, "Expected argument of type *state.Proof") - assert.NotNil(proofArg.GeneratingSince, "Expected GeneratingSince to be not nil") - }). - Return(nil). - Once() - m.proverMock.On("AggregatedProof", proof1.Proof, proof2.Proof).Return(&proofID, nil).Once() - m.proverMock.On("WaitRecursiveProof", mock.MatchedBy(matchProverCtxFn), proofID).Return("", common.Hash{}, common.Hash{}, errTest).Once() - m.storageMock.On("BeginTx", mock.Anything, (*sql.TxOptions)(nil)).Return(m.txerMock, nil) - m.storageMock. - On("UpdateGeneratedProof", mock.MatchedBy(matchAggregatorCtxFn), &proof1, mock.Anything). - Run(func(args mock.Arguments) { - proofArg, ok := args[1].(*state.Proof) - assert.True(ok, "Expected argument of type *state.Proof") - assert.Nil(proofArg.GeneratingSince, "Expected GeneratingSince to be not nil") - }). - Return(errTest). - Once(). 
- NotBefore(proof1GeneratingTrueCall) - m.txerMock.On("Rollback").Return(nil).Once() - }, - asserts: func(result bool, a *Aggregator, err error) { - assert.False(result) - assert.ErrorIs(err, errTest) - }, - }, - { - name: "rollback after DeleteGeneratedProofs error in db transaction", - setup: func(m mox, a *Aggregator) { - m.proverMock.On("Name").Return(proverName).Twice() - m.proverMock.On("ID").Return(proverID).Twice() - m.proverMock.On("Addr").Return("addr") - // lockProofsTxBegin := m.storageMock.On("BeginTx", mock.MatchedBy(matchProverCtxFn)).Return(dbTx, nil).Twice() - // lockProofsTxCommit := dbTx.On("Commit", mock.MatchedBy(matchProverCtxFn)).Return(nil).Once() - m.storageMock.On("GetProofsToAggregate", mock.MatchedBy(matchProverCtxFn), nil).Return(&proof1, &proof2, nil).Once() - proof1GeneratingTrueCall := m.storageMock. - On("UpdateGeneratedProof", mock.MatchedBy(matchProverCtxFn), &proof1, mock.Anything). - Run(func(args mock.Arguments) { - proofArg, ok := args[1].(*state.Proof) - assert.True(ok, "Expected argument of type *state.Proof") - assert.NotNil(proofArg.GeneratingSince, "Expected GeneratingSince to be not nil") - }). - Return(nil). - Once() - proof2GeneratingTrueCall := m.storageMock. - On("UpdateGeneratedProof", mock.MatchedBy(matchProverCtxFn), &proof2, mock.Anything). - Run(func(args mock.Arguments) { - proofArg, ok := args[1].(*state.Proof) - assert.True(ok, "Expected argument of type *state.Proof") - assert.NotNil(proofArg.GeneratingSince, "Expected GeneratingSince to be not nil") - }). - Return(nil). 
- Once() - m.proverMock.On("AggregatedProof", proof1.Proof, proof2.Proof).Return(&proofID, nil).Once() - m.proverMock.On("WaitRecursiveProof", mock.MatchedBy(matchProverCtxFn), proofID).Return(recursiveProof, common.Hash{}, common.Hash{}, nil).Once() - m.storageMock.On("DeleteGeneratedProofs", mock.MatchedBy(matchProverCtxFn), proof1.BatchNumber, proof2.BatchNumberFinal, mock.Anything).Return(errTest).Once() - m.txerMock.On("Rollback").Return(nil).Once() - m.storageMock.On("BeginTx", mock.Anything, (*sql.TxOptions)(nil)).Return(m.txerMock, nil) - m.storageMock. - On("UpdateGeneratedProof", mock.MatchedBy(matchAggregatorCtxFn), &proof1, mock.Anything). - Run(func(args mock.Arguments) { - proofArg, ok := args[1].(*state.Proof) - assert.True(ok, "Expected argument of type *state.Proof") - assert.Nil(proofArg.GeneratingSince, "Expected GeneratingSince to be not nil") - }). - Return(nil). - Once(). - NotBefore(proof1GeneratingTrueCall) - m.storageMock. - On("UpdateGeneratedProof", mock.MatchedBy(matchAggregatorCtxFn), &proof2, mock.Anything). - Run(func(args mock.Arguments) { - proofArg, ok := args[1].(*state.Proof) - assert.True(ok, "Expected argument of type *state.Proof") - assert.Nil(proofArg.GeneratingSince, "Expected GeneratingSince to be not nil") - }). - Return(nil). - Once(). 
- NotBefore(proof2GeneratingTrueCall) - m.txerMock.On("Commit").Return(nil) - }, - asserts: func(result bool, a *Aggregator, err error) { - assert.False(result) - assert.ErrorIs(err, errTest) - }, - }, - - { - name: "rollback after AddGeneratedProof error in db transaction", - setup: func(m mox, a *Aggregator) { - m.proverMock.On("Name").Return(proverName).Twice() - m.proverMock.On("ID").Return(proverID).Twice() - m.proverMock.On("Addr").Return("addr") - // lockProofsTxBegin := m.storageMock.On("BeginTx", mock.MatchedBy(matchProverCtxFn)).Return(dbTx, nil).Twice() - // lockProofsTxCommit := dbTx.On("Commit", mock.MatchedBy(matchProverCtxFn)).Return(nil).Once() - m.storageMock.On("GetProofsToAggregate", mock.MatchedBy(matchProverCtxFn), nil).Return(&proof1, &proof2, nil).Once() - proof1GeneratingTrueCall := m.storageMock. - On("UpdateGeneratedProof", mock.MatchedBy(matchProverCtxFn), &proof1, mock.Anything). - Run(func(args mock.Arguments) { - proofArg, ok := args[1].(*state.Proof) - assert.True(ok, "Expected argument of type *state.Proof") - assert.NotNil(proofArg.GeneratingSince, "Expected GeneratingSince to be not nil") - }). - Return(nil). - Once() - proof2GeneratingTrueCall := m.storageMock. - On("UpdateGeneratedProof", mock.MatchedBy(matchProverCtxFn), &proof2, mock.Anything). - Run(func(args mock.Arguments) { - proofArg, ok := args[1].(*state.Proof) - assert.True(ok, "Expected argument of type *state.Proof") - assert.NotNil(proofArg.GeneratingSince, "Expected GeneratingSince to be not nil") - }). - Return(nil). 
- Once() - m.proverMock.On("AggregatedProof", proof1.Proof, proof2.Proof).Return(&proofID, nil).Once() - m.proverMock.On("WaitRecursiveProof", mock.MatchedBy(matchProverCtxFn), proofID).Return(recursiveProof, common.Hash{}, common.Hash{}, nil).Once() - m.storageMock.On("DeleteGeneratedProofs", mock.MatchedBy(matchProverCtxFn), proof1.BatchNumber, proof2.BatchNumberFinal, mock.Anything).Return(nil).Once() - m.storageMock.On("AddGeneratedProof", mock.MatchedBy(matchProverCtxFn), mock.Anything, mock.Anything).Return(errTest).Once() - m.txerMock.On("Rollback").Return(nil).Once() - m.storageMock.On("BeginTx", mock.Anything, (*sql.TxOptions)(nil)).Return(m.txerMock, nil) - m.storageMock. - On("UpdateGeneratedProof", mock.MatchedBy(matchAggregatorCtxFn), &proof1, mock.Anything). - Run(func(args mock.Arguments) { - proofArg, ok := args[1].(*state.Proof) - assert.True(ok, "Expected argument of type *state.Proof") - assert.Nil(proofArg.GeneratingSince, "Expected GeneratingSince to be not nil") - }). - Return(nil). - Once(). - NotBefore(proof1GeneratingTrueCall) - m.storageMock. - On("UpdateGeneratedProof", mock.MatchedBy(matchAggregatorCtxFn), &proof2, mock.Anything). - Run(func(args mock.Arguments) { - proofArg, ok := args[1].(*state.Proof) - assert.True(ok, "Expected argument of type *state.Proof") - assert.Nil(proofArg.GeneratingSince, "Expected GeneratingSince to be not nil") - }). - Return(nil). - Once(). 
- NotBefore(proof2GeneratingTrueCall) - m.txerMock.On("Commit").Return(nil) - }, - asserts: func(result bool, a *Aggregator, err error) { - assert.False(result) - assert.ErrorIs(err, errTest) - }, - }, - { - name: "time to send final, state error", - setup: func(m mox, a *Aggregator) { - a.accInputHashes = make(map[uint64]common.Hash) - a.cfg.VerifyProofInterval = types.Duration{Duration: time.Nanosecond} - m.proverMock.On("Name").Return(proverName).Times(3) - m.proverMock.On("ID").Return(proverID).Times(3) - m.proverMock.On("Addr").Return("addr") - m.storageMock.On("BeginTx", mock.Anything, (*sql.TxOptions)(nil)).Return(m.txerMock, nil) - m.txerMock.On("Commit").Return(nil) - m.storageMock.On("GetProofsToAggregate", mock.MatchedBy(matchProverCtxFn), nil).Return(&proof1, &proof2, nil).Once() - m.storageMock. - On("UpdateGeneratedProof", mock.MatchedBy(matchProverCtxFn), &proof1, mock.Anything). - Run(func(args mock.Arguments) { - proofArg, ok := args[1].(*state.Proof) - assert.True(ok, "Expected argument of type *state.Proof") - assert.NotNil(proofArg.GeneratingSince, "Expected GeneratingSince to be not nil") - }). - Return(nil). - Once() - m.storageMock. - On("UpdateGeneratedProof", mock.MatchedBy(matchProverCtxFn), &proof2, mock.Anything). - Run(func(args mock.Arguments) { - proofArg, ok := args[1].(*state.Proof) - assert.True(ok, "Expected argument of type *state.Proof") - assert.NotNil(proofArg.GeneratingSince, "Expected GeneratingSince to be not nil") - }). - Return(nil). 
- Once() - - m.proverMock.On("AggregatedProof", proof1.Proof, proof2.Proof).Return(&proofID, nil).Once() - m.proverMock.On("WaitRecursiveProof", mock.MatchedBy(matchProverCtxFn), proofID).Return(recursiveProof, common.Hash{}, common.Hash{}, nil).Once() - m.storageMock.On("DeleteGeneratedProofs", mock.MatchedBy(matchProverCtxFn), proof1.BatchNumber, proof2.BatchNumberFinal, m.txerMock).Return(nil).Once() - expectedInputProver := map[string]interface{}{ - "recursive_proof_1": proof1.Proof, - "recursive_proof_2": proof2.Proof, - } - b, err := json.Marshal(expectedInputProver) - require.NoError(err) - m.storageMock.On("AddGeneratedProof", mock.MatchedBy(matchProverCtxFn), mock.Anything, mock.Anything).Run( - func(args mock.Arguments) { - proof, ok := args[1].(*state.Proof) - if !ok { - t.Fatalf("expected args[1] to be of type *state.Proof, got %T", args[1]) - } - assert.Equal(proof1.BatchNumber, proof.BatchNumber) - assert.Equal(proof2.BatchNumberFinal, proof.BatchNumberFinal) - assert.Equal(&proverName, proof.Prover) - assert.Equal(&proverID, proof.ProverID) - assert.Equal(string(b), proof.InputProver) - assert.Equal(recursiveProof, proof.Proof) - assert.InDelta(time.Now().Unix(), proof.GeneratingSince.Unix(), float64(time.Second)) - }, - ).Return(nil).Once() - - m.etherman.On("GetLatestVerifiedBatchNum").Return(uint64(42), errTest).Once() - m.storageMock.On("UpdateGeneratedProof", mock.MatchedBy(matchAggregatorCtxFn), mock.Anything, nil).Run( - func(args mock.Arguments) { - proof, ok := args[1].(*state.Proof) - if !ok { - t.Fatalf("expected args[1] to be of type *state.Proof, got %T", args[1]) - } - assert.Equal(proof1.BatchNumber, proof.BatchNumber) - assert.Equal(proof2.BatchNumberFinal, proof.BatchNumberFinal) - assert.Equal(&proverName, proof.Prover) - assert.Equal(&proverID, proof.ProverID) - assert.Equal(string(b), proof.InputProver) - assert.Equal(recursiveProof, proof.Proof) - assert.Nil(proof.GeneratingSince) - }, - ).Return(nil).Once() - }, - asserts: 
func(result bool, a *Aggregator, err error) { - assert.True(result) - assert.NoError(err) - }, - }, - } - - for _, tc := range testCases { - t.Run(tc.name, func(t *testing.T) { - storageMock := mocks.NewStorageInterfaceMock(t) - ethTxManager := mocks.NewEthTxManagerClientMock(t) - etherman := mocks.NewEthermanMock(t) - proverMock := mocks.NewProverInterfaceMock(t) - txerMock := mocks.NewTxerMock(t) - a := Aggregator{ - cfg: cfg, - storage: storageMock, - etherman: etherman, - ethTxManager: ethTxManager, - logger: log.GetDefaultLogger(), - storageMutex: &sync.Mutex{}, - timeSendFinalProofMutex: &sync.RWMutex{}, - timeCleanupLockedProofs: cfg.CleanupLockedProofsInterval, - finalProof: make(chan finalProofMsg), - accInputHashes: make(map[uint64]common.Hash), - accInputHashesMutex: &sync.Mutex{}, - } - aggregatorCtx := context.WithValue(context.Background(), "owner", ownerAggregator) //nolint:staticcheck - a.ctx, a.exit = context.WithCancel(aggregatorCtx) - m := mox{ - storageMock: storageMock, - ethTxManager: ethTxManager, - etherman: etherman, - proverMock: proverMock, - txerMock: txerMock, - } - if tc.setup != nil { - tc.setup(m, &a) - } - a.resetVerifyProofTime() - - result, err := a.tryAggregateProofs(proverCtx, proverMock) - - if tc.asserts != nil { - tc.asserts(result, &a, err) - } - }) - } -} - -func Test_tryGenerateBatchProof(t *testing.T) { - require := require.New(t) - assert := assert.New(t) - from := common.BytesToAddress([]byte("from")) - cfg := Config{ - VerifyProofInterval: types.Duration{Duration: time.Duration(10000000)}, - SenderAddress: from.Hex(), - IntervalAfterWhichBatchConsolidateAnyway: types.Duration{Duration: time.Second * 1}, - ChainID: uint64(1), - ForkId: uint64(12), - BatchProofSanityCheckEnabled: true, - } - lastVerifiedBatchNum := uint64(22) - - batchNum := uint64(23) - - batchToProve := state.Batch{ - BatchNumber: batchNum, - } - - proofID := "proofId" - - proverName := "proverName" - proverID := "proverID" - errTest := 
errors.New("test error") - errAIH := fmt.Errorf("failed to build input prover, acc input hash for previous batch (22) is not in memory") - proverCtx := context.WithValue(context.Background(), "owner", ownerProver) //nolint:staticcheck - matchProverCtxFn := func(ctx context.Context) bool { return ctx.Value("owner") == ownerProver } - matchAggregatorCtxFn := func(ctx context.Context) bool { return ctx.Value("owner") == ownerAggregator } - fixedTimestamp := time.Date(2023, 10, 13, 15, 0, 0, 0, time.UTC) - - l1InfoTreeLeaf := []synchronizer.L1InfoTreeLeaf{ - { - GlobalExitRoot: common.Hash{}, - PreviousBlockHash: common.Hash{}, - Timestamp: fixedTimestamp, - }, - { - GlobalExitRoot: common.Hash{}, - PreviousBlockHash: common.Hash{}, - Timestamp: fixedTimestamp, - }, - { - GlobalExitRoot: common.Hash{}, - PreviousBlockHash: common.Hash{}, - Timestamp: fixedTimestamp, - }, - { - GlobalExitRoot: common.Hash{}, - PreviousBlockHash: common.Hash{}, - Timestamp: fixedTimestamp, - }, - } - - testCases := []struct { - name string - setup func(mox, *Aggregator) - asserts func(bool, *Aggregator, error) - }{ - { - name: "getAndLockBatchToProve returns AIH error", - setup: func(m mox, a *Aggregator) { - sequence := synchronizer.SequencedBatches{ - FromBatchNumber: uint64(1), - ToBatchNumber: uint64(2), - } - l1InfoRoot := common.HexToHash("0x057e9950fbd39b002e323f37c2330d0c096e66919e24cc96fb4b2dfa8f4af782") - - virtualBatch := synchronizer.VirtualBatch{ - BatchNumber: 1, - BatchL2Data: []byte{ - 0xb, 0x0, 0x0, 0x0, 0x7b, 0x0, 0x0, 0x1, 0xc8, 0xb, 0x0, 0x0, 0x3, 0x15, 0x0, 0x1, 0x8a, 0xf8, - }, - L1InfoRoot: &l1InfoRoot, - } - rpcBatch := rpctypes.NewRPCBatch(lastVerifiedBatchNum+1, common.Hash{}, []string{}, []byte("batchL2Data"), common.Hash{}, common.BytesToHash([]byte("mock LocalExitRoot")), common.BytesToHash([]byte("mock StateRoot")), common.Address{}, false) - - m.proverMock.On("Name").Return(proverName) - m.proverMock.On("ID").Return(proverID) - 
m.proverMock.On("Addr").Return("addr") - m.etherman.On("GetLatestVerifiedBatchNum").Return(uint64(0), nil) - m.storageMock.On("CheckProofExistsForBatch", mock.Anything, uint64(1), nil).Return(false, nil) - m.synchronizerMock.On("GetSequenceByBatchNumber", mock.Anything, mock.Anything).Return(&sequence, nil) - m.synchronizerMock.On("GetVirtualBatchByBatchNumber", mock.Anything, mock.Anything).Return(&virtualBatch, nil) - m.synchronizerMock.On("GetL1BlockByNumber", mock.Anything, mock.Anything).Return(&synchronizer.L1Block{ParentHash: common.Hash{}}, nil) - m.rpcMock.On("GetBatch", mock.Anything).Return(rpcBatch, nil) - m.rpcMock.On("GetWitness", mock.Anything, false).Return([]byte("witness"), nil) - m.storageMock.On("AddGeneratedProof", mock.Anything, mock.Anything, nil).Return(nil) - m.storageMock.On("AddSequence", mock.Anything, mock.Anything, nil).Return(nil) - m.storageMock.On("DeleteGeneratedProofs", mock.Anything, uint64(1), uint64(1), nil).Return(nil) - m.synchronizerMock.On("GetLeafsByL1InfoRoot", mock.Anything, l1InfoRoot).Return(l1InfoTreeLeaf, nil) - m.synchronizerMock.On("GetL1InfoTreeLeaves", mock.Anything, mock.Anything).Return(map[uint32]synchronizer.L1InfoTreeLeaf{ - 1: { - BlockNumber: uint64(1), - }, - }, nil) - }, - asserts: func(result bool, a *Aggregator, err error) { - assert.False(result) - assert.ErrorContains(err, errAIH.Error()) - }, - }, - { - name: "getAndLockBatchToProve returns generic error", - setup: func(m mox, a *Aggregator) { - m.proverMock.On("Name").Return(proverName).Twice() - m.proverMock.On("ID").Return(proverID).Twice() - m.proverMock.On("Addr").Return("addr") - m.etherman.On("GetLatestVerifiedBatchNum").Return(uint64(0), errTest).Once() - }, - asserts: func(result bool, a *Aggregator, err error) { - assert.False(result) - assert.ErrorIs(err, errTest) - }, - }, - { - name: "getAndLockBatchToProve returns ErrNotFound", - setup: func(m mox, a *Aggregator) { - m.proverMock.On("Name").Return(proverName).Twice() - 
m.proverMock.On("ID").Return(proverID).Twice() - m.proverMock.On("Addr").Return("addr") - m.etherman.On("GetLatestVerifiedBatchNum").Return(uint64(0), state.ErrNotFound).Once() - }, - asserts: func(result bool, a *Aggregator, err error) { - assert.False(result) - assert.NoError(err) - }, - }, - { - name: "BatchProof prover error", - setup: func(m mox, a *Aggregator) { - m.proverMock.On("Name").Return(proverName).Twice() - m.proverMock.On("ID").Return(proverID).Twice() - m.proverMock.On("Addr").Return("addr").Twice() - - batchL2Data, err := hex.DecodeString(codedL2Block1) - require.NoError(err) - l1InfoRoot := common.HexToHash("0x057e9950fbd39b002e323f37c2330d0c096e66919e24cc96fb4b2dfa8f4af782") - - virtualBatch := synchronizer.VirtualBatch{ - BatchNumber: lastVerifiedBatchNum + 1, - BatchL2Data: batchL2Data, - L1InfoRoot: &l1InfoRoot, - } - - m.synchronizerMock.On("GetVirtualBatchByBatchNumber", mock.Anything, mock.Anything).Return(&virtualBatch, nil).Once() - m.etherman.On("GetLatestVerifiedBatchNum").Return(lastVerifiedBatchNum, nil).Once() - m.storageMock.On("CheckProofExistsForBatch", mock.MatchedBy(matchProverCtxFn), mock.Anything, nil).Return(true, nil) - m.storageMock.On("CleanupGeneratedProofs", mock.Anything, mock.Anything, mock.Anything).Return(nil).Once() - sequence := synchronizer.SequencedBatches{ - FromBatchNumber: uint64(10), - ToBatchNumber: uint64(20), - } - m.synchronizerMock.On("GetSequenceByBatchNumber", mock.Anything, mock.Anything).Return(&sequence, nil) - - rpcBatch := rpctypes.NewRPCBatch(lastVerifiedBatchNum+1, common.Hash{}, []string{}, batchL2Data, common.Hash{}, common.BytesToHash([]byte("mock LocalExitRoot")), common.BytesToHash([]byte("mock StateRoot")), common.Address{}, false) - rpcBatch.SetLastL2BLockTimestamp(uint64(time.Now().Unix())) - m.rpcMock.On("GetWitness", mock.Anything, false).Return([]byte("witness"), nil) - m.rpcMock.On("GetBatch", mock.Anything).Return(rpcBatch, nil) - m.storageMock.On("AddSequence", 
mock.MatchedBy(matchProverCtxFn), mock.Anything, nil).Return(nil).Once() - m.storageMock.On("AddGeneratedProof", mock.MatchedBy(matchProverCtxFn), mock.Anything, nil).Run( - func(args mock.Arguments) { - proof, ok := args[1].(*state.Proof) - if !ok { - t.Fatalf("expected args[1] to be of type *state.Proof, got %T", args[1]) - } - assert.Equal(batchToProve.BatchNumber, proof.BatchNumber) - assert.Equal(batchToProve.BatchNumber, proof.BatchNumberFinal) - assert.Equal(&proverName, proof.Prover) - assert.Equal(&proverID, proof.ProverID) - assert.InDelta(time.Now().Unix(), proof.GeneratingSince.Unix(), float64(time.Second)) - }, - ).Return(nil).Once() - m.synchronizerMock.On("GetLeafsByL1InfoRoot", mock.Anything, l1InfoRoot).Return(l1InfoTreeLeaf, nil) - m.synchronizerMock.On("GetL1InfoTreeLeaves", mock.Anything, mock.Anything).Return(map[uint32]synchronizer.L1InfoTreeLeaf{ - 1: { - BlockNumber: uint64(35), - }, - }, nil) - - m.proverMock.On("BatchProof", mock.Anything).Return(nil, errTest).Once() - m.storageMock.On("DeleteGeneratedProofs", mock.MatchedBy(matchAggregatorCtxFn), batchToProve.BatchNumber, batchToProve.BatchNumber, nil).Return(nil).Once() - }, - asserts: func(result bool, a *Aggregator, err error) { - assert.False(result) - assert.ErrorIs(err, errTest) - }, - }, - { - name: "WaitRecursiveProof prover error", - setup: func(m mox, a *Aggregator) { - m.proverMock.On("Name").Return(proverName).Twice() - m.proverMock.On("ID").Return(proverID).Twice() - m.proverMock.On("Addr").Return("addr").Twice() - - batchL2Data, err := hex.DecodeString(codedL2Block1) - require.NoError(err) - l1InfoRoot := common.HexToHash("0x057e9950fbd39b002e323f37c2330d0c096e66919e24cc96fb4b2dfa8f4af782") - - virtualBatch := synchronizer.VirtualBatch{ - BatchNumber: lastVerifiedBatchNum + 1, - BatchL2Data: batchL2Data, - L1InfoRoot: &l1InfoRoot, - } - - m.synchronizerMock.On("GetVirtualBatchByBatchNumber", mock.Anything, lastVerifiedBatchNum+1).Return(&virtualBatch, nil).Once() - - 
m.etherman.On("GetLatestVerifiedBatchNum").Return(lastVerifiedBatchNum, nil).Once() - m.storageMock.On("CheckProofExistsForBatch", mock.MatchedBy(matchProverCtxFn), mock.AnythingOfType("uint64"), nil).Return(false, nil).Once() - sequence := synchronizer.SequencedBatches{ - FromBatchNumber: uint64(10), - ToBatchNumber: uint64(20), - } - m.synchronizerMock.On("GetSequenceByBatchNumber", mock.MatchedBy(matchProverCtxFn), lastVerifiedBatchNum+1).Return(&sequence, nil).Once() - rpcBatch := rpctypes.NewRPCBatch(lastVerifiedBatchNum+1, common.Hash{}, []string{}, batchL2Data, common.Hash{}, common.BytesToHash([]byte("mock LocalExitRoot")), common.BytesToHash([]byte("mock StateRoot")), common.Address{}, false) - rpcBatch.SetLastL2BLockTimestamp(uint64(time.Now().Unix())) - m.rpcMock.On("GetWitness", lastVerifiedBatchNum+1, false).Return([]byte("witness"), nil) - m.storageMock.On("AddSequence", mock.MatchedBy(matchProverCtxFn), mock.Anything, nil).Return(nil).Once() - m.storageMock.On("AddGeneratedProof", mock.MatchedBy(matchProverCtxFn), mock.Anything, nil).Run( - func(args mock.Arguments) { - proof, ok := args[1].(*state.Proof) - if !ok { - t.Fatalf("expected args[1] to be of type *state.Proof, got %T", args[1]) - } - assert.Equal(batchToProve.BatchNumber, proof.BatchNumber) - assert.Equal(batchToProve.BatchNumber, proof.BatchNumberFinal) - assert.Equal(&proverName, proof.Prover) - assert.Equal(&proverID, proof.ProverID) - assert.InDelta(time.Now().Unix(), proof.GeneratingSince.Unix(), float64(time.Second)) - }, - ).Return(nil).Once() - - m.synchronizerMock.On("GetLeafsByL1InfoRoot", mock.Anything, l1InfoRoot).Return(l1InfoTreeLeaf, nil) - m.synchronizerMock.On("GetL1InfoTreeLeaves", mock.Anything, mock.Anything).Return(map[uint32]synchronizer.L1InfoTreeLeaf{ - 1: { - BlockNumber: uint64(35), - }, - }, nil) - - m.rpcMock.On("GetBatch", lastVerifiedBatchNum+1).Return(rpcBatch, nil) - m.proverMock.On("BatchProof", mock.Anything).Return(&proofID, nil).Once() - 
m.proverMock.On("WaitRecursiveProof", mock.MatchedBy(matchProverCtxFn), proofID).Return("", common.Hash{}, common.Hash{}, errTest).Once() - m.storageMock.On("DeleteGeneratedProofs", mock.MatchedBy(matchAggregatorCtxFn), batchToProve.BatchNumber, batchToProve.BatchNumber, nil).Return(nil) - }, - asserts: func(result bool, a *Aggregator, err error) { - assert.False(result) - assert.ErrorIs(err, errTest) - }, - }, - { - name: "WaitRecursiveProof no error", - setup: func(m mox, a *Aggregator) { - m.proverMock.On("Name").Return(proverName) - m.proverMock.On("ID").Return(proverID) - m.proverMock.On("Addr").Return("addr") - - batchL2Data, err := hex.DecodeString(codedL2Block1) - require.NoError(err) - l1InfoRoot := common.HexToHash("0x057e9950fbd39b002e323f37c2330d0c096e66919e24cc96fb4b2dfa8f4af782") - - virtualBatch := synchronizer.VirtualBatch{ - BatchNumber: lastVerifiedBatchNum + 1, - BatchL2Data: batchL2Data, - L1InfoRoot: &l1InfoRoot, - } - - m.synchronizerMock.On("GetVirtualBatchByBatchNumber", mock.Anything, lastVerifiedBatchNum+1).Return(&virtualBatch, nil) - - m.etherman.On("GetLatestVerifiedBatchNum").Return(lastVerifiedBatchNum, nil) - m.storageMock.On("CheckProofExistsForBatch", mock.MatchedBy(matchProverCtxFn), mock.AnythingOfType("uint64"), nil).Return(false, nil) - sequence := synchronizer.SequencedBatches{ - FromBatchNumber: uint64(10), - ToBatchNumber: uint64(20), - } - m.synchronizerMock.On("GetSequenceByBatchNumber", mock.MatchedBy(matchProverCtxFn), lastVerifiedBatchNum+1).Return(&sequence, nil) - rpcBatch := rpctypes.NewRPCBatch(lastVerifiedBatchNum+1, common.Hash{}, []string{}, batchL2Data, common.Hash{}, common.BytesToHash([]byte("mock LocalExitRoot")), common.BytesToHash([]byte("mock StateRoot")), common.Address{}, false) - rpcBatch.SetLastL2BLockTimestamp(uint64(time.Now().Unix())) - m.rpcMock.On("GetWitness", lastVerifiedBatchNum+1, false).Return([]byte("witness"), nil) - m.storageMock.On("AddSequence", mock.MatchedBy(matchProverCtxFn), 
mock.Anything, nil).Return(nil) - m.storageMock.On("AddGeneratedProof", mock.MatchedBy(matchProverCtxFn), mock.Anything, nil).Run( - func(args mock.Arguments) { - proof, ok := args[1].(*state.Proof) - if !ok { - t.Fatalf("expected args[1] to be of type *state.Proof, got %T", args[1]) - } - assert.Equal(batchToProve.BatchNumber, proof.BatchNumber) - assert.Equal(batchToProve.BatchNumber, proof.BatchNumberFinal) - assert.Equal(&proverName, proof.Prover) - assert.Equal(&proverID, proof.ProverID) - assert.InDelta(time.Now().Unix(), proof.GeneratingSince.Unix(), float64(time.Second)) - }, - ).Return(nil) - - m.synchronizerMock.On("GetLeafsByL1InfoRoot", mock.Anything, l1InfoRoot).Return(l1InfoTreeLeaf, nil) - m.synchronizerMock.On("GetL1InfoTreeLeaves", mock.Anything, mock.Anything).Return(map[uint32]synchronizer.L1InfoTreeLeaf{ - 1: { - BlockNumber: uint64(35), - }, - }, nil) - - m.rpcMock.On("GetBatch", lastVerifiedBatchNum+1).Return(rpcBatch, nil) - m.proverMock.On("BatchProof", mock.Anything).Return(&proofID, nil) - m.proverMock.On("WaitRecursiveProof", mock.MatchedBy(matchProverCtxFn), proofID).Return("", common.Hash{}, common.Hash{}, nil) - m.storageMock.On("UpdateGeneratedProof", mock.Anything, mock.Anything, nil).Return(nil) - }, - asserts: func(result bool, a *Aggregator, err error) { - assert.True(result) - assert.NoError(err) - }, - }, - { - name: "DeleteBatchProofs error after WaitRecursiveProof prover error", - setup: func(m mox, a *Aggregator) { - m.proverMock.On("Name").Return(proverName).Twice() - m.proverMock.On("ID").Return(proverID).Twice() - m.proverMock.On("Addr").Return("addr").Twice() - - batchL2Data, err := hex.DecodeString(codedL2Block1) - require.NoError(err) - l1InfoRoot := common.HexToHash("0x057e9950fbd39b002e323f37c2330d0c096e66919e24cc96fb4b2dfa8f4af782") - - m.etherman.On("GetLatestVerifiedBatchNum").Return(lastVerifiedBatchNum, nil).Once() - m.storageMock.On("CheckProofExistsForBatch", mock.MatchedBy(matchProverCtxFn), 
mock.AnythingOfType("uint64"), nil).Return(false, nil).Once() - sequence := synchronizer.SequencedBatches{ - FromBatchNumber: uint64(10), - ToBatchNumber: uint64(20), - } - m.synchronizerMock.On("GetSequenceByBatchNumber", mock.MatchedBy(matchProverCtxFn), lastVerifiedBatchNum+1).Return(&sequence, nil).Once() - rpcBatch := rpctypes.NewRPCBatch(lastVerifiedBatchNum+1, common.Hash{}, []string{}, batchL2Data, common.Hash{}, common.BytesToHash([]byte("mock LocalExitRoot")), common.BytesToHash([]byte("mock StateRoot")), common.Address{}, false) - rpcBatch.SetLastL2BLockTimestamp(uint64(time.Now().Unix())) - m.rpcMock.On("GetBatch", lastVerifiedBatchNum+1).Return(rpcBatch, nil) - m.storageMock.On("AddSequence", mock.MatchedBy(matchProverCtxFn), mock.Anything, nil).Return(nil).Once() - m.storageMock.On("AddGeneratedProof", mock.MatchedBy(matchProverCtxFn), mock.Anything, nil).Run( - func(args mock.Arguments) { - proof, ok := args[1].(*state.Proof) - if !ok { - t.Fatalf("expected args[1] to be of type *state.Proof, got %T", args[1]) - } - assert.Equal(batchToProve.BatchNumber, proof.BatchNumber) - assert.Equal(batchToProve.BatchNumber, proof.BatchNumberFinal) - assert.Equal(&proverName, proof.Prover) - assert.Equal(&proverID, proof.ProverID) - assert.InDelta(time.Now().Unix(), proof.GeneratingSince.Unix(), float64(time.Second)) - }, - ).Return(nil).Once() - - m.synchronizerMock.On("GetLeafsByL1InfoRoot", mock.Anything, l1InfoRoot).Return(l1InfoTreeLeaf, nil) - m.synchronizerMock.On("GetL1InfoTreeLeaves", mock.Anything, mock.Anything).Return(map[uint32]synchronizer.L1InfoTreeLeaf{ - 1: { - BlockNumber: uint64(35), - }, - }, nil) - - m.rpcMock.On("GetWitness", lastVerifiedBatchNum+1, false).Return([]byte("witness"), nil) - - virtualBatch := synchronizer.VirtualBatch{ - BatchNumber: lastVerifiedBatchNum + 1, - BatchL2Data: batchL2Data, - L1InfoRoot: &l1InfoRoot, - } - - m.synchronizerMock.On("GetVirtualBatchByBatchNumber", mock.Anything, 
lastVerifiedBatchNum+1).Return(&virtualBatch, nil).Once() - - m.proverMock.On("BatchProof", mock.Anything).Return(&proofID, nil).Once() - m.proverMock.On("WaitRecursiveProof", mock.MatchedBy(matchProverCtxFn), proofID).Return("", common.Hash{}, common.Hash{}, errTest).Once() - m.storageMock.On("DeleteGeneratedProofs", mock.MatchedBy(matchAggregatorCtxFn), batchToProve.BatchNumber, batchToProve.BatchNumber, nil).Return(errTest).Once() - }, - asserts: func(result bool, a *Aggregator, err error) { - assert.False(result) - assert.ErrorIs(err, errTest) - }, - }, - } - - for x, tc := range testCases { - t.Run(tc.name, func(t *testing.T) { - storageMock := mocks.NewStorageInterfaceMock(t) - ethTxManager := mocks.NewEthTxManagerClientMock(t) - etherman := mocks.NewEthermanMock(t) - proverMock := mocks.NewProverInterfaceMock(t) - synchronizerMock := mocks.NewSynchronizerInterfaceMock(t) - mockRPC := mocks.NewRPCInterfaceMock(t) - - a := Aggregator{ - cfg: cfg, - storage: storageMock, - etherman: etherman, - ethTxManager: ethTxManager, - logger: log.GetDefaultLogger(), - storageMutex: &sync.Mutex{}, - timeSendFinalProofMutex: &sync.RWMutex{}, - timeCleanupLockedProofs: cfg.CleanupLockedProofsInterval, - finalProof: make(chan finalProofMsg), - l1Syncr: synchronizerMock, - rpcClient: mockRPC, - accInputHashes: make(map[uint64]common.Hash), - accInputHashesMutex: &sync.Mutex{}, - } - if x > 0 { - a.accInputHashes = populateAccInputHashes() - } - aggregatorCtx := context.WithValue(context.Background(), "owner", ownerAggregator) //nolint:staticcheck - a.ctx, a.exit = context.WithCancel(aggregatorCtx) - - m := mox{ - storageMock: storageMock, - ethTxManager: ethTxManager, - etherman: etherman, - proverMock: proverMock, - synchronizerMock: synchronizerMock, - rpcMock: mockRPC, - } - if tc.setup != nil { - tc.setup(m, &a) - } - a.resetVerifyProofTime() - - result, err := a.tryGenerateBatchProof(proverCtx, proverMock) - - if tc.asserts != nil { - tc.asserts(result, &a, err) - } - }) 
- } -} - -func populateAccInputHashes() map[uint64]common.Hash { - accInputHashes := make(map[uint64]common.Hash) - for i := 10; i < 200; i++ { - accInputHashes[uint64(i)] = common.BytesToHash([]byte(fmt.Sprintf("hash%d", i))) - } - return accInputHashes -} - -func Test_accInputHashFunctions(t *testing.T) { - aggregator := Aggregator{ - accInputHashes: make(map[uint64]common.Hash), - accInputHashesMutex: &sync.Mutex{}, - } - - hash1 := common.BytesToHash([]byte("hash1")) - hash2 := common.BytesToHash([]byte("hash2")) - - aggregator.setAccInputHash(1, hash1) - aggregator.setAccInputHash(2, hash2) - - assert.Equal(t, 2, len(aggregator.accInputHashes)) - - hash3 := aggregator.getAccInputHash(1) - assert.Equal(t, hash1, hash3) - - aggregator.removeAccInputHashes(1, 2) - assert.Equal(t, 0, len(aggregator.accInputHashes)) -} - -func Test_sanityChecks(t *testing.T) { - batchToProve := state.Batch{ - BatchNumber: 1, - StateRoot: common.HexToHash("0x01"), - AccInputHash: common.HexToHash("0x02"), - } - - aggregator := Aggregator{ - accInputHashes: make(map[uint64]common.Hash), - accInputHashesMutex: &sync.Mutex{}, - } - - aggregator.performSanityChecks(log.GetDefaultLogger(), batchToProve.StateRoot, batchToProve.AccInputHash, &batchToProve) - - // Halt by SR sanity check - go func() { - aggregator.performSanityChecks(log.GetDefaultLogger(), common.HexToHash("0x03"), batchToProve.AccInputHash, &batchToProve) - time.Sleep(5 * time.Second) - return - }() - - // Halt by AIH sanity check - go func() { - aggregator.performSanityChecks(log.GetDefaultLogger(), batchToProve.StateRoot, common.HexToHash("0x04"), &batchToProve) - time.Sleep(5 * time.Second) - return - }() -} diff --git a/aggregator/config.go b/aggregator/config.go deleted file mode 100644 index 9ccb3b0f..00000000 --- a/aggregator/config.go +++ /dev/null @@ -1,139 +0,0 @@ -package aggregator - -import ( - "fmt" - "math/big" - - "github.com/0xPolygon/zkevm-ethtx-manager/ethtxmanager" - syncronizerConfig 
"github.com/0xPolygonHermez/zkevm-synchronizer-l1/config" - "github.com/agglayer/aggkit/config/types" - "github.com/agglayer/aggkit/log" -) - -// SettlementBackend is the type of the settlement backend -type SettlementBackend string - -const ( - // AggLayer settlement backend - AggLayer SettlementBackend = "agglayer" - - // L1 settlement backend - L1 SettlementBackend = "l1" - - // TenToThePowerOf18 represents 1000000000000000000 - TenToThePowerOf18 = 1000000000000000000 -) - -// TokenAmountWithDecimals is a wrapper type that parses token amount with decimals to big int -type TokenAmountWithDecimals struct { - *big.Int `validate:"required"` -} - -// UnmarshalText unmarshal token amount from float string to big int -func (t *TokenAmountWithDecimals) UnmarshalText(data []byte) error { - amount, ok := new(big.Float).SetString(string(data)) - if !ok { - return fmt.Errorf("failed to unmarshal string to float") - } - coin := new(big.Float).SetInt(big.NewInt(TenToThePowerOf18)) - bigval := new(big.Float).Mul(amount, coin) - result := new(big.Int) - bigval.Int(result) - t.Int = result - - return nil -} - -// Config represents the configuration of the aggregator -type Config struct { - // Host for the grpc server - Host string `mapstructure:"Host"` - // Port for the grpc server - Port int `mapstructure:"Port"` - - // RetryTime is the time the aggregator main loop sleeps if there are no proofs to aggregate - // or batches to generate proofs. 
It is also used in the isSynced loop - RetryTime types.Duration `mapstructure:"RetryTime"` - - // VerifyProofInterval is the interval of time to verify/send an proof in L1 - VerifyProofInterval types.Duration `mapstructure:"VerifyProofInterval"` - - // ProofStatePollingInterval is the interval time to polling the prover about the generation state of a proof - ProofStatePollingInterval types.Duration `mapstructure:"ProofStatePollingInterval"` - - // IntervalAfterWhichBatchConsolidateAnyway is the interval duration for the main sequencer to check - // if there are no transactions. If there are no transactions in this interval, the sequencer will - // consolidate the batch anyway. - IntervalAfterWhichBatchConsolidateAnyway types.Duration `mapstructure:"IntervalAfterWhichBatchConsolidateAnyway"` - - // BatchProofSanityCheckEnabled is a flag to enable the sanity check of the batch proof - BatchProofSanityCheckEnabled bool `mapstructure:"BatchProofSanityCheckEnabled"` - - // ChainID is the L2 ChainID provided by the Network Config - ChainID uint64 - - // ForkID is the L2 ForkID provided by the Network Config - ForkId uint64 `mapstructure:"ForkId"` //nolint:stylecheck - - // SenderAddress defines which private key the eth tx manager needs to use - // to sign the L1 txs - SenderAddress string `mapstructure:"SenderAddress"` - - // CleanupLockedProofsInterval is the interval of time to clean up locked proofs. - CleanupLockedProofsInterval types.Duration `mapstructure:"CleanupLockedProofsInterval"` - - // GeneratingProofCleanupThreshold represents the time interval after - // which a proof in generating state is considered to be stuck and - // allowed to be cleared. - GeneratingProofCleanupThreshold string `mapstructure:"GeneratingProofCleanupThreshold"` - - // GasOffset is the amount of gas to be added to the gas estimation in order - // to provide an amount that is higher than the estimated one. 
This is used - // to avoid the TX getting reverted in case something has changed in the network - // state after the estimation which can cause the TX to require more gas to be - // executed. - // - // ex: - // gas estimation: 1000 - // gas offset: 100 - // final gas: 1100 - GasOffset uint64 `mapstructure:"GasOffset"` - - // RPCURL is the URL of the RPC server - RPCURL string `mapstructure:"RPCURL"` - - // WitnessURL is the URL of the witness server - WitnessURL string `mapstructure:"WitnessURL"` - - // UseFullWitness is a flag to enable the use of full witness in the aggregator - UseFullWitness bool `mapstructure:"UseFullWitness"` - - // DBPath is the path to the database - DBPath string `mapstructure:"DBPath"` - - // EthTxManager is the config for the ethtxmanager - EthTxManager ethtxmanager.Config `mapstructure:"EthTxManager"` - - // Log is the log configuration - Log log.Config `mapstructure:"Log"` - - // Synchornizer config - Synchronizer syncronizerConfig.Config `mapstructure:"Synchronizer"` - - // SettlementBackend configuration defines how a final ZKP should be settled. - // It can be settled directly to L1 or over Agglayer. - SettlementBackend SettlementBackend `mapstructure:"SettlementBackend" jsonschema:"enum=agglayer,enum=l1"` - - // SequencerPrivateKey Private key of the trusted sequencer - SequencerPrivateKey types.KeystoreFileConfig `mapstructure:"SequencerPrivateKey"` - - // AggLayerTxTimeout is the interval time to wait for a tx to be mined from the agglayer - AggLayerTxTimeout types.Duration `mapstructure:"AggLayerTxTimeout"` - - // AggLayerURL url of the agglayer service - AggLayerURL string `mapstructure:"AggLayerURL"` - - // SyncModeOnlyEnabled is a flag that activates sync mode exclusively. - // When enabled, the aggregator will sync data only from L1 and will not generate or read the data stream. 
- SyncModeOnlyEnabled bool `mapstructure:"SyncModeOnlyEnabled"` -} diff --git a/aggregator/db/dbstorage/dbstorage.go b/aggregator/db/dbstorage/dbstorage.go deleted file mode 100644 index 05af9aa7..00000000 --- a/aggregator/db/dbstorage/dbstorage.go +++ /dev/null @@ -1,35 +0,0 @@ -package dbstorage - -import ( - "context" - "database/sql" - - "github.com/agglayer/aggkit/db" -) - -// DBStorage implements the Storage interface -type DBStorage struct { - DB *sql.DB -} - -// NewDBStorage creates a new DBStorage instance -func NewDBStorage(dbPath string) (*DBStorage, error) { - db, err := db.NewSQLiteDB(dbPath) - if err != nil { - return nil, err - } - - return &DBStorage{DB: db}, nil -} - -func (d *DBStorage) BeginTx(ctx context.Context, options *sql.TxOptions) (db.Txer, error) { - return db.NewTx(ctx, d.DB) -} - -func (d *DBStorage) getExecQuerier(dbTx db.Txer) db.Querier { - if dbTx == nil { - return d.DB - } - - return dbTx -} diff --git a/aggregator/db/dbstorage/proof.go b/aggregator/db/dbstorage/proof.go deleted file mode 100644 index 969d5e7e..00000000 --- a/aggregator/db/dbstorage/proof.go +++ /dev/null @@ -1,356 +0,0 @@ -package dbstorage - -import ( - "context" - "database/sql" - "errors" - "fmt" - "time" - - "github.com/agglayer/aggkit/db" - "github.com/agglayer/aggkit/state" -) - -// CheckProofExistsForBatch checks if the batch is already included in any proof -func (d *DBStorage) CheckProofExistsForBatch(ctx context.Context, batchNumber uint64, dbTx db.Txer) (bool, error) { - const checkProofExistsForBatchSQL = ` - SELECT EXISTS (SELECT 1 FROM proof p WHERE $1 >= p.batch_num AND $1 <= p.batch_num_final) - ` - e := d.getExecQuerier(dbTx) - var exists bool - err := e.QueryRow(checkProofExistsForBatchSQL, batchNumber).Scan(&exists) - if err != nil && !errors.Is(err, sql.ErrNoRows) { - return exists, err - } - return exists, nil -} - -// CheckProofContainsCompleteSequences checks if a recursive proof contains complete sequences -func (d *DBStorage) 
CheckProofContainsCompleteSequences( - ctx context.Context, proof *state.Proof, dbTx db.Txer, -) (bool, error) { - const getProofContainsCompleteSequencesSQL = ` - SELECT EXISTS (SELECT 1 FROM sequence s1 WHERE s1.from_batch_num = $1) AND - EXISTS (SELECT 1 FROM sequence s2 WHERE s2.to_batch_num = $2) - ` - e := d.getExecQuerier(dbTx) - var exists bool - err := e.QueryRow(getProofContainsCompleteSequencesSQL, proof.BatchNumber, proof.BatchNumberFinal).Scan(&exists) - if err != nil && !errors.Is(err, sql.ErrNoRows) { - return exists, err - } - return exists, nil -} - -// GetProofReadyToVerify return the proof that is ready to verify -func (d *DBStorage) GetProofReadyToVerify( - ctx context.Context, lastVerfiedBatchNumber uint64, dbTx db.Txer, -) (*state.Proof, error) { - const getProofReadyToVerifySQL = ` - SELECT - p.batch_num, - p.batch_num_final, - p.proof, - p.proof_id, - p.input_prover, - p.prover, - p.prover_id, - p.generating_since, - p.created_at, - p.updated_at - FROM proof p - WHERE batch_num = $1 AND generating_since IS NULL AND - EXISTS (SELECT 1 FROM sequence s1 WHERE s1.from_batch_num = p.batch_num) AND - EXISTS (SELECT 1 FROM sequence s2 WHERE s2.to_batch_num = p.batch_num_final) - ` - - var proof = &state.Proof{} - - e := d.getExecQuerier(dbTx) - row := e.QueryRow(getProofReadyToVerifySQL, lastVerfiedBatchNumber+1) - - var ( - generatingSince *uint64 - createdAt *uint64 - updatedAt *uint64 - ) - err := row.Scan( - &proof.BatchNumber, &proof.BatchNumberFinal, &proof.Proof, &proof.ProofID, - &proof.InputProver, &proof.Prover, &proof.ProverID, &generatingSince, - &createdAt, &updatedAt, - ) - - if generatingSince != nil { - timeSince := time.Unix(int64(*generatingSince), 0) - proof.GeneratingSince = &timeSince - } - - if createdAt != nil { - proof.CreatedAt = time.Unix(int64(*createdAt), 0) - } - - if updatedAt != nil { - proof.UpdatedAt = time.Unix(int64(*updatedAt), 0) - } - - if errors.Is(err, sql.ErrNoRows) { - return nil, state.ErrNotFound - } else 
if err != nil { - return nil, err - } - - return proof, err -} - -// GetProofsToAggregate return the next to proof that it is possible to aggregate -func (d *DBStorage) GetProofsToAggregate(ctx context.Context, dbTx db.Txer) (*state.Proof, *state.Proof, error) { - var ( - proof1 = &state.Proof{} - proof2 = &state.Proof{} - ) - - // TODO: add comments to explain the query - const getProofsToAggregateSQL = ` - SELECT - p1.batch_num as p1_batch_num, - p1.batch_num_final as p1_batch_num_final, - p1.proof as p1_proof, - p1.proof_id as p1_proof_id, - p1.input_prover as p1_input_prover, - p1.prover as p1_prover, - p1.prover_id as p1_prover_id, - p1.generating_since as p1_generating_since, - p1.created_at as p1_created_at, - p1.updated_at as p1_updated_at, - p2.batch_num as p2_batch_num, - p2.batch_num_final as p2_batch_num_final, - p2.proof as p2_proof, - p2.proof_id as p2_proof_id, - p2.input_prover as p2_input_prover, - p2.prover as p2_prover, - p2.prover_id as p2_prover_id, - p2.generating_since as p2_generating_since, - p2.created_at as p2_created_at, - p2.updated_at as p2_updated_at - FROM proof p1 INNER JOIN proof p2 ON p1.batch_num_final = p2.batch_num - 1 - WHERE p1.generating_since IS NULL AND p2.generating_since IS NULL AND - p1.proof IS NOT NULL AND p2.proof IS NOT NULL AND - ( - EXISTS ( - SELECT 1 FROM sequence s - WHERE p1.batch_num >= s.from_batch_num AND p1.batch_num <= s.to_batch_num AND - p1.batch_num_final >= s.from_batch_num AND p1.batch_num_final <= s.to_batch_num AND - p2.batch_num >= s.from_batch_num AND p2.batch_num <= s.to_batch_num AND - p2.batch_num_final >= s.from_batch_num AND p2.batch_num_final <= s.to_batch_num - ) - OR - ( - EXISTS ( SELECT 1 FROM sequence s WHERE p1.batch_num = s.from_batch_num) AND - EXISTS ( SELECT 1 FROM sequence s WHERE p1.batch_num_final = s.to_batch_num) AND - EXISTS ( SELECT 1 FROM sequence s WHERE p2.batch_num = s.from_batch_num) AND - EXISTS ( SELECT 1 FROM sequence s WHERE p2.batch_num_final = s.to_batch_num) - ) 
- ) - ORDER BY p1.batch_num ASC - LIMIT 1 - ` - - e := d.getExecQuerier(dbTx) - row := e.QueryRow(getProofsToAggregateSQL) - - var ( - generatingSince1, generatingSince2 *uint64 - createdAt1, createdAt2 *uint64 - updatedAt1, updatedAt2 *uint64 - ) - - err := row.Scan( - &proof1.BatchNumber, &proof1.BatchNumberFinal, &proof1.Proof, &proof1.ProofID, - &proof1.InputProver, &proof1.Prover, &proof1.ProverID, &generatingSince1, - &createdAt1, &updatedAt1, - &proof2.BatchNumber, &proof2.BatchNumberFinal, &proof2.Proof, &proof2.ProofID, - &proof2.InputProver, &proof2.Prover, &proof2.ProverID, &generatingSince2, - &createdAt2, &updatedAt2, - ) - - if generatingSince1 != nil { - timeSince1 := time.Unix(int64(*generatingSince1), 0) - proof1.GeneratingSince = &timeSince1 - } - - if generatingSince2 != nil { - timeSince2 := time.Unix(int64(*generatingSince2), 0) - proof2.GeneratingSince = &timeSince2 - } - - if createdAt1 != nil { - proof1.CreatedAt = time.Unix(int64(*createdAt1), 0) - } - - if createdAt2 != nil { - proof2.CreatedAt = time.Unix(int64(*createdAt2), 0) - } - - if updatedAt1 != nil { - proof1.UpdatedAt = time.Unix(int64(*updatedAt1), 0) - } - - if updatedAt2 != nil { - proof2.UpdatedAt = time.Unix(int64(*updatedAt2), 0) - } - - if errors.Is(err, sql.ErrNoRows) { - return nil, nil, state.ErrNotFound - } else if err != nil { - return nil, nil, err - } - - return proof1, proof2, err -} - -// AddGeneratedProof adds a generated proof to the storage -func (d *DBStorage) AddGeneratedProof(ctx context.Context, proof *state.Proof, dbTx db.Txer) error { - const addGeneratedProofSQL = ` - INSERT INTO proof ( - batch_num, batch_num_final, proof, proof_id, input_prover, prover, - prover_id, generating_since, created_at, updated_at - ) VALUES ( - $1, $2, $3, $4, $5, $6, $7, $8, $9, $10 - ) - ` - e := d.getExecQuerier(dbTx) - now := time.Now().UTC().Round(time.Microsecond) - - var ( - generatingSince *uint64 - createdAt *uint64 - updatedAt *uint64 - ) - - if 
proof.GeneratingSince != nil { - generatingSince = new(uint64) - *generatingSince = uint64(proof.GeneratingSince.Unix()) - } - - if !proof.CreatedAt.IsZero() { - createdAt = new(uint64) - *createdAt = uint64(proof.CreatedAt.Unix()) - } else { - createdAt = new(uint64) - *createdAt = uint64(now.Unix()) - } - - if !proof.UpdatedAt.IsZero() { - updatedAt = new(uint64) - *updatedAt = uint64(proof.UpdatedAt.Unix()) - } else { - updatedAt = new(uint64) - *updatedAt = uint64(now.Unix()) - } - - _, err := e.Exec( - addGeneratedProofSQL, proof.BatchNumber, proof.BatchNumberFinal, proof.Proof, proof.ProofID, - proof.InputProver, proof.Prover, proof.ProverID, generatingSince, createdAt, updatedAt, - ) - return err -} - -// UpdateGeneratedProof updates a generated proof in the storage -func (d *DBStorage) UpdateGeneratedProof(ctx context.Context, proof *state.Proof, dbTx db.Txer) error { - const updateGeneratedProofSQL = ` - UPDATE proof - SET proof = $3, - proof_id = $4, - input_prover = $5, - prover = $6, - prover_id = $7, - generating_since = $8, - updated_at = $9 - WHERE batch_num = $1 - AND batch_num_final = $2 - ` - e := d.getExecQuerier(dbTx) - now := time.Now().UTC().Round(time.Microsecond) - - var ( - generatingSince *uint64 - updatedAt *uint64 - ) - - if proof.GeneratingSince != nil { - generatingSince = new(uint64) - *generatingSince = uint64(proof.GeneratingSince.Unix()) - } - - if !proof.UpdatedAt.IsZero() { - updatedAt = new(uint64) - *updatedAt = uint64(proof.UpdatedAt.Unix()) - } else { - updatedAt = new(uint64) - *updatedAt = uint64(now.Unix()) - } - _, err := e.Exec( - updateGeneratedProofSQL, proof.Proof, proof.ProofID, proof.InputProver, - proof.Prover, proof.ProverID, generatingSince, updatedAt, proof.BatchNumber, proof.BatchNumberFinal, - ) - return err -} - -// DeleteGeneratedProofs deletes from the storage the generated proofs falling -// inside the batch numbers range. 
-func (d *DBStorage) DeleteGeneratedProofs( - ctx context.Context, batchNumber uint64, batchNumberFinal uint64, dbTx db.Txer, -) error { - const deleteGeneratedProofSQL = "DELETE FROM proof WHERE batch_num >= $1 AND batch_num_final <= $2" - e := d.getExecQuerier(dbTx) - _, err := e.Exec(deleteGeneratedProofSQL, batchNumber, batchNumberFinal) - return err -} - -// CleanupGeneratedProofs deletes from the storage the generated proofs up to -// the specified batch number included. -func (d *DBStorage) CleanupGeneratedProofs(ctx context.Context, batchNumber uint64, dbTx db.Txer) error { - const deleteGeneratedProofSQL = "DELETE FROM proof WHERE batch_num_final <= $1" - e := d.getExecQuerier(dbTx) - _, err := e.Exec(deleteGeneratedProofSQL, batchNumber) - return err -} - -// CleanupLockedProofs deletes from the storage the proofs locked in generating -// state for more than the provided threshold. -func (d *DBStorage) CleanupLockedProofs(ctx context.Context, duration string, dbTx db.Txer) (int64, error) { - seconds, err := convertDurationToSeconds(duration) - if err != nil { - return 0, err - } - - difference := time.Now().Unix() - seconds - - sql := fmt.Sprintf("DELETE FROM proof WHERE generating_since is not null and generating_since < %d", difference) - e := d.getExecQuerier(dbTx) - ct, err := e.Exec(sql) - if err != nil { - return 0, err - } - return ct.RowsAffected() -} - -// DeleteUngeneratedProofs deletes ungenerated proofs. 
-// This method is meant to be use during aggregator boot-up sequence -func (d *DBStorage) DeleteUngeneratedProofs(ctx context.Context, dbTx db.Txer) error { - const deleteUngeneratedProofsSQL = "DELETE FROM proof WHERE generating_since IS NOT NULL" - e := d.getExecQuerier(dbTx) - _, err := e.Exec(deleteUngeneratedProofsSQL) - return err -} - -func convertDurationToSeconds(duration string) (int64, error) { - // Parse the duration using time.ParseDuration - parsedDuration, err := time.ParseDuration(duration) - if err != nil { - return 0, fmt.Errorf("invalid duration format: %w", err) - } - - // Return the duration in seconds - return int64(parsedDuration.Seconds()), nil -} diff --git a/aggregator/db/dbstorage/proof_test.go b/aggregator/db/dbstorage/proof_test.go deleted file mode 100644 index 18408318..00000000 --- a/aggregator/db/dbstorage/proof_test.go +++ /dev/null @@ -1,151 +0,0 @@ -package dbstorage - -import ( - "context" - "math" - "path" - "testing" - "time" - - "github.com/agglayer/aggkit/aggregator/db" - "github.com/agglayer/aggkit/state" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -var ( - proofID = "proof_1" - prover = "prover_1" - proverID = "prover_id" -) - -func Test_Proof(t *testing.T) { - dbPath := path.Join(t.TempDir(), "Test_Proof.sqlite") - err := db.RunMigrationsUp(dbPath, db.AggregatorMigrationName) - assert.NoError(t, err) - - ctx := context.Background() - now := time.Now() - - DBStorage, err := NewDBStorage(dbPath) - assert.NoError(t, err) - - dbtxer, err := DBStorage.BeginTx(ctx, nil) - require.NoError(t, err) - - exists, err := DBStorage.CheckProofExistsForBatch(ctx, 1, dbtxer) - assert.NoError(t, err) - assert.False(t, exists) - - proof := state.Proof{ - BatchNumber: 1, - BatchNumberFinal: 1, - Proof: "proof content", - InputProver: "input prover", - ProofID: &proofID, - Prover: &prover, - ProverID: &proofID, - GeneratingSince: nil, - CreatedAt: now, - UpdatedAt: now, - } - - err = 
DBStorage.AddGeneratedProof(ctx, &proof, dbtxer) - assert.NoError(t, err) - - err = DBStorage.AddSequence(ctx, state.Sequence{FromBatchNumber: 1, ToBatchNumber: 1}, dbtxer) - assert.NoError(t, err) - - contains, err := DBStorage.CheckProofContainsCompleteSequences(ctx, &proof, dbtxer) - assert.NoError(t, err) - assert.True(t, contains) - - proof2, err := DBStorage.GetProofReadyToVerify(ctx, 0, dbtxer) - assert.NoError(t, err) - assert.NotNil(t, proof2) - - require.Equal(t, proof.BatchNumber, proof2.BatchNumber) - require.Equal(t, proof.BatchNumberFinal, proof2.BatchNumberFinal) - require.Equal(t, proof.Proof, proof2.Proof) - require.Equal(t, *proof.ProofID, *proof2.ProofID) - require.Equal(t, proof.InputProver, proof2.InputProver) - require.Equal(t, *proof.Prover, *proof2.Prover) - require.Equal(t, *proof.ProverID, *proof2.ProverID) - require.Equal(t, proof.CreatedAt.Unix(), proof2.CreatedAt.Unix()) - require.Equal(t, proof.UpdatedAt.Unix(), proof2.UpdatedAt.Unix()) - - proof = state.Proof{ - BatchNumber: 1, - BatchNumberFinal: 1, - Proof: "proof content", - InputProver: "input prover", - ProofID: &proofID, - Prover: &prover, - ProverID: &proofID, - GeneratingSince: &now, - CreatedAt: time.Now(), - UpdatedAt: time.Now(), - } - - err = DBStorage.UpdateGeneratedProof(ctx, &proof, dbtxer) - assert.NoError(t, err) - - sequence := state.Sequence{FromBatchNumber: 3, ToBatchNumber: 4} - - proof3 := state.Proof{ - BatchNumber: 3, - BatchNumberFinal: 3, - GeneratingSince: nil, - } - - proof4 := state.Proof{ - BatchNumber: 4, - BatchNumberFinal: 4, - GeneratingSince: nil, - } - - err = DBStorage.AddSequence(ctx, sequence, dbtxer) - assert.NoError(t, err) - - err = DBStorage.AddGeneratedProof(ctx, &proof3, dbtxer) - assert.NoError(t, err) - - err = DBStorage.AddGeneratedProof(ctx, &proof4, dbtxer) - assert.NoError(t, err) - - proof5, proof6, err := DBStorage.GetProofsToAggregate(ctx, dbtxer) - assert.NoError(t, err) - assert.NotNil(t, proof5) - assert.NotNil(t, proof6) - - 
err = DBStorage.DeleteGeneratedProofs(ctx, 1, math.MaxInt, dbtxer) - assert.NoError(t, err) - - err = DBStorage.CleanupGeneratedProofs(ctx, 1, dbtxer) - assert.NoError(t, err) - - now = time.Now() - - proof3.GeneratingSince = &now - proof4.GeneratingSince = &now - - err = DBStorage.AddGeneratedProof(ctx, &proof3, dbtxer) - assert.NoError(t, err) - - err = DBStorage.AddGeneratedProof(ctx, &proof4, dbtxer) - assert.NoError(t, err) - - time.Sleep(5 * time.Second) - - affected, err := DBStorage.CleanupLockedProofs(ctx, "4s", dbtxer) - assert.NoError(t, err) - require.Equal(t, int64(2), affected) - - proof5, proof6, err = DBStorage.GetProofsToAggregate(ctx, dbtxer) - assert.EqualError(t, err, state.ErrNotFound.Error()) - assert.Nil(t, proof5) - assert.Nil(t, proof6) - - err = DBStorage.DeleteUngeneratedProofs(ctx, dbtxer) - assert.NoError(t, err) -} diff --git a/aggregator/db/dbstorage/sequence.go b/aggregator/db/dbstorage/sequence.go deleted file mode 100644 index f8a4ff03..00000000 --- a/aggregator/db/dbstorage/sequence.go +++ /dev/null @@ -1,21 +0,0 @@ -package dbstorage - -import ( - "context" - - "github.com/agglayer/aggkit/db" - "github.com/agglayer/aggkit/state" -) - -// AddSequence stores the sequence information to allow the aggregator verify sequences. 
-func (d *DBStorage) AddSequence(ctx context.Context, sequence state.Sequence, dbTx db.Txer) error { - const addSequenceSQL = ` - INSERT INTO sequence (from_batch_num, to_batch_num) - VALUES($1, $2) - ON CONFLICT (from_batch_num) DO UPDATE SET to_batch_num = $2 - ` - - e := d.getExecQuerier(dbTx) - _, err := e.Exec(addSequenceSQL, sequence.FromBatchNumber, sequence.ToBatchNumber) - return err -} diff --git a/aggregator/db/logger.go b/aggregator/db/logger.go deleted file mode 100644 index 441a54fc..00000000 --- a/aggregator/db/logger.go +++ /dev/null @@ -1,27 +0,0 @@ -package db - -import ( - "context" - "fmt" - - "github.com/agglayer/aggkit/log" - "github.com/jackc/pgx/v4" -) - -type dbLoggerImpl struct{} - -func (l dbLoggerImpl) Log(ctx context.Context, level pgx.LogLevel, msg string, data map[string]interface{}) { - m := fmt.Sprintf("%s %v", msg, data) - - switch level { - case pgx.LogLevelInfo: - log.Info(m) - case pgx.LogLevelWarn: - log.Warn(m) - case pgx.LogLevelError: - log.Error(m) - default: - m = fmt.Sprintf("[%s] %s %v", level.String(), msg, data) - log.Debug(m) - } -} diff --git a/aggregator/db/migrations.go b/aggregator/db/migrations.go deleted file mode 100644 index 695eb0c9..00000000 --- a/aggregator/db/migrations.go +++ /dev/null @@ -1,122 +0,0 @@ -package db - -import ( - "embed" - "fmt" - - "github.com/agglayer/aggkit/db" - "github.com/agglayer/aggkit/log" - migrate "github.com/rubenv/sql-migrate" -) - -const ( - // AggregatorMigrationName is the name of the migration used to associate with the migrations dir - AggregatorMigrationName = "aggregator-db" -) - -var ( - //go:embed migrations/*.sql - embedAggregatorMigrations embed.FS - - // embedMigrations is a map of migrations with the name - embedMigrations = map[string]embed.FS{} -) - -func init() { - embedMigrations[AggregatorMigrationName] = embedAggregatorMigrations -} - -// RunMigrationsUp runs migrate-up for the given config. 
-func RunMigrationsUp(dbPath string, name string) error { - log.Info("running migrations up") - - return runMigrations(dbPath, name, migrate.Up) -} - -// CheckMigrations runs migrate-up for the given config. -func CheckMigrations(dbPath string, name string) error { - return checkMigrations(dbPath, name) -} - -// RunMigrationsDown runs migrate-down for the given config. -func RunMigrationsDown(dbPath string, name string) error { - log.Info("running migrations down") - - return runMigrations(dbPath, name, migrate.Down) -} - -// runMigrations will execute pending migrations if needed to keep -// the database updated with the latest changes in either direction, -// up or down. -func runMigrations(dbPath string, name string, direction migrate.MigrationDirection) error { - db, err := db.NewSQLiteDB(dbPath) - if err != nil { - return err - } - - embedMigration, ok := embedMigrations[name] - if !ok { - return fmt.Errorf("migration not found with name: %v", name) - } - - var migrations = &migrate.EmbedFileSystemMigrationSource{ - FileSystem: embedMigration, - Root: "migrations", - } - - nMigrations, err := migrate.Exec(db, "sqlite3", migrations, direction) - if err != nil { - return err - } - - log.Info("successfully ran ", nMigrations, " migrations") - - return nil -} - -func checkMigrations(dbPath string, name string) error { - db, err := db.NewSQLiteDB(dbPath) - if err != nil { - return err - } - - embedMigration, ok := embedMigrations[name] - if !ok { - return fmt.Errorf("migration not found with name: %v", name) - } - - migrationSource := &migrate.EmbedFileSystemMigrationSource{FileSystem: embedMigration} - - migrations, err := migrationSource.FindMigrations() - if err != nil { - log.Errorf("error getting migrations from source: %v", err) - - return err - } - - var expected int - for _, migration := range migrations { - if len(migration.Up) != 0 { - expected++ - } - } - - var actual int - query := `SELECT COUNT(1) FROM public.gorp_migrations` - err = 
db.QueryRow(query).Scan(&actual) - if err != nil { - log.Error("error getting migrations count: ", err) - - return err - } - if expected == actual { - log.Infof("Found %d migrations as expected", actual) - } else { - return fmt.Errorf( - "error the component needs to run %d migrations before starting. DB only contains %d migrations", - expected, actual, - ) - } - - return nil -} diff --git a/aggregator/db/migrations/0001.sql b/aggregator/db/migrations/0001.sql deleted file mode 100644 index 651597a3..00000000 --- a/aggregator/db/migrations/0001.sql +++ /dev/null @@ -1,24 +0,0 @@ --- +migrate Down -DROP TABLE IF EXISTS proof; -DROP TABLE IF EXISTS sequence; - --- +migrate Up -CREATE TABLE IF NOT EXISTS proof ( - batch_num BIGINT NOT NULL, - batch_num_final BIGINT NOT NULL, - proof TEXT NULL, - proof_id TEXT NULL, - input_prover TEXT NULL, - prover TEXT NULL, - prover_id TEXT NULL, - created_at BIGINT NOT NULL, - updated_at BIGINT NOT NULL, - generating_since BIGINT DEFAULT NULL, - PRIMARY KEY (batch_num, batch_num_final) -); - -CREATE TABLE IF NOT EXISTS sequence ( - from_batch_num BIGINT NOT NULL, - to_batch_num BIGINT NOT NULL, - PRIMARY KEY (from_batch_num) -); diff --git a/aggregator/db/migrations_test.go b/aggregator/db/migrations_test.go deleted file mode 100644 index aaf5e08e..00000000 --- a/aggregator/db/migrations_test.go +++ /dev/null @@ -1,28 +0,0 @@ -package db - -import ( - "path" - "testing" - - migrate "github.com/rubenv/sql-migrate" - "github.com/stretchr/testify/assert" -) - -func Test_checkMigrations(t *testing.T) { - embedMigration := embedMigrations[AggregatorMigrationName] - migrationSource := &migrate.EmbedFileSystemMigrationSource{ - FileSystem: embedMigration, - } - - _, err := migrationSource.FileSystem.ReadFile("migrations/0001.sql") - assert.NoError(t, err) -} - -func Test_runMigrations(t *testing.T) { - dbPath := path.Join(t.TempDir(), "Test_runMigrations.sqlite") - err := runMigrations(dbPath, AggregatorMigrationName, migrate.Up) - 
assert.NoError(t, err) - - err = runMigrations(dbPath, AggregatorMigrationName, migrate.Down) - assert.NoError(t, err) -} diff --git a/aggregator/ethmantypes/finalproofinputs.go b/aggregator/ethmantypes/finalproofinputs.go deleted file mode 100644 index 1dcd98ad..00000000 --- a/aggregator/ethmantypes/finalproofinputs.go +++ /dev/null @@ -1,10 +0,0 @@ -package ethmantypes - -import "github.com/agglayer/aggkit/aggregator/prover" - -// FinalProofInputs struct -type FinalProofInputs struct { - FinalProof *prover.FinalProof - NewLocalExitRoot []byte - NewStateRoot []byte -} diff --git a/aggregator/interfaces.go b/aggregator/interfaces.go deleted file mode 100644 index 7ccce5e3..00000000 --- a/aggregator/interfaces.go +++ /dev/null @@ -1,100 +0,0 @@ -package aggregator - -import ( - "context" - "database/sql" - "math/big" - - "github.com/0xPolygon/zkevm-ethtx-manager/ethtxmanager" - ethtxtypes "github.com/0xPolygon/zkevm-ethtx-manager/types" - ethmanTypes "github.com/agglayer/aggkit/aggregator/ethmantypes" - "github.com/agglayer/aggkit/aggregator/prover" - "github.com/agglayer/aggkit/db" - "github.com/agglayer/aggkit/rpc/types" - "github.com/agglayer/aggkit/state" - "github.com/ethereum/go-ethereum/common" - ethtypes "github.com/ethereum/go-ethereum/core/types" - "github.com/ethereum/go-ethereum/crypto/kzg4844" -) - -// Consumer interfaces required by the package. 
-type RPCInterface interface { - GetBatch(batchNumber uint64) (*types.RPCBatch, error) - GetWitness(batchNumber uint64, fullWitness bool) ([]byte, error) -} - -type ProverInterface interface { - Name() string - ID() string - Addr() string - IsIdle() (bool, error) - BatchProof(input *prover.StatelessInputProver) (*string, error) - AggregatedProof(inputProof1, inputProof2 string) (*string, error) - FinalProof(inputProof string, aggregatorAddr string) (*string, error) - WaitRecursiveProof(ctx context.Context, proofID string) (string, common.Hash, common.Hash, error) - WaitFinalProof(ctx context.Context, proofID string) (*prover.FinalProof, error) -} - -// Etherman contains the methods required to interact with ethereum -type Etherman interface { - GetRollupId() uint32 - GetLatestVerifiedBatchNum() (uint64, error) - BuildTrustedVerifyBatchesTxData( - lastVerifiedBatch, newVerifiedBatch uint64, inputs *ethmanTypes.FinalProofInputs, beneficiary common.Address, - ) (to *common.Address, data []byte, err error) - GetLatestBlockHeader(ctx context.Context) (*ethtypes.Header, error) - GetBatchAccInputHash(ctx context.Context, batchNumber uint64) (common.Hash, error) - HeaderByNumber(ctx context.Context, number *big.Int) (*ethtypes.Header, error) -} - -// aggregatorTxProfitabilityChecker interface for different profitability -// checking algorithms. -type aggregatorTxProfitabilityChecker interface { - IsProfitable(context.Context, *big.Int) (bool, error) -} - -// StateInterface gathers the methods to interact with the state. 
-type StorageInterface interface { - BeginTx(ctx context.Context, options *sql.TxOptions) (db.Txer, error) - CheckProofContainsCompleteSequences(ctx context.Context, proof *state.Proof, dbTx db.Txer) (bool, error) - GetProofReadyToVerify(ctx context.Context, lastVerfiedBatchNumber uint64, dbTx db.Txer) (*state.Proof, error) - GetProofsToAggregate(ctx context.Context, dbTx db.Txer) (*state.Proof, *state.Proof, error) - AddGeneratedProof(ctx context.Context, proof *state.Proof, dbTx db.Txer) error - UpdateGeneratedProof(ctx context.Context, proof *state.Proof, dbTx db.Txer) error - DeleteGeneratedProofs(ctx context.Context, batchNumber uint64, batchNumberFinal uint64, dbTx db.Txer) error - DeleteUngeneratedProofs(ctx context.Context, dbTx db.Txer) error - CleanupGeneratedProofs(ctx context.Context, batchNumber uint64, dbTx db.Txer) error - CleanupLockedProofs(ctx context.Context, duration string, dbTx db.Txer) (int64, error) - CheckProofExistsForBatch(ctx context.Context, batchNumber uint64, dbTx db.Txer) (bool, error) - AddSequence(ctx context.Context, sequence state.Sequence, dbTx db.Txer) error -} - -// EthTxManagerClient represents the eth tx manager interface -type EthTxManagerClient interface { - Add( - ctx context.Context, - to *common.Address, - value *big.Int, - data []byte, - gasOffset uint64, - sidecar *ethtypes.BlobTxSidecar, - ) (common.Hash, error) - AddWithGas( - ctx context.Context, - to *common.Address, - value *big.Int, - data []byte, - gasOffset uint64, - sidecar *ethtypes.BlobTxSidecar, - gas uint64, - ) (common.Hash, error) - EncodeBlobData(data []byte) (kzg4844.Blob, error) - MakeBlobSidecar(blobs []kzg4844.Blob) *ethtypes.BlobTxSidecar - ProcessPendingMonitoredTxs(ctx context.Context, resultHandler ethtxmanager.ResultHandler) - Remove(ctx context.Context, id common.Hash) error - RemoveAll(ctx context.Context) error - Result(ctx context.Context, id common.Hash) (ethtxtypes.MonitoredTxResult, error) - ResultsByStatus(ctx context.Context, statuses 
[]ethtxtypes.MonitoredTxStatus) ([]ethtxtypes.MonitoredTxResult, error) - Start() - Stop() -} diff --git a/aggregator/mocks/mock_eth_tx_manager.go b/aggregator/mocks/mock_eth_tx_manager.go deleted file mode 100644 index 47dabe1c..00000000 --- a/aggregator/mocks/mock_eth_tx_manager.go +++ /dev/null @@ -1,587 +0,0 @@ -// Code generated by mockery. DO NOT EDIT. - -package mocks - -import ( - context "context" - big "math/big" - - common "github.com/ethereum/go-ethereum/common" - - ethtxmanager "github.com/0xPolygon/zkevm-ethtx-manager/ethtxmanager" - - kzg4844 "github.com/ethereum/go-ethereum/crypto/kzg4844" - - mock "github.com/stretchr/testify/mock" - - types "github.com/ethereum/go-ethereum/core/types" - - zkevm_ethtx_managertypes "github.com/0xPolygon/zkevm-ethtx-manager/types" -) - -// EthTxManagerClientMock is an autogenerated mock type for the EthTxManagerClient type -type EthTxManagerClientMock struct { - mock.Mock -} - -type EthTxManagerClientMock_Expecter struct { - mock *mock.Mock -} - -func (_m *EthTxManagerClientMock) EXPECT() *EthTxManagerClientMock_Expecter { - return &EthTxManagerClientMock_Expecter{mock: &_m.Mock} -} - -// Add provides a mock function with given fields: ctx, to, value, data, gasOffset, sidecar -func (_m *EthTxManagerClientMock) Add(ctx context.Context, to *common.Address, value *big.Int, data []byte, gasOffset uint64, sidecar *types.BlobTxSidecar) (common.Hash, error) { - ret := _m.Called(ctx, to, value, data, gasOffset, sidecar) - - if len(ret) == 0 { - panic("no return value specified for Add") - } - - var r0 common.Hash - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, *common.Address, *big.Int, []byte, uint64, *types.BlobTxSidecar) (common.Hash, error)); ok { - return rf(ctx, to, value, data, gasOffset, sidecar) - } - if rf, ok := ret.Get(0).(func(context.Context, *common.Address, *big.Int, []byte, uint64, *types.BlobTxSidecar) common.Hash); ok { - r0 = rf(ctx, to, value, data, gasOffset, sidecar) - } else { - if 
ret.Get(0) != nil { - r0 = ret.Get(0).(common.Hash) - } - } - - if rf, ok := ret.Get(1).(func(context.Context, *common.Address, *big.Int, []byte, uint64, *types.BlobTxSidecar) error); ok { - r1 = rf(ctx, to, value, data, gasOffset, sidecar) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// EthTxManagerClientMock_Add_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Add' -type EthTxManagerClientMock_Add_Call struct { - *mock.Call -} - -// Add is a helper method to define mock.On call -// - ctx context.Context -// - to *common.Address -// - value *big.Int -// - data []byte -// - gasOffset uint64 -// - sidecar *types.BlobTxSidecar -func (_e *EthTxManagerClientMock_Expecter) Add(ctx interface{}, to interface{}, value interface{}, data interface{}, gasOffset interface{}, sidecar interface{}) *EthTxManagerClientMock_Add_Call { - return &EthTxManagerClientMock_Add_Call{Call: _e.mock.On("Add", ctx, to, value, data, gasOffset, sidecar)} -} - -func (_c *EthTxManagerClientMock_Add_Call) Run(run func(ctx context.Context, to *common.Address, value *big.Int, data []byte, gasOffset uint64, sidecar *types.BlobTxSidecar)) *EthTxManagerClientMock_Add_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(*common.Address), args[2].(*big.Int), args[3].([]byte), args[4].(uint64), args[5].(*types.BlobTxSidecar)) - }) - return _c -} - -func (_c *EthTxManagerClientMock_Add_Call) Return(_a0 common.Hash, _a1 error) *EthTxManagerClientMock_Add_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *EthTxManagerClientMock_Add_Call) RunAndReturn(run func(context.Context, *common.Address, *big.Int, []byte, uint64, *types.BlobTxSidecar) (common.Hash, error)) *EthTxManagerClientMock_Add_Call { - _c.Call.Return(run) - return _c -} - -// AddWithGas provides a mock function with given fields: ctx, to, value, data, gasOffset, sidecar, gas -func (_m *EthTxManagerClientMock) AddWithGas(ctx context.Context, 
to *common.Address, value *big.Int, data []byte, gasOffset uint64, sidecar *types.BlobTxSidecar, gas uint64) (common.Hash, error) { - ret := _m.Called(ctx, to, value, data, gasOffset, sidecar, gas) - - if len(ret) == 0 { - panic("no return value specified for AddWithGas") - } - - var r0 common.Hash - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, *common.Address, *big.Int, []byte, uint64, *types.BlobTxSidecar, uint64) (common.Hash, error)); ok { - return rf(ctx, to, value, data, gasOffset, sidecar, gas) - } - if rf, ok := ret.Get(0).(func(context.Context, *common.Address, *big.Int, []byte, uint64, *types.BlobTxSidecar, uint64) common.Hash); ok { - r0 = rf(ctx, to, value, data, gasOffset, sidecar, gas) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(common.Hash) - } - } - - if rf, ok := ret.Get(1).(func(context.Context, *common.Address, *big.Int, []byte, uint64, *types.BlobTxSidecar, uint64) error); ok { - r1 = rf(ctx, to, value, data, gasOffset, sidecar, gas) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// EthTxManagerClientMock_AddWithGas_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'AddWithGas' -type EthTxManagerClientMock_AddWithGas_Call struct { - *mock.Call -} - -// AddWithGas is a helper method to define mock.On call -// - ctx context.Context -// - to *common.Address -// - value *big.Int -// - data []byte -// - gasOffset uint64 -// - sidecar *types.BlobTxSidecar -// - gas uint64 -func (_e *EthTxManagerClientMock_Expecter) AddWithGas(ctx interface{}, to interface{}, value interface{}, data interface{}, gasOffset interface{}, sidecar interface{}, gas interface{}) *EthTxManagerClientMock_AddWithGas_Call { - return &EthTxManagerClientMock_AddWithGas_Call{Call: _e.mock.On("AddWithGas", ctx, to, value, data, gasOffset, sidecar, gas)} -} - -func (_c *EthTxManagerClientMock_AddWithGas_Call) Run(run func(ctx context.Context, to *common.Address, value *big.Int, data []byte, gasOffset 
uint64, sidecar *types.BlobTxSidecar, gas uint64)) *EthTxManagerClientMock_AddWithGas_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(*common.Address), args[2].(*big.Int), args[3].([]byte), args[4].(uint64), args[5].(*types.BlobTxSidecar), args[6].(uint64)) - }) - return _c -} - -func (_c *EthTxManagerClientMock_AddWithGas_Call) Return(_a0 common.Hash, _a1 error) *EthTxManagerClientMock_AddWithGas_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *EthTxManagerClientMock_AddWithGas_Call) RunAndReturn(run func(context.Context, *common.Address, *big.Int, []byte, uint64, *types.BlobTxSidecar, uint64) (common.Hash, error)) *EthTxManagerClientMock_AddWithGas_Call { - _c.Call.Return(run) - return _c -} - -// EncodeBlobData provides a mock function with given fields: data -func (_m *EthTxManagerClientMock) EncodeBlobData(data []byte) (kzg4844.Blob, error) { - ret := _m.Called(data) - - if len(ret) == 0 { - panic("no return value specified for EncodeBlobData") - } - - var r0 kzg4844.Blob - var r1 error - if rf, ok := ret.Get(0).(func([]byte) (kzg4844.Blob, error)); ok { - return rf(data) - } - if rf, ok := ret.Get(0).(func([]byte) kzg4844.Blob); ok { - r0 = rf(data) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(kzg4844.Blob) - } - } - - if rf, ok := ret.Get(1).(func([]byte) error); ok { - r1 = rf(data) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// EthTxManagerClientMock_EncodeBlobData_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'EncodeBlobData' -type EthTxManagerClientMock_EncodeBlobData_Call struct { - *mock.Call -} - -// EncodeBlobData is a helper method to define mock.On call -// - data []byte -func (_e *EthTxManagerClientMock_Expecter) EncodeBlobData(data interface{}) *EthTxManagerClientMock_EncodeBlobData_Call { - return &EthTxManagerClientMock_EncodeBlobData_Call{Call: _e.mock.On("EncodeBlobData", data)} -} - -func (_c 
*EthTxManagerClientMock_EncodeBlobData_Call) Run(run func(data []byte)) *EthTxManagerClientMock_EncodeBlobData_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].([]byte)) - }) - return _c -} - -func (_c *EthTxManagerClientMock_EncodeBlobData_Call) Return(_a0 kzg4844.Blob, _a1 error) *EthTxManagerClientMock_EncodeBlobData_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *EthTxManagerClientMock_EncodeBlobData_Call) RunAndReturn(run func([]byte) (kzg4844.Blob, error)) *EthTxManagerClientMock_EncodeBlobData_Call { - _c.Call.Return(run) - return _c -} - -// MakeBlobSidecar provides a mock function with given fields: blobs -func (_m *EthTxManagerClientMock) MakeBlobSidecar(blobs []kzg4844.Blob) *types.BlobTxSidecar { - ret := _m.Called(blobs) - - if len(ret) == 0 { - panic("no return value specified for MakeBlobSidecar") - } - - var r0 *types.BlobTxSidecar - if rf, ok := ret.Get(0).(func([]kzg4844.Blob) *types.BlobTxSidecar); ok { - r0 = rf(blobs) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(*types.BlobTxSidecar) - } - } - - return r0 -} - -// EthTxManagerClientMock_MakeBlobSidecar_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'MakeBlobSidecar' -type EthTxManagerClientMock_MakeBlobSidecar_Call struct { - *mock.Call -} - -// MakeBlobSidecar is a helper method to define mock.On call -// - blobs []kzg4844.Blob -func (_e *EthTxManagerClientMock_Expecter) MakeBlobSidecar(blobs interface{}) *EthTxManagerClientMock_MakeBlobSidecar_Call { - return &EthTxManagerClientMock_MakeBlobSidecar_Call{Call: _e.mock.On("MakeBlobSidecar", blobs)} -} - -func (_c *EthTxManagerClientMock_MakeBlobSidecar_Call) Run(run func(blobs []kzg4844.Blob)) *EthTxManagerClientMock_MakeBlobSidecar_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].([]kzg4844.Blob)) - }) - return _c -} - -func (_c *EthTxManagerClientMock_MakeBlobSidecar_Call) Return(_a0 *types.BlobTxSidecar) 
*EthTxManagerClientMock_MakeBlobSidecar_Call { - _c.Call.Return(_a0) - return _c -} - -func (_c *EthTxManagerClientMock_MakeBlobSidecar_Call) RunAndReturn(run func([]kzg4844.Blob) *types.BlobTxSidecar) *EthTxManagerClientMock_MakeBlobSidecar_Call { - _c.Call.Return(run) - return _c -} - -// ProcessPendingMonitoredTxs provides a mock function with given fields: ctx, resultHandler -func (_m *EthTxManagerClientMock) ProcessPendingMonitoredTxs(ctx context.Context, resultHandler ethtxmanager.ResultHandler) { - _m.Called(ctx, resultHandler) -} - -// EthTxManagerClientMock_ProcessPendingMonitoredTxs_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ProcessPendingMonitoredTxs' -type EthTxManagerClientMock_ProcessPendingMonitoredTxs_Call struct { - *mock.Call -} - -// ProcessPendingMonitoredTxs is a helper method to define mock.On call -// - ctx context.Context -// - resultHandler ethtxmanager.ResultHandler -func (_e *EthTxManagerClientMock_Expecter) ProcessPendingMonitoredTxs(ctx interface{}, resultHandler interface{}) *EthTxManagerClientMock_ProcessPendingMonitoredTxs_Call { - return &EthTxManagerClientMock_ProcessPendingMonitoredTxs_Call{Call: _e.mock.On("ProcessPendingMonitoredTxs", ctx, resultHandler)} -} - -func (_c *EthTxManagerClientMock_ProcessPendingMonitoredTxs_Call) Run(run func(ctx context.Context, resultHandler ethtxmanager.ResultHandler)) *EthTxManagerClientMock_ProcessPendingMonitoredTxs_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(ethtxmanager.ResultHandler)) - }) - return _c -} - -func (_c *EthTxManagerClientMock_ProcessPendingMonitoredTxs_Call) Return() *EthTxManagerClientMock_ProcessPendingMonitoredTxs_Call { - _c.Call.Return() - return _c -} - -func (_c *EthTxManagerClientMock_ProcessPendingMonitoredTxs_Call) RunAndReturn(run func(context.Context, ethtxmanager.ResultHandler)) *EthTxManagerClientMock_ProcessPendingMonitoredTxs_Call { - _c.Run(run) - return _c -} - -// 
Remove provides a mock function with given fields: ctx, id -func (_m *EthTxManagerClientMock) Remove(ctx context.Context, id common.Hash) error { - ret := _m.Called(ctx, id) - - if len(ret) == 0 { - panic("no return value specified for Remove") - } - - var r0 error - if rf, ok := ret.Get(0).(func(context.Context, common.Hash) error); ok { - r0 = rf(ctx, id) - } else { - r0 = ret.Error(0) - } - - return r0 -} - -// EthTxManagerClientMock_Remove_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Remove' -type EthTxManagerClientMock_Remove_Call struct { - *mock.Call -} - -// Remove is a helper method to define mock.On call -// - ctx context.Context -// - id common.Hash -func (_e *EthTxManagerClientMock_Expecter) Remove(ctx interface{}, id interface{}) *EthTxManagerClientMock_Remove_Call { - return &EthTxManagerClientMock_Remove_Call{Call: _e.mock.On("Remove", ctx, id)} -} - -func (_c *EthTxManagerClientMock_Remove_Call) Run(run func(ctx context.Context, id common.Hash)) *EthTxManagerClientMock_Remove_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(common.Hash)) - }) - return _c -} - -func (_c *EthTxManagerClientMock_Remove_Call) Return(_a0 error) *EthTxManagerClientMock_Remove_Call { - _c.Call.Return(_a0) - return _c -} - -func (_c *EthTxManagerClientMock_Remove_Call) RunAndReturn(run func(context.Context, common.Hash) error) *EthTxManagerClientMock_Remove_Call { - _c.Call.Return(run) - return _c -} - -// RemoveAll provides a mock function with given fields: ctx -func (_m *EthTxManagerClientMock) RemoveAll(ctx context.Context) error { - ret := _m.Called(ctx) - - if len(ret) == 0 { - panic("no return value specified for RemoveAll") - } - - var r0 error - if rf, ok := ret.Get(0).(func(context.Context) error); ok { - r0 = rf(ctx) - } else { - r0 = ret.Error(0) - } - - return r0 -} - -// EthTxManagerClientMock_RemoveAll_Call is a *mock.Call that shadows Run/Return methods with type 
explicit version for method 'RemoveAll' -type EthTxManagerClientMock_RemoveAll_Call struct { - *mock.Call -} - -// RemoveAll is a helper method to define mock.On call -// - ctx context.Context -func (_e *EthTxManagerClientMock_Expecter) RemoveAll(ctx interface{}) *EthTxManagerClientMock_RemoveAll_Call { - return &EthTxManagerClientMock_RemoveAll_Call{Call: _e.mock.On("RemoveAll", ctx)} -} - -func (_c *EthTxManagerClientMock_RemoveAll_Call) Run(run func(ctx context.Context)) *EthTxManagerClientMock_RemoveAll_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context)) - }) - return _c -} - -func (_c *EthTxManagerClientMock_RemoveAll_Call) Return(_a0 error) *EthTxManagerClientMock_RemoveAll_Call { - _c.Call.Return(_a0) - return _c -} - -func (_c *EthTxManagerClientMock_RemoveAll_Call) RunAndReturn(run func(context.Context) error) *EthTxManagerClientMock_RemoveAll_Call { - _c.Call.Return(run) - return _c -} - -// Result provides a mock function with given fields: ctx, id -func (_m *EthTxManagerClientMock) Result(ctx context.Context, id common.Hash) (zkevm_ethtx_managertypes.MonitoredTxResult, error) { - ret := _m.Called(ctx, id) - - if len(ret) == 0 { - panic("no return value specified for Result") - } - - var r0 zkevm_ethtx_managertypes.MonitoredTxResult - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, common.Hash) (zkevm_ethtx_managertypes.MonitoredTxResult, error)); ok { - return rf(ctx, id) - } - if rf, ok := ret.Get(0).(func(context.Context, common.Hash) zkevm_ethtx_managertypes.MonitoredTxResult); ok { - r0 = rf(ctx, id) - } else { - r0 = ret.Get(0).(zkevm_ethtx_managertypes.MonitoredTxResult) - } - - if rf, ok := ret.Get(1).(func(context.Context, common.Hash) error); ok { - r1 = rf(ctx, id) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// EthTxManagerClientMock_Result_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Result' -type EthTxManagerClientMock_Result_Call struct 
{ - *mock.Call -} - -// Result is a helper method to define mock.On call -// - ctx context.Context -// - id common.Hash -func (_e *EthTxManagerClientMock_Expecter) Result(ctx interface{}, id interface{}) *EthTxManagerClientMock_Result_Call { - return &EthTxManagerClientMock_Result_Call{Call: _e.mock.On("Result", ctx, id)} -} - -func (_c *EthTxManagerClientMock_Result_Call) Run(run func(ctx context.Context, id common.Hash)) *EthTxManagerClientMock_Result_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(common.Hash)) - }) - return _c -} - -func (_c *EthTxManagerClientMock_Result_Call) Return(_a0 zkevm_ethtx_managertypes.MonitoredTxResult, _a1 error) *EthTxManagerClientMock_Result_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *EthTxManagerClientMock_Result_Call) RunAndReturn(run func(context.Context, common.Hash) (zkevm_ethtx_managertypes.MonitoredTxResult, error)) *EthTxManagerClientMock_Result_Call { - _c.Call.Return(run) - return _c -} - -// ResultsByStatus provides a mock function with given fields: ctx, statuses -func (_m *EthTxManagerClientMock) ResultsByStatus(ctx context.Context, statuses []zkevm_ethtx_managertypes.MonitoredTxStatus) ([]zkevm_ethtx_managertypes.MonitoredTxResult, error) { - ret := _m.Called(ctx, statuses) - - if len(ret) == 0 { - panic("no return value specified for ResultsByStatus") - } - - var r0 []zkevm_ethtx_managertypes.MonitoredTxResult - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, []zkevm_ethtx_managertypes.MonitoredTxStatus) ([]zkevm_ethtx_managertypes.MonitoredTxResult, error)); ok { - return rf(ctx, statuses) - } - if rf, ok := ret.Get(0).(func(context.Context, []zkevm_ethtx_managertypes.MonitoredTxStatus) []zkevm_ethtx_managertypes.MonitoredTxResult); ok { - r0 = rf(ctx, statuses) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).([]zkevm_ethtx_managertypes.MonitoredTxResult) - } - } - - if rf, ok := ret.Get(1).(func(context.Context, 
[]zkevm_ethtx_managertypes.MonitoredTxStatus) error); ok { - r1 = rf(ctx, statuses) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// EthTxManagerClientMock_ResultsByStatus_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ResultsByStatus' -type EthTxManagerClientMock_ResultsByStatus_Call struct { - *mock.Call -} - -// ResultsByStatus is a helper method to define mock.On call -// - ctx context.Context -// - statuses []zkevm_ethtx_managertypes.MonitoredTxStatus -func (_e *EthTxManagerClientMock_Expecter) ResultsByStatus(ctx interface{}, statuses interface{}) *EthTxManagerClientMock_ResultsByStatus_Call { - return &EthTxManagerClientMock_ResultsByStatus_Call{Call: _e.mock.On("ResultsByStatus", ctx, statuses)} -} - -func (_c *EthTxManagerClientMock_ResultsByStatus_Call) Run(run func(ctx context.Context, statuses []zkevm_ethtx_managertypes.MonitoredTxStatus)) *EthTxManagerClientMock_ResultsByStatus_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].([]zkevm_ethtx_managertypes.MonitoredTxStatus)) - }) - return _c -} - -func (_c *EthTxManagerClientMock_ResultsByStatus_Call) Return(_a0 []zkevm_ethtx_managertypes.MonitoredTxResult, _a1 error) *EthTxManagerClientMock_ResultsByStatus_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *EthTxManagerClientMock_ResultsByStatus_Call) RunAndReturn(run func(context.Context, []zkevm_ethtx_managertypes.MonitoredTxStatus) ([]zkevm_ethtx_managertypes.MonitoredTxResult, error)) *EthTxManagerClientMock_ResultsByStatus_Call { - _c.Call.Return(run) - return _c -} - -// Start provides a mock function with no fields -func (_m *EthTxManagerClientMock) Start() { - _m.Called() -} - -// EthTxManagerClientMock_Start_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Start' -type EthTxManagerClientMock_Start_Call struct { - *mock.Call -} - -// Start is a helper method to define mock.On call -func (_e 
*EthTxManagerClientMock_Expecter) Start() *EthTxManagerClientMock_Start_Call { - return &EthTxManagerClientMock_Start_Call{Call: _e.mock.On("Start")} -} - -func (_c *EthTxManagerClientMock_Start_Call) Run(run func()) *EthTxManagerClientMock_Start_Call { - _c.Call.Run(func(args mock.Arguments) { - run() - }) - return _c -} - -func (_c *EthTxManagerClientMock_Start_Call) Return() *EthTxManagerClientMock_Start_Call { - _c.Call.Return() - return _c -} - -func (_c *EthTxManagerClientMock_Start_Call) RunAndReturn(run func()) *EthTxManagerClientMock_Start_Call { - _c.Run(run) - return _c -} - -// Stop provides a mock function with no fields -func (_m *EthTxManagerClientMock) Stop() { - _m.Called() -} - -// EthTxManagerClientMock_Stop_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Stop' -type EthTxManagerClientMock_Stop_Call struct { - *mock.Call -} - -// Stop is a helper method to define mock.On call -func (_e *EthTxManagerClientMock_Expecter) Stop() *EthTxManagerClientMock_Stop_Call { - return &EthTxManagerClientMock_Stop_Call{Call: _e.mock.On("Stop")} -} - -func (_c *EthTxManagerClientMock_Stop_Call) Run(run func()) *EthTxManagerClientMock_Stop_Call { - _c.Call.Run(func(args mock.Arguments) { - run() - }) - return _c -} - -func (_c *EthTxManagerClientMock_Stop_Call) Return() *EthTxManagerClientMock_Stop_Call { - _c.Call.Return() - return _c -} - -func (_c *EthTxManagerClientMock_Stop_Call) RunAndReturn(run func()) *EthTxManagerClientMock_Stop_Call { - _c.Run(run) - return _c -} - -// NewEthTxManagerClientMock creates a new instance of EthTxManagerClientMock. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. -// The first argument is typically a *testing.T value. 
-func NewEthTxManagerClientMock(t interface { - mock.TestingT - Cleanup(func()) -}) *EthTxManagerClientMock { - mock := &EthTxManagerClientMock{} - mock.Mock.Test(t) - - t.Cleanup(func() { mock.AssertExpectations(t) }) - - return mock -} diff --git a/aggregator/mocks/mock_etherman.go b/aggregator/mocks/mock_etherman.go deleted file mode 100644 index d704dd86..00000000 --- a/aggregator/mocks/mock_etherman.go +++ /dev/null @@ -1,389 +0,0 @@ -// Code generated by mockery. DO NOT EDIT. - -package mocks - -import ( - context "context" - big "math/big" - - common "github.com/ethereum/go-ethereum/common" - - ethmantypes "github.com/agglayer/aggkit/aggregator/ethmantypes" - - mock "github.com/stretchr/testify/mock" - - types "github.com/ethereum/go-ethereum/core/types" -) - -// EthermanMock is an autogenerated mock type for the Etherman type -type EthermanMock struct { - mock.Mock -} - -type EthermanMock_Expecter struct { - mock *mock.Mock -} - -func (_m *EthermanMock) EXPECT() *EthermanMock_Expecter { - return &EthermanMock_Expecter{mock: &_m.Mock} -} - -// BuildTrustedVerifyBatchesTxData provides a mock function with given fields: lastVerifiedBatch, newVerifiedBatch, inputs, beneficiary -func (_m *EthermanMock) BuildTrustedVerifyBatchesTxData(lastVerifiedBatch uint64, newVerifiedBatch uint64, inputs *ethmantypes.FinalProofInputs, beneficiary common.Address) (*common.Address, []byte, error) { - ret := _m.Called(lastVerifiedBatch, newVerifiedBatch, inputs, beneficiary) - - if len(ret) == 0 { - panic("no return value specified for BuildTrustedVerifyBatchesTxData") - } - - var r0 *common.Address - var r1 []byte - var r2 error - if rf, ok := ret.Get(0).(func(uint64, uint64, *ethmantypes.FinalProofInputs, common.Address) (*common.Address, []byte, error)); ok { - return rf(lastVerifiedBatch, newVerifiedBatch, inputs, beneficiary) - } - if rf, ok := ret.Get(0).(func(uint64, uint64, *ethmantypes.FinalProofInputs, common.Address) *common.Address); ok { - r0 = rf(lastVerifiedBatch, 
newVerifiedBatch, inputs, beneficiary) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(*common.Address) - } - } - - if rf, ok := ret.Get(1).(func(uint64, uint64, *ethmantypes.FinalProofInputs, common.Address) []byte); ok { - r1 = rf(lastVerifiedBatch, newVerifiedBatch, inputs, beneficiary) - } else { - if ret.Get(1) != nil { - r1 = ret.Get(1).([]byte) - } - } - - if rf, ok := ret.Get(2).(func(uint64, uint64, *ethmantypes.FinalProofInputs, common.Address) error); ok { - r2 = rf(lastVerifiedBatch, newVerifiedBatch, inputs, beneficiary) - } else { - r2 = ret.Error(2) - } - - return r0, r1, r2 -} - -// EthermanMock_BuildTrustedVerifyBatchesTxData_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'BuildTrustedVerifyBatchesTxData' -type EthermanMock_BuildTrustedVerifyBatchesTxData_Call struct { - *mock.Call -} - -// BuildTrustedVerifyBatchesTxData is a helper method to define mock.On call -// - lastVerifiedBatch uint64 -// - newVerifiedBatch uint64 -// - inputs *ethmantypes.FinalProofInputs -// - beneficiary common.Address -func (_e *EthermanMock_Expecter) BuildTrustedVerifyBatchesTxData(lastVerifiedBatch interface{}, newVerifiedBatch interface{}, inputs interface{}, beneficiary interface{}) *EthermanMock_BuildTrustedVerifyBatchesTxData_Call { - return &EthermanMock_BuildTrustedVerifyBatchesTxData_Call{Call: _e.mock.On("BuildTrustedVerifyBatchesTxData", lastVerifiedBatch, newVerifiedBatch, inputs, beneficiary)} -} - -func (_c *EthermanMock_BuildTrustedVerifyBatchesTxData_Call) Run(run func(lastVerifiedBatch uint64, newVerifiedBatch uint64, inputs *ethmantypes.FinalProofInputs, beneficiary common.Address)) *EthermanMock_BuildTrustedVerifyBatchesTxData_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(uint64), args[1].(uint64), args[2].(*ethmantypes.FinalProofInputs), args[3].(common.Address)) - }) - return _c -} - -func (_c *EthermanMock_BuildTrustedVerifyBatchesTxData_Call) Return(to *common.Address, data 
[]byte, err error) *EthermanMock_BuildTrustedVerifyBatchesTxData_Call { - _c.Call.Return(to, data, err) - return _c -} - -func (_c *EthermanMock_BuildTrustedVerifyBatchesTxData_Call) RunAndReturn(run func(uint64, uint64, *ethmantypes.FinalProofInputs, common.Address) (*common.Address, []byte, error)) *EthermanMock_BuildTrustedVerifyBatchesTxData_Call { - _c.Call.Return(run) - return _c -} - -// GetBatchAccInputHash provides a mock function with given fields: ctx, batchNumber -func (_m *EthermanMock) GetBatchAccInputHash(ctx context.Context, batchNumber uint64) (common.Hash, error) { - ret := _m.Called(ctx, batchNumber) - - if len(ret) == 0 { - panic("no return value specified for GetBatchAccInputHash") - } - - var r0 common.Hash - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, uint64) (common.Hash, error)); ok { - return rf(ctx, batchNumber) - } - if rf, ok := ret.Get(0).(func(context.Context, uint64) common.Hash); ok { - r0 = rf(ctx, batchNumber) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(common.Hash) - } - } - - if rf, ok := ret.Get(1).(func(context.Context, uint64) error); ok { - r1 = rf(ctx, batchNumber) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// EthermanMock_GetBatchAccInputHash_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetBatchAccInputHash' -type EthermanMock_GetBatchAccInputHash_Call struct { - *mock.Call -} - -// GetBatchAccInputHash is a helper method to define mock.On call -// - ctx context.Context -// - batchNumber uint64 -func (_e *EthermanMock_Expecter) GetBatchAccInputHash(ctx interface{}, batchNumber interface{}) *EthermanMock_GetBatchAccInputHash_Call { - return &EthermanMock_GetBatchAccInputHash_Call{Call: _e.mock.On("GetBatchAccInputHash", ctx, batchNumber)} -} - -func (_c *EthermanMock_GetBatchAccInputHash_Call) Run(run func(ctx context.Context, batchNumber uint64)) *EthermanMock_GetBatchAccInputHash_Call { - _c.Call.Run(func(args mock.Arguments) { - 
run(args[0].(context.Context), args[1].(uint64)) - }) - return _c -} - -func (_c *EthermanMock_GetBatchAccInputHash_Call) Return(_a0 common.Hash, _a1 error) *EthermanMock_GetBatchAccInputHash_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *EthermanMock_GetBatchAccInputHash_Call) RunAndReturn(run func(context.Context, uint64) (common.Hash, error)) *EthermanMock_GetBatchAccInputHash_Call { - _c.Call.Return(run) - return _c -} - -// GetLatestBlockHeader provides a mock function with given fields: ctx -func (_m *EthermanMock) GetLatestBlockHeader(ctx context.Context) (*types.Header, error) { - ret := _m.Called(ctx) - - if len(ret) == 0 { - panic("no return value specified for GetLatestBlockHeader") - } - - var r0 *types.Header - var r1 error - if rf, ok := ret.Get(0).(func(context.Context) (*types.Header, error)); ok { - return rf(ctx) - } - if rf, ok := ret.Get(0).(func(context.Context) *types.Header); ok { - r0 = rf(ctx) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(*types.Header) - } - } - - if rf, ok := ret.Get(1).(func(context.Context) error); ok { - r1 = rf(ctx) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// EthermanMock_GetLatestBlockHeader_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetLatestBlockHeader' -type EthermanMock_GetLatestBlockHeader_Call struct { - *mock.Call -} - -// GetLatestBlockHeader is a helper method to define mock.On call -// - ctx context.Context -func (_e *EthermanMock_Expecter) GetLatestBlockHeader(ctx interface{}) *EthermanMock_GetLatestBlockHeader_Call { - return &EthermanMock_GetLatestBlockHeader_Call{Call: _e.mock.On("GetLatestBlockHeader", ctx)} -} - -func (_c *EthermanMock_GetLatestBlockHeader_Call) Run(run func(ctx context.Context)) *EthermanMock_GetLatestBlockHeader_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context)) - }) - return _c -} - -func (_c *EthermanMock_GetLatestBlockHeader_Call) Return(_a0 *types.Header, _a1 
error) *EthermanMock_GetLatestBlockHeader_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *EthermanMock_GetLatestBlockHeader_Call) RunAndReturn(run func(context.Context) (*types.Header, error)) *EthermanMock_GetLatestBlockHeader_Call { - _c.Call.Return(run) - return _c -} - -// GetLatestVerifiedBatchNum provides a mock function with no fields -func (_m *EthermanMock) GetLatestVerifiedBatchNum() (uint64, error) { - ret := _m.Called() - - if len(ret) == 0 { - panic("no return value specified for GetLatestVerifiedBatchNum") - } - - var r0 uint64 - var r1 error - if rf, ok := ret.Get(0).(func() (uint64, error)); ok { - return rf() - } - if rf, ok := ret.Get(0).(func() uint64); ok { - r0 = rf() - } else { - r0 = ret.Get(0).(uint64) - } - - if rf, ok := ret.Get(1).(func() error); ok { - r1 = rf() - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// EthermanMock_GetLatestVerifiedBatchNum_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetLatestVerifiedBatchNum' -type EthermanMock_GetLatestVerifiedBatchNum_Call struct { - *mock.Call -} - -// GetLatestVerifiedBatchNum is a helper method to define mock.On call -func (_e *EthermanMock_Expecter) GetLatestVerifiedBatchNum() *EthermanMock_GetLatestVerifiedBatchNum_Call { - return &EthermanMock_GetLatestVerifiedBatchNum_Call{Call: _e.mock.On("GetLatestVerifiedBatchNum")} -} - -func (_c *EthermanMock_GetLatestVerifiedBatchNum_Call) Run(run func()) *EthermanMock_GetLatestVerifiedBatchNum_Call { - _c.Call.Run(func(args mock.Arguments) { - run() - }) - return _c -} - -func (_c *EthermanMock_GetLatestVerifiedBatchNum_Call) Return(_a0 uint64, _a1 error) *EthermanMock_GetLatestVerifiedBatchNum_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *EthermanMock_GetLatestVerifiedBatchNum_Call) RunAndReturn(run func() (uint64, error)) *EthermanMock_GetLatestVerifiedBatchNum_Call { - _c.Call.Return(run) - return _c -} - -// GetRollupId provides a mock function with no 
fields -func (_m *EthermanMock) GetRollupId() uint32 { - ret := _m.Called() - - if len(ret) == 0 { - panic("no return value specified for GetRollupId") - } - - var r0 uint32 - if rf, ok := ret.Get(0).(func() uint32); ok { - r0 = rf() - } else { - r0 = ret.Get(0).(uint32) - } - - return r0 -} - -// EthermanMock_GetRollupId_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetRollupId' -type EthermanMock_GetRollupId_Call struct { - *mock.Call -} - -// GetRollupId is a helper method to define mock.On call -func (_e *EthermanMock_Expecter) GetRollupId() *EthermanMock_GetRollupId_Call { - return &EthermanMock_GetRollupId_Call{Call: _e.mock.On("GetRollupId")} -} - -func (_c *EthermanMock_GetRollupId_Call) Run(run func()) *EthermanMock_GetRollupId_Call { - _c.Call.Run(func(args mock.Arguments) { - run() - }) - return _c -} - -func (_c *EthermanMock_GetRollupId_Call) Return(_a0 uint32) *EthermanMock_GetRollupId_Call { - _c.Call.Return(_a0) - return _c -} - -func (_c *EthermanMock_GetRollupId_Call) RunAndReturn(run func() uint32) *EthermanMock_GetRollupId_Call { - _c.Call.Return(run) - return _c -} - -// HeaderByNumber provides a mock function with given fields: ctx, number -func (_m *EthermanMock) HeaderByNumber(ctx context.Context, number *big.Int) (*types.Header, error) { - ret := _m.Called(ctx, number) - - if len(ret) == 0 { - panic("no return value specified for HeaderByNumber") - } - - var r0 *types.Header - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, *big.Int) (*types.Header, error)); ok { - return rf(ctx, number) - } - if rf, ok := ret.Get(0).(func(context.Context, *big.Int) *types.Header); ok { - r0 = rf(ctx, number) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(*types.Header) - } - } - - if rf, ok := ret.Get(1).(func(context.Context, *big.Int) error); ok { - r1 = rf(ctx, number) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// EthermanMock_HeaderByNumber_Call is a *mock.Call that 
shadows Run/Return methods with type explicit version for method 'HeaderByNumber' -type EthermanMock_HeaderByNumber_Call struct { - *mock.Call -} - -// HeaderByNumber is a helper method to define mock.On call -// - ctx context.Context -// - number *big.Int -func (_e *EthermanMock_Expecter) HeaderByNumber(ctx interface{}, number interface{}) *EthermanMock_HeaderByNumber_Call { - return &EthermanMock_HeaderByNumber_Call{Call: _e.mock.On("HeaderByNumber", ctx, number)} -} - -func (_c *EthermanMock_HeaderByNumber_Call) Run(run func(ctx context.Context, number *big.Int)) *EthermanMock_HeaderByNumber_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(*big.Int)) - }) - return _c -} - -func (_c *EthermanMock_HeaderByNumber_Call) Return(_a0 *types.Header, _a1 error) *EthermanMock_HeaderByNumber_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *EthermanMock_HeaderByNumber_Call) RunAndReturn(run func(context.Context, *big.Int) (*types.Header, error)) *EthermanMock_HeaderByNumber_Call { - _c.Call.Return(run) - return _c -} - -// NewEthermanMock creates a new instance of EthermanMock. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. -// The first argument is typically a *testing.T value. -func NewEthermanMock(t interface { - mock.TestingT - Cleanup(func()) -}) *EthermanMock { - mock := &EthermanMock{} - mock.Mock.Test(t) - - t.Cleanup(func() { mock.AssertExpectations(t) }) - - return mock -} diff --git a/aggregator/mocks/mock_prover.go b/aggregator/mocks/mock_prover.go deleted file mode 100644 index 8e8c94e3..00000000 --- a/aggregator/mocks/mock_prover.go +++ /dev/null @@ -1,540 +0,0 @@ -// Code generated by mockery. DO NOT EDIT. 
- -package mocks - -import ( - context "context" - - common "github.com/ethereum/go-ethereum/common" - - mock "github.com/stretchr/testify/mock" - - prover "github.com/agglayer/aggkit/aggregator/prover" -) - -// ProverInterfaceMock is an autogenerated mock type for the ProverInterface type -type ProverInterfaceMock struct { - mock.Mock -} - -type ProverInterfaceMock_Expecter struct { - mock *mock.Mock -} - -func (_m *ProverInterfaceMock) EXPECT() *ProverInterfaceMock_Expecter { - return &ProverInterfaceMock_Expecter{mock: &_m.Mock} -} - -// Addr provides a mock function with no fields -func (_m *ProverInterfaceMock) Addr() string { - ret := _m.Called() - - if len(ret) == 0 { - panic("no return value specified for Addr") - } - - var r0 string - if rf, ok := ret.Get(0).(func() string); ok { - r0 = rf() - } else { - r0 = ret.Get(0).(string) - } - - return r0 -} - -// ProverInterfaceMock_Addr_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Addr' -type ProverInterfaceMock_Addr_Call struct { - *mock.Call -} - -// Addr is a helper method to define mock.On call -func (_e *ProverInterfaceMock_Expecter) Addr() *ProverInterfaceMock_Addr_Call { - return &ProverInterfaceMock_Addr_Call{Call: _e.mock.On("Addr")} -} - -func (_c *ProverInterfaceMock_Addr_Call) Run(run func()) *ProverInterfaceMock_Addr_Call { - _c.Call.Run(func(args mock.Arguments) { - run() - }) - return _c -} - -func (_c *ProverInterfaceMock_Addr_Call) Return(_a0 string) *ProverInterfaceMock_Addr_Call { - _c.Call.Return(_a0) - return _c -} - -func (_c *ProverInterfaceMock_Addr_Call) RunAndReturn(run func() string) *ProverInterfaceMock_Addr_Call { - _c.Call.Return(run) - return _c -} - -// AggregatedProof provides a mock function with given fields: inputProof1, inputProof2 -func (_m *ProverInterfaceMock) AggregatedProof(inputProof1 string, inputProof2 string) (*string, error) { - ret := _m.Called(inputProof1, inputProof2) - - if len(ret) == 0 { - panic("no return value 
specified for AggregatedProof") - } - - var r0 *string - var r1 error - if rf, ok := ret.Get(0).(func(string, string) (*string, error)); ok { - return rf(inputProof1, inputProof2) - } - if rf, ok := ret.Get(0).(func(string, string) *string); ok { - r0 = rf(inputProof1, inputProof2) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(*string) - } - } - - if rf, ok := ret.Get(1).(func(string, string) error); ok { - r1 = rf(inputProof1, inputProof2) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// ProverInterfaceMock_AggregatedProof_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'AggregatedProof' -type ProverInterfaceMock_AggregatedProof_Call struct { - *mock.Call -} - -// AggregatedProof is a helper method to define mock.On call -// - inputProof1 string -// - inputProof2 string -func (_e *ProverInterfaceMock_Expecter) AggregatedProof(inputProof1 interface{}, inputProof2 interface{}) *ProverInterfaceMock_AggregatedProof_Call { - return &ProverInterfaceMock_AggregatedProof_Call{Call: _e.mock.On("AggregatedProof", inputProof1, inputProof2)} -} - -func (_c *ProverInterfaceMock_AggregatedProof_Call) Run(run func(inputProof1 string, inputProof2 string)) *ProverInterfaceMock_AggregatedProof_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(string), args[1].(string)) - }) - return _c -} - -func (_c *ProverInterfaceMock_AggregatedProof_Call) Return(_a0 *string, _a1 error) *ProverInterfaceMock_AggregatedProof_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *ProverInterfaceMock_AggregatedProof_Call) RunAndReturn(run func(string, string) (*string, error)) *ProverInterfaceMock_AggregatedProof_Call { - _c.Call.Return(run) - return _c -} - -// BatchProof provides a mock function with given fields: input -func (_m *ProverInterfaceMock) BatchProof(input *prover.StatelessInputProver) (*string, error) { - ret := _m.Called(input) - - if len(ret) == 0 { - panic("no return value specified for 
BatchProof") - } - - var r0 *string - var r1 error - if rf, ok := ret.Get(0).(func(*prover.StatelessInputProver) (*string, error)); ok { - return rf(input) - } - if rf, ok := ret.Get(0).(func(*prover.StatelessInputProver) *string); ok { - r0 = rf(input) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(*string) - } - } - - if rf, ok := ret.Get(1).(func(*prover.StatelessInputProver) error); ok { - r1 = rf(input) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// ProverInterfaceMock_BatchProof_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'BatchProof' -type ProverInterfaceMock_BatchProof_Call struct { - *mock.Call -} - -// BatchProof is a helper method to define mock.On call -// - input *prover.StatelessInputProver -func (_e *ProverInterfaceMock_Expecter) BatchProof(input interface{}) *ProverInterfaceMock_BatchProof_Call { - return &ProverInterfaceMock_BatchProof_Call{Call: _e.mock.On("BatchProof", input)} -} - -func (_c *ProverInterfaceMock_BatchProof_Call) Run(run func(input *prover.StatelessInputProver)) *ProverInterfaceMock_BatchProof_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(*prover.StatelessInputProver)) - }) - return _c -} - -func (_c *ProverInterfaceMock_BatchProof_Call) Return(_a0 *string, _a1 error) *ProverInterfaceMock_BatchProof_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *ProverInterfaceMock_BatchProof_Call) RunAndReturn(run func(*prover.StatelessInputProver) (*string, error)) *ProverInterfaceMock_BatchProof_Call { - _c.Call.Return(run) - return _c -} - -// FinalProof provides a mock function with given fields: inputProof, aggregatorAddr -func (_m *ProverInterfaceMock) FinalProof(inputProof string, aggregatorAddr string) (*string, error) { - ret := _m.Called(inputProof, aggregatorAddr) - - if len(ret) == 0 { - panic("no return value specified for FinalProof") - } - - var r0 *string - var r1 error - if rf, ok := ret.Get(0).(func(string, string) (*string, 
error)); ok { - return rf(inputProof, aggregatorAddr) - } - if rf, ok := ret.Get(0).(func(string, string) *string); ok { - r0 = rf(inputProof, aggregatorAddr) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(*string) - } - } - - if rf, ok := ret.Get(1).(func(string, string) error); ok { - r1 = rf(inputProof, aggregatorAddr) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// ProverInterfaceMock_FinalProof_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'FinalProof' -type ProverInterfaceMock_FinalProof_Call struct { - *mock.Call -} - -// FinalProof is a helper method to define mock.On call -// - inputProof string -// - aggregatorAddr string -func (_e *ProverInterfaceMock_Expecter) FinalProof(inputProof interface{}, aggregatorAddr interface{}) *ProverInterfaceMock_FinalProof_Call { - return &ProverInterfaceMock_FinalProof_Call{Call: _e.mock.On("FinalProof", inputProof, aggregatorAddr)} -} - -func (_c *ProverInterfaceMock_FinalProof_Call) Run(run func(inputProof string, aggregatorAddr string)) *ProverInterfaceMock_FinalProof_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(string), args[1].(string)) - }) - return _c -} - -func (_c *ProverInterfaceMock_FinalProof_Call) Return(_a0 *string, _a1 error) *ProverInterfaceMock_FinalProof_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *ProverInterfaceMock_FinalProof_Call) RunAndReturn(run func(string, string) (*string, error)) *ProverInterfaceMock_FinalProof_Call { - _c.Call.Return(run) - return _c -} - -// ID provides a mock function with no fields -func (_m *ProverInterfaceMock) ID() string { - ret := _m.Called() - - if len(ret) == 0 { - panic("no return value specified for ID") - } - - var r0 string - if rf, ok := ret.Get(0).(func() string); ok { - r0 = rf() - } else { - r0 = ret.Get(0).(string) - } - - return r0 -} - -// ProverInterfaceMock_ID_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 
'ID' -type ProverInterfaceMock_ID_Call struct { - *mock.Call -} - -// ID is a helper method to define mock.On call -func (_e *ProverInterfaceMock_Expecter) ID() *ProverInterfaceMock_ID_Call { - return &ProverInterfaceMock_ID_Call{Call: _e.mock.On("ID")} -} - -func (_c *ProverInterfaceMock_ID_Call) Run(run func()) *ProverInterfaceMock_ID_Call { - _c.Call.Run(func(args mock.Arguments) { - run() - }) - return _c -} - -func (_c *ProverInterfaceMock_ID_Call) Return(_a0 string) *ProverInterfaceMock_ID_Call { - _c.Call.Return(_a0) - return _c -} - -func (_c *ProverInterfaceMock_ID_Call) RunAndReturn(run func() string) *ProverInterfaceMock_ID_Call { - _c.Call.Return(run) - return _c -} - -// IsIdle provides a mock function with no fields -func (_m *ProverInterfaceMock) IsIdle() (bool, error) { - ret := _m.Called() - - if len(ret) == 0 { - panic("no return value specified for IsIdle") - } - - var r0 bool - var r1 error - if rf, ok := ret.Get(0).(func() (bool, error)); ok { - return rf() - } - if rf, ok := ret.Get(0).(func() bool); ok { - r0 = rf() - } else { - r0 = ret.Get(0).(bool) - } - - if rf, ok := ret.Get(1).(func() error); ok { - r1 = rf() - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// ProverInterfaceMock_IsIdle_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'IsIdle' -type ProverInterfaceMock_IsIdle_Call struct { - *mock.Call -} - -// IsIdle is a helper method to define mock.On call -func (_e *ProverInterfaceMock_Expecter) IsIdle() *ProverInterfaceMock_IsIdle_Call { - return &ProverInterfaceMock_IsIdle_Call{Call: _e.mock.On("IsIdle")} -} - -func (_c *ProverInterfaceMock_IsIdle_Call) Run(run func()) *ProverInterfaceMock_IsIdle_Call { - _c.Call.Run(func(args mock.Arguments) { - run() - }) - return _c -} - -func (_c *ProverInterfaceMock_IsIdle_Call) Return(_a0 bool, _a1 error) *ProverInterfaceMock_IsIdle_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *ProverInterfaceMock_IsIdle_Call) 
RunAndReturn(run func() (bool, error)) *ProverInterfaceMock_IsIdle_Call { - _c.Call.Return(run) - return _c -} - -// Name provides a mock function with no fields -func (_m *ProverInterfaceMock) Name() string { - ret := _m.Called() - - if len(ret) == 0 { - panic("no return value specified for Name") - } - - var r0 string - if rf, ok := ret.Get(0).(func() string); ok { - r0 = rf() - } else { - r0 = ret.Get(0).(string) - } - - return r0 -} - -// ProverInterfaceMock_Name_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Name' -type ProverInterfaceMock_Name_Call struct { - *mock.Call -} - -// Name is a helper method to define mock.On call -func (_e *ProverInterfaceMock_Expecter) Name() *ProverInterfaceMock_Name_Call { - return &ProverInterfaceMock_Name_Call{Call: _e.mock.On("Name")} -} - -func (_c *ProverInterfaceMock_Name_Call) Run(run func()) *ProverInterfaceMock_Name_Call { - _c.Call.Run(func(args mock.Arguments) { - run() - }) - return _c -} - -func (_c *ProverInterfaceMock_Name_Call) Return(_a0 string) *ProverInterfaceMock_Name_Call { - _c.Call.Return(_a0) - return _c -} - -func (_c *ProverInterfaceMock_Name_Call) RunAndReturn(run func() string) *ProverInterfaceMock_Name_Call { - _c.Call.Return(run) - return _c -} - -// WaitFinalProof provides a mock function with given fields: ctx, proofID -func (_m *ProverInterfaceMock) WaitFinalProof(ctx context.Context, proofID string) (*prover.FinalProof, error) { - ret := _m.Called(ctx, proofID) - - if len(ret) == 0 { - panic("no return value specified for WaitFinalProof") - } - - var r0 *prover.FinalProof - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, string) (*prover.FinalProof, error)); ok { - return rf(ctx, proofID) - } - if rf, ok := ret.Get(0).(func(context.Context, string) *prover.FinalProof); ok { - r0 = rf(ctx, proofID) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(*prover.FinalProof) - } - } - - if rf, ok := ret.Get(1).(func(context.Context, string) 
error); ok { - r1 = rf(ctx, proofID) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// ProverInterfaceMock_WaitFinalProof_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'WaitFinalProof' -type ProverInterfaceMock_WaitFinalProof_Call struct { - *mock.Call -} - -// WaitFinalProof is a helper method to define mock.On call -// - ctx context.Context -// - proofID string -func (_e *ProverInterfaceMock_Expecter) WaitFinalProof(ctx interface{}, proofID interface{}) *ProverInterfaceMock_WaitFinalProof_Call { - return &ProverInterfaceMock_WaitFinalProof_Call{Call: _e.mock.On("WaitFinalProof", ctx, proofID)} -} - -func (_c *ProverInterfaceMock_WaitFinalProof_Call) Run(run func(ctx context.Context, proofID string)) *ProverInterfaceMock_WaitFinalProof_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(string)) - }) - return _c -} - -func (_c *ProverInterfaceMock_WaitFinalProof_Call) Return(_a0 *prover.FinalProof, _a1 error) *ProverInterfaceMock_WaitFinalProof_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *ProverInterfaceMock_WaitFinalProof_Call) RunAndReturn(run func(context.Context, string) (*prover.FinalProof, error)) *ProverInterfaceMock_WaitFinalProof_Call { - _c.Call.Return(run) - return _c -} - -// WaitRecursiveProof provides a mock function with given fields: ctx, proofID -func (_m *ProverInterfaceMock) WaitRecursiveProof(ctx context.Context, proofID string) (string, common.Hash, common.Hash, error) { - ret := _m.Called(ctx, proofID) - - if len(ret) == 0 { - panic("no return value specified for WaitRecursiveProof") - } - - var r0 string - var r1 common.Hash - var r2 common.Hash - var r3 error - if rf, ok := ret.Get(0).(func(context.Context, string) (string, common.Hash, common.Hash, error)); ok { - return rf(ctx, proofID) - } - if rf, ok := ret.Get(0).(func(context.Context, string) string); ok { - r0 = rf(ctx, proofID) - } else { - r0 = ret.Get(0).(string) - } - 
- if rf, ok := ret.Get(1).(func(context.Context, string) common.Hash); ok { - r1 = rf(ctx, proofID) - } else { - if ret.Get(1) != nil { - r1 = ret.Get(1).(common.Hash) - } - } - - if rf, ok := ret.Get(2).(func(context.Context, string) common.Hash); ok { - r2 = rf(ctx, proofID) - } else { - if ret.Get(2) != nil { - r2 = ret.Get(2).(common.Hash) - } - } - - if rf, ok := ret.Get(3).(func(context.Context, string) error); ok { - r3 = rf(ctx, proofID) - } else { - r3 = ret.Error(3) - } - - return r0, r1, r2, r3 -} - -// ProverInterfaceMock_WaitRecursiveProof_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'WaitRecursiveProof' -type ProverInterfaceMock_WaitRecursiveProof_Call struct { - *mock.Call -} - -// WaitRecursiveProof is a helper method to define mock.On call -// - ctx context.Context -// - proofID string -func (_e *ProverInterfaceMock_Expecter) WaitRecursiveProof(ctx interface{}, proofID interface{}) *ProverInterfaceMock_WaitRecursiveProof_Call { - return &ProverInterfaceMock_WaitRecursiveProof_Call{Call: _e.mock.On("WaitRecursiveProof", ctx, proofID)} -} - -func (_c *ProverInterfaceMock_WaitRecursiveProof_Call) Run(run func(ctx context.Context, proofID string)) *ProverInterfaceMock_WaitRecursiveProof_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(string)) - }) - return _c -} - -func (_c *ProverInterfaceMock_WaitRecursiveProof_Call) Return(_a0 string, _a1 common.Hash, _a2 common.Hash, _a3 error) *ProverInterfaceMock_WaitRecursiveProof_Call { - _c.Call.Return(_a0, _a1, _a2, _a3) - return _c -} - -func (_c *ProverInterfaceMock_WaitRecursiveProof_Call) RunAndReturn(run func(context.Context, string) (string, common.Hash, common.Hash, error)) *ProverInterfaceMock_WaitRecursiveProof_Call { - _c.Call.Return(run) - return _c -} - -// NewProverInterfaceMock creates a new instance of ProverInterfaceMock. 
It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. -// The first argument is typically a *testing.T value. -func NewProverInterfaceMock(t interface { - mock.TestingT - Cleanup(func()) -}) *ProverInterfaceMock { - mock := &ProverInterfaceMock{} - mock.Mock.Test(t) - - t.Cleanup(func() { mock.AssertExpectations(t) }) - - return mock -} diff --git a/aggregator/mocks/mock_rpc.go b/aggregator/mocks/mock_rpc.go deleted file mode 100644 index f51ab663..00000000 --- a/aggregator/mocks/mock_rpc.go +++ /dev/null @@ -1,152 +0,0 @@ -// Code generated by mockery. DO NOT EDIT. - -package mocks - -import ( - types "github.com/agglayer/aggkit/rpc/types" - mock "github.com/stretchr/testify/mock" -) - -// RPCInterfaceMock is an autogenerated mock type for the RPCInterface type -type RPCInterfaceMock struct { - mock.Mock -} - -type RPCInterfaceMock_Expecter struct { - mock *mock.Mock -} - -func (_m *RPCInterfaceMock) EXPECT() *RPCInterfaceMock_Expecter { - return &RPCInterfaceMock_Expecter{mock: &_m.Mock} -} - -// GetBatch provides a mock function with given fields: batchNumber -func (_m *RPCInterfaceMock) GetBatch(batchNumber uint64) (*types.RPCBatch, error) { - ret := _m.Called(batchNumber) - - if len(ret) == 0 { - panic("no return value specified for GetBatch") - } - - var r0 *types.RPCBatch - var r1 error - if rf, ok := ret.Get(0).(func(uint64) (*types.RPCBatch, error)); ok { - return rf(batchNumber) - } - if rf, ok := ret.Get(0).(func(uint64) *types.RPCBatch); ok { - r0 = rf(batchNumber) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(*types.RPCBatch) - } - } - - if rf, ok := ret.Get(1).(func(uint64) error); ok { - r1 = rf(batchNumber) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// RPCInterfaceMock_GetBatch_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetBatch' -type RPCInterfaceMock_GetBatch_Call struct { - *mock.Call -} - -// GetBatch is a helper method 
to define mock.On call -// - batchNumber uint64 -func (_e *RPCInterfaceMock_Expecter) GetBatch(batchNumber interface{}) *RPCInterfaceMock_GetBatch_Call { - return &RPCInterfaceMock_GetBatch_Call{Call: _e.mock.On("GetBatch", batchNumber)} -} - -func (_c *RPCInterfaceMock_GetBatch_Call) Run(run func(batchNumber uint64)) *RPCInterfaceMock_GetBatch_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(uint64)) - }) - return _c -} - -func (_c *RPCInterfaceMock_GetBatch_Call) Return(_a0 *types.RPCBatch, _a1 error) *RPCInterfaceMock_GetBatch_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *RPCInterfaceMock_GetBatch_Call) RunAndReturn(run func(uint64) (*types.RPCBatch, error)) *RPCInterfaceMock_GetBatch_Call { - _c.Call.Return(run) - return _c -} - -// GetWitness provides a mock function with given fields: batchNumber, fullWitness -func (_m *RPCInterfaceMock) GetWitness(batchNumber uint64, fullWitness bool) ([]byte, error) { - ret := _m.Called(batchNumber, fullWitness) - - if len(ret) == 0 { - panic("no return value specified for GetWitness") - } - - var r0 []byte - var r1 error - if rf, ok := ret.Get(0).(func(uint64, bool) ([]byte, error)); ok { - return rf(batchNumber, fullWitness) - } - if rf, ok := ret.Get(0).(func(uint64, bool) []byte); ok { - r0 = rf(batchNumber, fullWitness) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).([]byte) - } - } - - if rf, ok := ret.Get(1).(func(uint64, bool) error); ok { - r1 = rf(batchNumber, fullWitness) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// RPCInterfaceMock_GetWitness_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetWitness' -type RPCInterfaceMock_GetWitness_Call struct { - *mock.Call -} - -// GetWitness is a helper method to define mock.On call -// - batchNumber uint64 -// - fullWitness bool -func (_e *RPCInterfaceMock_Expecter) GetWitness(batchNumber interface{}, fullWitness interface{}) *RPCInterfaceMock_GetWitness_Call { - return 
&RPCInterfaceMock_GetWitness_Call{Call: _e.mock.On("GetWitness", batchNumber, fullWitness)} -} - -func (_c *RPCInterfaceMock_GetWitness_Call) Run(run func(batchNumber uint64, fullWitness bool)) *RPCInterfaceMock_GetWitness_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(uint64), args[1].(bool)) - }) - return _c -} - -func (_c *RPCInterfaceMock_GetWitness_Call) Return(_a0 []byte, _a1 error) *RPCInterfaceMock_GetWitness_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *RPCInterfaceMock_GetWitness_Call) RunAndReturn(run func(uint64, bool) ([]byte, error)) *RPCInterfaceMock_GetWitness_Call { - _c.Call.Return(run) - return _c -} - -// NewRPCInterfaceMock creates a new instance of RPCInterfaceMock. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. -// The first argument is typically a *testing.T value. -func NewRPCInterfaceMock(t interface { - mock.TestingT - Cleanup(func()) -}) *RPCInterfaceMock { - mock := &RPCInterfaceMock{} - mock.Mock.Test(t) - - t.Cleanup(func() { mock.AssertExpectations(t) }) - - return mock -} diff --git a/aggregator/mocks/mock_storage.go b/aggregator/mocks/mock_storage.go deleted file mode 100644 index 1f253f67..00000000 --- a/aggregator/mocks/mock_storage.go +++ /dev/null @@ -1,690 +0,0 @@ -// Code generated by mockery. DO NOT EDIT. 
- -package mocks - -import ( - context "context" - - db "github.com/agglayer/aggkit/db" - mock "github.com/stretchr/testify/mock" - - sql "database/sql" - - state "github.com/agglayer/aggkit/state" -) - -// StorageInterfaceMock is an autogenerated mock type for the StorageInterface type -type StorageInterfaceMock struct { - mock.Mock -} - -type StorageInterfaceMock_Expecter struct { - mock *mock.Mock -} - -func (_m *StorageInterfaceMock) EXPECT() *StorageInterfaceMock_Expecter { - return &StorageInterfaceMock_Expecter{mock: &_m.Mock} -} - -// AddGeneratedProof provides a mock function with given fields: ctx, proof, dbTx -func (_m *StorageInterfaceMock) AddGeneratedProof(ctx context.Context, proof *state.Proof, dbTx db.Txer) error { - ret := _m.Called(ctx, proof, dbTx) - - if len(ret) == 0 { - panic("no return value specified for AddGeneratedProof") - } - - var r0 error - if rf, ok := ret.Get(0).(func(context.Context, *state.Proof, db.Txer) error); ok { - r0 = rf(ctx, proof, dbTx) - } else { - r0 = ret.Error(0) - } - - return r0 -} - -// StorageInterfaceMock_AddGeneratedProof_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'AddGeneratedProof' -type StorageInterfaceMock_AddGeneratedProof_Call struct { - *mock.Call -} - -// AddGeneratedProof is a helper method to define mock.On call -// - ctx context.Context -// - proof *state.Proof -// - dbTx db.Txer -func (_e *StorageInterfaceMock_Expecter) AddGeneratedProof(ctx interface{}, proof interface{}, dbTx interface{}) *StorageInterfaceMock_AddGeneratedProof_Call { - return &StorageInterfaceMock_AddGeneratedProof_Call{Call: _e.mock.On("AddGeneratedProof", ctx, proof, dbTx)} -} - -func (_c *StorageInterfaceMock_AddGeneratedProof_Call) Run(run func(ctx context.Context, proof *state.Proof, dbTx db.Txer)) *StorageInterfaceMock_AddGeneratedProof_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(*state.Proof), args[2].(db.Txer)) - }) - return _c 
-} - -func (_c *StorageInterfaceMock_AddGeneratedProof_Call) Return(_a0 error) *StorageInterfaceMock_AddGeneratedProof_Call { - _c.Call.Return(_a0) - return _c -} - -func (_c *StorageInterfaceMock_AddGeneratedProof_Call) RunAndReturn(run func(context.Context, *state.Proof, db.Txer) error) *StorageInterfaceMock_AddGeneratedProof_Call { - _c.Call.Return(run) - return _c -} - -// AddSequence provides a mock function with given fields: ctx, sequence, dbTx -func (_m *StorageInterfaceMock) AddSequence(ctx context.Context, sequence state.Sequence, dbTx db.Txer) error { - ret := _m.Called(ctx, sequence, dbTx) - - if len(ret) == 0 { - panic("no return value specified for AddSequence") - } - - var r0 error - if rf, ok := ret.Get(0).(func(context.Context, state.Sequence, db.Txer) error); ok { - r0 = rf(ctx, sequence, dbTx) - } else { - r0 = ret.Error(0) - } - - return r0 -} - -// StorageInterfaceMock_AddSequence_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'AddSequence' -type StorageInterfaceMock_AddSequence_Call struct { - *mock.Call -} - -// AddSequence is a helper method to define mock.On call -// - ctx context.Context -// - sequence state.Sequence -// - dbTx db.Txer -func (_e *StorageInterfaceMock_Expecter) AddSequence(ctx interface{}, sequence interface{}, dbTx interface{}) *StorageInterfaceMock_AddSequence_Call { - return &StorageInterfaceMock_AddSequence_Call{Call: _e.mock.On("AddSequence", ctx, sequence, dbTx)} -} - -func (_c *StorageInterfaceMock_AddSequence_Call) Run(run func(ctx context.Context, sequence state.Sequence, dbTx db.Txer)) *StorageInterfaceMock_AddSequence_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(state.Sequence), args[2].(db.Txer)) - }) - return _c -} - -func (_c *StorageInterfaceMock_AddSequence_Call) Return(_a0 error) *StorageInterfaceMock_AddSequence_Call { - _c.Call.Return(_a0) - return _c -} - -func (_c *StorageInterfaceMock_AddSequence_Call) 
RunAndReturn(run func(context.Context, state.Sequence, db.Txer) error) *StorageInterfaceMock_AddSequence_Call { - _c.Call.Return(run) - return _c -} - -// BeginTx provides a mock function with given fields: ctx, options -func (_m *StorageInterfaceMock) BeginTx(ctx context.Context, options *sql.TxOptions) (db.Txer, error) { - ret := _m.Called(ctx, options) - - if len(ret) == 0 { - panic("no return value specified for BeginTx") - } - - var r0 db.Txer - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, *sql.TxOptions) (db.Txer, error)); ok { - return rf(ctx, options) - } - if rf, ok := ret.Get(0).(func(context.Context, *sql.TxOptions) db.Txer); ok { - r0 = rf(ctx, options) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(db.Txer) - } - } - - if rf, ok := ret.Get(1).(func(context.Context, *sql.TxOptions) error); ok { - r1 = rf(ctx, options) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// StorageInterfaceMock_BeginTx_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'BeginTx' -type StorageInterfaceMock_BeginTx_Call struct { - *mock.Call -} - -// BeginTx is a helper method to define mock.On call -// - ctx context.Context -// - options *sql.TxOptions -func (_e *StorageInterfaceMock_Expecter) BeginTx(ctx interface{}, options interface{}) *StorageInterfaceMock_BeginTx_Call { - return &StorageInterfaceMock_BeginTx_Call{Call: _e.mock.On("BeginTx", ctx, options)} -} - -func (_c *StorageInterfaceMock_BeginTx_Call) Run(run func(ctx context.Context, options *sql.TxOptions)) *StorageInterfaceMock_BeginTx_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(*sql.TxOptions)) - }) - return _c -} - -func (_c *StorageInterfaceMock_BeginTx_Call) Return(_a0 db.Txer, _a1 error) *StorageInterfaceMock_BeginTx_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *StorageInterfaceMock_BeginTx_Call) RunAndReturn(run func(context.Context, *sql.TxOptions) (db.Txer, error)) 
*StorageInterfaceMock_BeginTx_Call { - _c.Call.Return(run) - return _c -} - -// CheckProofContainsCompleteSequences provides a mock function with given fields: ctx, proof, dbTx -func (_m *StorageInterfaceMock) CheckProofContainsCompleteSequences(ctx context.Context, proof *state.Proof, dbTx db.Txer) (bool, error) { - ret := _m.Called(ctx, proof, dbTx) - - if len(ret) == 0 { - panic("no return value specified for CheckProofContainsCompleteSequences") - } - - var r0 bool - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, *state.Proof, db.Txer) (bool, error)); ok { - return rf(ctx, proof, dbTx) - } - if rf, ok := ret.Get(0).(func(context.Context, *state.Proof, db.Txer) bool); ok { - r0 = rf(ctx, proof, dbTx) - } else { - r0 = ret.Get(0).(bool) - } - - if rf, ok := ret.Get(1).(func(context.Context, *state.Proof, db.Txer) error); ok { - r1 = rf(ctx, proof, dbTx) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// StorageInterfaceMock_CheckProofContainsCompleteSequences_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'CheckProofContainsCompleteSequences' -type StorageInterfaceMock_CheckProofContainsCompleteSequences_Call struct { - *mock.Call -} - -// CheckProofContainsCompleteSequences is a helper method to define mock.On call -// - ctx context.Context -// - proof *state.Proof -// - dbTx db.Txer -func (_e *StorageInterfaceMock_Expecter) CheckProofContainsCompleteSequences(ctx interface{}, proof interface{}, dbTx interface{}) *StorageInterfaceMock_CheckProofContainsCompleteSequences_Call { - return &StorageInterfaceMock_CheckProofContainsCompleteSequences_Call{Call: _e.mock.On("CheckProofContainsCompleteSequences", ctx, proof, dbTx)} -} - -func (_c *StorageInterfaceMock_CheckProofContainsCompleteSequences_Call) Run(run func(ctx context.Context, proof *state.Proof, dbTx db.Txer)) *StorageInterfaceMock_CheckProofContainsCompleteSequences_Call { - _c.Call.Run(func(args mock.Arguments) { - 
run(args[0].(context.Context), args[1].(*state.Proof), args[2].(db.Txer)) - }) - return _c -} - -func (_c *StorageInterfaceMock_CheckProofContainsCompleteSequences_Call) Return(_a0 bool, _a1 error) *StorageInterfaceMock_CheckProofContainsCompleteSequences_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *StorageInterfaceMock_CheckProofContainsCompleteSequences_Call) RunAndReturn(run func(context.Context, *state.Proof, db.Txer) (bool, error)) *StorageInterfaceMock_CheckProofContainsCompleteSequences_Call { - _c.Call.Return(run) - return _c -} - -// CheckProofExistsForBatch provides a mock function with given fields: ctx, batchNumber, dbTx -func (_m *StorageInterfaceMock) CheckProofExistsForBatch(ctx context.Context, batchNumber uint64, dbTx db.Txer) (bool, error) { - ret := _m.Called(ctx, batchNumber, dbTx) - - if len(ret) == 0 { - panic("no return value specified for CheckProofExistsForBatch") - } - - var r0 bool - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, uint64, db.Txer) (bool, error)); ok { - return rf(ctx, batchNumber, dbTx) - } - if rf, ok := ret.Get(0).(func(context.Context, uint64, db.Txer) bool); ok { - r0 = rf(ctx, batchNumber, dbTx) - } else { - r0 = ret.Get(0).(bool) - } - - if rf, ok := ret.Get(1).(func(context.Context, uint64, db.Txer) error); ok { - r1 = rf(ctx, batchNumber, dbTx) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// StorageInterfaceMock_CheckProofExistsForBatch_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'CheckProofExistsForBatch' -type StorageInterfaceMock_CheckProofExistsForBatch_Call struct { - *mock.Call -} - -// CheckProofExistsForBatch is a helper method to define mock.On call -// - ctx context.Context -// - batchNumber uint64 -// - dbTx db.Txer -func (_e *StorageInterfaceMock_Expecter) CheckProofExistsForBatch(ctx interface{}, batchNumber interface{}, dbTx interface{}) *StorageInterfaceMock_CheckProofExistsForBatch_Call { - return 
&StorageInterfaceMock_CheckProofExistsForBatch_Call{Call: _e.mock.On("CheckProofExistsForBatch", ctx, batchNumber, dbTx)} -} - -func (_c *StorageInterfaceMock_CheckProofExistsForBatch_Call) Run(run func(ctx context.Context, batchNumber uint64, dbTx db.Txer)) *StorageInterfaceMock_CheckProofExistsForBatch_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(uint64), args[2].(db.Txer)) - }) - return _c -} - -func (_c *StorageInterfaceMock_CheckProofExistsForBatch_Call) Return(_a0 bool, _a1 error) *StorageInterfaceMock_CheckProofExistsForBatch_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *StorageInterfaceMock_CheckProofExistsForBatch_Call) RunAndReturn(run func(context.Context, uint64, db.Txer) (bool, error)) *StorageInterfaceMock_CheckProofExistsForBatch_Call { - _c.Call.Return(run) - return _c -} - -// CleanupGeneratedProofs provides a mock function with given fields: ctx, batchNumber, dbTx -func (_m *StorageInterfaceMock) CleanupGeneratedProofs(ctx context.Context, batchNumber uint64, dbTx db.Txer) error { - ret := _m.Called(ctx, batchNumber, dbTx) - - if len(ret) == 0 { - panic("no return value specified for CleanupGeneratedProofs") - } - - var r0 error - if rf, ok := ret.Get(0).(func(context.Context, uint64, db.Txer) error); ok { - r0 = rf(ctx, batchNumber, dbTx) - } else { - r0 = ret.Error(0) - } - - return r0 -} - -// StorageInterfaceMock_CleanupGeneratedProofs_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'CleanupGeneratedProofs' -type StorageInterfaceMock_CleanupGeneratedProofs_Call struct { - *mock.Call -} - -// CleanupGeneratedProofs is a helper method to define mock.On call -// - ctx context.Context -// - batchNumber uint64 -// - dbTx db.Txer -func (_e *StorageInterfaceMock_Expecter) CleanupGeneratedProofs(ctx interface{}, batchNumber interface{}, dbTx interface{}) *StorageInterfaceMock_CleanupGeneratedProofs_Call { - return 
&StorageInterfaceMock_CleanupGeneratedProofs_Call{Call: _e.mock.On("CleanupGeneratedProofs", ctx, batchNumber, dbTx)} -} - -func (_c *StorageInterfaceMock_CleanupGeneratedProofs_Call) Run(run func(ctx context.Context, batchNumber uint64, dbTx db.Txer)) *StorageInterfaceMock_CleanupGeneratedProofs_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(uint64), args[2].(db.Txer)) - }) - return _c -} - -func (_c *StorageInterfaceMock_CleanupGeneratedProofs_Call) Return(_a0 error) *StorageInterfaceMock_CleanupGeneratedProofs_Call { - _c.Call.Return(_a0) - return _c -} - -func (_c *StorageInterfaceMock_CleanupGeneratedProofs_Call) RunAndReturn(run func(context.Context, uint64, db.Txer) error) *StorageInterfaceMock_CleanupGeneratedProofs_Call { - _c.Call.Return(run) - return _c -} - -// CleanupLockedProofs provides a mock function with given fields: ctx, duration, dbTx -func (_m *StorageInterfaceMock) CleanupLockedProofs(ctx context.Context, duration string, dbTx db.Txer) (int64, error) { - ret := _m.Called(ctx, duration, dbTx) - - if len(ret) == 0 { - panic("no return value specified for CleanupLockedProofs") - } - - var r0 int64 - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, string, db.Txer) (int64, error)); ok { - return rf(ctx, duration, dbTx) - } - if rf, ok := ret.Get(0).(func(context.Context, string, db.Txer) int64); ok { - r0 = rf(ctx, duration, dbTx) - } else { - r0 = ret.Get(0).(int64) - } - - if rf, ok := ret.Get(1).(func(context.Context, string, db.Txer) error); ok { - r1 = rf(ctx, duration, dbTx) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// StorageInterfaceMock_CleanupLockedProofs_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'CleanupLockedProofs' -type StorageInterfaceMock_CleanupLockedProofs_Call struct { - *mock.Call -} - -// CleanupLockedProofs is a helper method to define mock.On call -// - ctx context.Context -// - duration string -// - 
dbTx db.Txer -func (_e *StorageInterfaceMock_Expecter) CleanupLockedProofs(ctx interface{}, duration interface{}, dbTx interface{}) *StorageInterfaceMock_CleanupLockedProofs_Call { - return &StorageInterfaceMock_CleanupLockedProofs_Call{Call: _e.mock.On("CleanupLockedProofs", ctx, duration, dbTx)} -} - -func (_c *StorageInterfaceMock_CleanupLockedProofs_Call) Run(run func(ctx context.Context, duration string, dbTx db.Txer)) *StorageInterfaceMock_CleanupLockedProofs_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(string), args[2].(db.Txer)) - }) - return _c -} - -func (_c *StorageInterfaceMock_CleanupLockedProofs_Call) Return(_a0 int64, _a1 error) *StorageInterfaceMock_CleanupLockedProofs_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *StorageInterfaceMock_CleanupLockedProofs_Call) RunAndReturn(run func(context.Context, string, db.Txer) (int64, error)) *StorageInterfaceMock_CleanupLockedProofs_Call { - _c.Call.Return(run) - return _c -} - -// DeleteGeneratedProofs provides a mock function with given fields: ctx, batchNumber, batchNumberFinal, dbTx -func (_m *StorageInterfaceMock) DeleteGeneratedProofs(ctx context.Context, batchNumber uint64, batchNumberFinal uint64, dbTx db.Txer) error { - ret := _m.Called(ctx, batchNumber, batchNumberFinal, dbTx) - - if len(ret) == 0 { - panic("no return value specified for DeleteGeneratedProofs") - } - - var r0 error - if rf, ok := ret.Get(0).(func(context.Context, uint64, uint64, db.Txer) error); ok { - r0 = rf(ctx, batchNumber, batchNumberFinal, dbTx) - } else { - r0 = ret.Error(0) - } - - return r0 -} - -// StorageInterfaceMock_DeleteGeneratedProofs_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeleteGeneratedProofs' -type StorageInterfaceMock_DeleteGeneratedProofs_Call struct { - *mock.Call -} - -// DeleteGeneratedProofs is a helper method to define mock.On call -// - ctx context.Context -// - batchNumber uint64 -// - 
batchNumberFinal uint64 -// - dbTx db.Txer -func (_e *StorageInterfaceMock_Expecter) DeleteGeneratedProofs(ctx interface{}, batchNumber interface{}, batchNumberFinal interface{}, dbTx interface{}) *StorageInterfaceMock_DeleteGeneratedProofs_Call { - return &StorageInterfaceMock_DeleteGeneratedProofs_Call{Call: _e.mock.On("DeleteGeneratedProofs", ctx, batchNumber, batchNumberFinal, dbTx)} -} - -func (_c *StorageInterfaceMock_DeleteGeneratedProofs_Call) Run(run func(ctx context.Context, batchNumber uint64, batchNumberFinal uint64, dbTx db.Txer)) *StorageInterfaceMock_DeleteGeneratedProofs_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(uint64), args[2].(uint64), args[3].(db.Txer)) - }) - return _c -} - -func (_c *StorageInterfaceMock_DeleteGeneratedProofs_Call) Return(_a0 error) *StorageInterfaceMock_DeleteGeneratedProofs_Call { - _c.Call.Return(_a0) - return _c -} - -func (_c *StorageInterfaceMock_DeleteGeneratedProofs_Call) RunAndReturn(run func(context.Context, uint64, uint64, db.Txer) error) *StorageInterfaceMock_DeleteGeneratedProofs_Call { - _c.Call.Return(run) - return _c -} - -// DeleteUngeneratedProofs provides a mock function with given fields: ctx, dbTx -func (_m *StorageInterfaceMock) DeleteUngeneratedProofs(ctx context.Context, dbTx db.Txer) error { - ret := _m.Called(ctx, dbTx) - - if len(ret) == 0 { - panic("no return value specified for DeleteUngeneratedProofs") - } - - var r0 error - if rf, ok := ret.Get(0).(func(context.Context, db.Txer) error); ok { - r0 = rf(ctx, dbTx) - } else { - r0 = ret.Error(0) - } - - return r0 -} - -// StorageInterfaceMock_DeleteUngeneratedProofs_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeleteUngeneratedProofs' -type StorageInterfaceMock_DeleteUngeneratedProofs_Call struct { - *mock.Call -} - -// DeleteUngeneratedProofs is a helper method to define mock.On call -// - ctx context.Context -// - dbTx db.Txer -func (_e 
*StorageInterfaceMock_Expecter) DeleteUngeneratedProofs(ctx interface{}, dbTx interface{}) *StorageInterfaceMock_DeleteUngeneratedProofs_Call { - return &StorageInterfaceMock_DeleteUngeneratedProofs_Call{Call: _e.mock.On("DeleteUngeneratedProofs", ctx, dbTx)} -} - -func (_c *StorageInterfaceMock_DeleteUngeneratedProofs_Call) Run(run func(ctx context.Context, dbTx db.Txer)) *StorageInterfaceMock_DeleteUngeneratedProofs_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(db.Txer)) - }) - return _c -} - -func (_c *StorageInterfaceMock_DeleteUngeneratedProofs_Call) Return(_a0 error) *StorageInterfaceMock_DeleteUngeneratedProofs_Call { - _c.Call.Return(_a0) - return _c -} - -func (_c *StorageInterfaceMock_DeleteUngeneratedProofs_Call) RunAndReturn(run func(context.Context, db.Txer) error) *StorageInterfaceMock_DeleteUngeneratedProofs_Call { - _c.Call.Return(run) - return _c -} - -// GetProofReadyToVerify provides a mock function with given fields: ctx, lastVerfiedBatchNumber, dbTx -func (_m *StorageInterfaceMock) GetProofReadyToVerify(ctx context.Context, lastVerfiedBatchNumber uint64, dbTx db.Txer) (*state.Proof, error) { - ret := _m.Called(ctx, lastVerfiedBatchNumber, dbTx) - - if len(ret) == 0 { - panic("no return value specified for GetProofReadyToVerify") - } - - var r0 *state.Proof - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, uint64, db.Txer) (*state.Proof, error)); ok { - return rf(ctx, lastVerfiedBatchNumber, dbTx) - } - if rf, ok := ret.Get(0).(func(context.Context, uint64, db.Txer) *state.Proof); ok { - r0 = rf(ctx, lastVerfiedBatchNumber, dbTx) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(*state.Proof) - } - } - - if rf, ok := ret.Get(1).(func(context.Context, uint64, db.Txer) error); ok { - r1 = rf(ctx, lastVerfiedBatchNumber, dbTx) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// StorageInterfaceMock_GetProofReadyToVerify_Call is a *mock.Call that shadows Run/Return methods 
with type explicit version for method 'GetProofReadyToVerify' -type StorageInterfaceMock_GetProofReadyToVerify_Call struct { - *mock.Call -} - -// GetProofReadyToVerify is a helper method to define mock.On call -// - ctx context.Context -// - lastVerfiedBatchNumber uint64 -// - dbTx db.Txer -func (_e *StorageInterfaceMock_Expecter) GetProofReadyToVerify(ctx interface{}, lastVerfiedBatchNumber interface{}, dbTx interface{}) *StorageInterfaceMock_GetProofReadyToVerify_Call { - return &StorageInterfaceMock_GetProofReadyToVerify_Call{Call: _e.mock.On("GetProofReadyToVerify", ctx, lastVerfiedBatchNumber, dbTx)} -} - -func (_c *StorageInterfaceMock_GetProofReadyToVerify_Call) Run(run func(ctx context.Context, lastVerfiedBatchNumber uint64, dbTx db.Txer)) *StorageInterfaceMock_GetProofReadyToVerify_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(uint64), args[2].(db.Txer)) - }) - return _c -} - -func (_c *StorageInterfaceMock_GetProofReadyToVerify_Call) Return(_a0 *state.Proof, _a1 error) *StorageInterfaceMock_GetProofReadyToVerify_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *StorageInterfaceMock_GetProofReadyToVerify_Call) RunAndReturn(run func(context.Context, uint64, db.Txer) (*state.Proof, error)) *StorageInterfaceMock_GetProofReadyToVerify_Call { - _c.Call.Return(run) - return _c -} - -// GetProofsToAggregate provides a mock function with given fields: ctx, dbTx -func (_m *StorageInterfaceMock) GetProofsToAggregate(ctx context.Context, dbTx db.Txer) (*state.Proof, *state.Proof, error) { - ret := _m.Called(ctx, dbTx) - - if len(ret) == 0 { - panic("no return value specified for GetProofsToAggregate") - } - - var r0 *state.Proof - var r1 *state.Proof - var r2 error - if rf, ok := ret.Get(0).(func(context.Context, db.Txer) (*state.Proof, *state.Proof, error)); ok { - return rf(ctx, dbTx) - } - if rf, ok := ret.Get(0).(func(context.Context, db.Txer) *state.Proof); ok { - r0 = rf(ctx, dbTx) - } else { - if 
ret.Get(0) != nil { - r0 = ret.Get(0).(*state.Proof) - } - } - - if rf, ok := ret.Get(1).(func(context.Context, db.Txer) *state.Proof); ok { - r1 = rf(ctx, dbTx) - } else { - if ret.Get(1) != nil { - r1 = ret.Get(1).(*state.Proof) - } - } - - if rf, ok := ret.Get(2).(func(context.Context, db.Txer) error); ok { - r2 = rf(ctx, dbTx) - } else { - r2 = ret.Error(2) - } - - return r0, r1, r2 -} - -// StorageInterfaceMock_GetProofsToAggregate_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetProofsToAggregate' -type StorageInterfaceMock_GetProofsToAggregate_Call struct { - *mock.Call -} - -// GetProofsToAggregate is a helper method to define mock.On call -// - ctx context.Context -// - dbTx db.Txer -func (_e *StorageInterfaceMock_Expecter) GetProofsToAggregate(ctx interface{}, dbTx interface{}) *StorageInterfaceMock_GetProofsToAggregate_Call { - return &StorageInterfaceMock_GetProofsToAggregate_Call{Call: _e.mock.On("GetProofsToAggregate", ctx, dbTx)} -} - -func (_c *StorageInterfaceMock_GetProofsToAggregate_Call) Run(run func(ctx context.Context, dbTx db.Txer)) *StorageInterfaceMock_GetProofsToAggregate_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(db.Txer)) - }) - return _c -} - -func (_c *StorageInterfaceMock_GetProofsToAggregate_Call) Return(_a0 *state.Proof, _a1 *state.Proof, _a2 error) *StorageInterfaceMock_GetProofsToAggregate_Call { - _c.Call.Return(_a0, _a1, _a2) - return _c -} - -func (_c *StorageInterfaceMock_GetProofsToAggregate_Call) RunAndReturn(run func(context.Context, db.Txer) (*state.Proof, *state.Proof, error)) *StorageInterfaceMock_GetProofsToAggregate_Call { - _c.Call.Return(run) - return _c -} - -// UpdateGeneratedProof provides a mock function with given fields: ctx, proof, dbTx -func (_m *StorageInterfaceMock) UpdateGeneratedProof(ctx context.Context, proof *state.Proof, dbTx db.Txer) error { - ret := _m.Called(ctx, proof, dbTx) - - if len(ret) == 0 { - 
panic("no return value specified for UpdateGeneratedProof") - } - - var r0 error - if rf, ok := ret.Get(0).(func(context.Context, *state.Proof, db.Txer) error); ok { - r0 = rf(ctx, proof, dbTx) - } else { - r0 = ret.Error(0) - } - - return r0 -} - -// StorageInterfaceMock_UpdateGeneratedProof_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'UpdateGeneratedProof' -type StorageInterfaceMock_UpdateGeneratedProof_Call struct { - *mock.Call -} - -// UpdateGeneratedProof is a helper method to define mock.On call -// - ctx context.Context -// - proof *state.Proof -// - dbTx db.Txer -func (_e *StorageInterfaceMock_Expecter) UpdateGeneratedProof(ctx interface{}, proof interface{}, dbTx interface{}) *StorageInterfaceMock_UpdateGeneratedProof_Call { - return &StorageInterfaceMock_UpdateGeneratedProof_Call{Call: _e.mock.On("UpdateGeneratedProof", ctx, proof, dbTx)} -} - -func (_c *StorageInterfaceMock_UpdateGeneratedProof_Call) Run(run func(ctx context.Context, proof *state.Proof, dbTx db.Txer)) *StorageInterfaceMock_UpdateGeneratedProof_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(*state.Proof), args[2].(db.Txer)) - }) - return _c -} - -func (_c *StorageInterfaceMock_UpdateGeneratedProof_Call) Return(_a0 error) *StorageInterfaceMock_UpdateGeneratedProof_Call { - _c.Call.Return(_a0) - return _c -} - -func (_c *StorageInterfaceMock_UpdateGeneratedProof_Call) RunAndReturn(run func(context.Context, *state.Proof, db.Txer) error) *StorageInterfaceMock_UpdateGeneratedProof_Call { - _c.Call.Return(run) - return _c -} - -// NewStorageInterfaceMock creates a new instance of StorageInterfaceMock. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. -// The first argument is typically a *testing.T value. 
-func NewStorageInterfaceMock(t interface { - mock.TestingT - Cleanup(func()) -}) *StorageInterfaceMock { - mock := &StorageInterfaceMock{} - mock.Mock.Test(t) - - t.Cleanup(func() { mock.AssertExpectations(t) }) - - return mock -} diff --git a/aggregator/mocks/mock_synchronizer.go b/aggregator/mocks/mock_synchronizer.go deleted file mode 100644 index ae7735b1..00000000 --- a/aggregator/mocks/mock_synchronizer.go +++ /dev/null @@ -1,697 +0,0 @@ -// Code generated by mockery. DO NOT EDIT. - -package mocks - -import ( - context "context" - - common "github.com/ethereum/go-ethereum/common" - - mock "github.com/stretchr/testify/mock" - - synchronizer "github.com/0xPolygonHermez/zkevm-synchronizer-l1/synchronizer" -) - -// SynchronizerInterfaceMock is an autogenerated mock type for the Synchronizer type -type SynchronizerInterfaceMock struct { - mock.Mock -} - -type SynchronizerInterfaceMock_Expecter struct { - mock *mock.Mock -} - -func (_m *SynchronizerInterfaceMock) EXPECT() *SynchronizerInterfaceMock_Expecter { - return &SynchronizerInterfaceMock_Expecter{mock: &_m.Mock} -} - -// GetL1BlockByNumber provides a mock function with given fields: ctx, blockNumber -func (_m *SynchronizerInterfaceMock) GetL1BlockByNumber(ctx context.Context, blockNumber uint64) (*synchronizer.L1Block, error) { - ret := _m.Called(ctx, blockNumber) - - if len(ret) == 0 { - panic("no return value specified for GetL1BlockByNumber") - } - - var r0 *synchronizer.L1Block - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, uint64) (*synchronizer.L1Block, error)); ok { - return rf(ctx, blockNumber) - } - if rf, ok := ret.Get(0).(func(context.Context, uint64) *synchronizer.L1Block); ok { - r0 = rf(ctx, blockNumber) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(*synchronizer.L1Block) - } - } - - if rf, ok := ret.Get(1).(func(context.Context, uint64) error); ok { - r1 = rf(ctx, blockNumber) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// 
SynchronizerInterfaceMock_GetL1BlockByNumber_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetL1BlockByNumber' -type SynchronizerInterfaceMock_GetL1BlockByNumber_Call struct { - *mock.Call -} - -// GetL1BlockByNumber is a helper method to define mock.On call -// - ctx context.Context -// - blockNumber uint64 -func (_e *SynchronizerInterfaceMock_Expecter) GetL1BlockByNumber(ctx interface{}, blockNumber interface{}) *SynchronizerInterfaceMock_GetL1BlockByNumber_Call { - return &SynchronizerInterfaceMock_GetL1BlockByNumber_Call{Call: _e.mock.On("GetL1BlockByNumber", ctx, blockNumber)} -} - -func (_c *SynchronizerInterfaceMock_GetL1BlockByNumber_Call) Run(run func(ctx context.Context, blockNumber uint64)) *SynchronizerInterfaceMock_GetL1BlockByNumber_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(uint64)) - }) - return _c -} - -func (_c *SynchronizerInterfaceMock_GetL1BlockByNumber_Call) Return(_a0 *synchronizer.L1Block, _a1 error) *SynchronizerInterfaceMock_GetL1BlockByNumber_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *SynchronizerInterfaceMock_GetL1BlockByNumber_Call) RunAndReturn(run func(context.Context, uint64) (*synchronizer.L1Block, error)) *SynchronizerInterfaceMock_GetL1BlockByNumber_Call { - _c.Call.Return(run) - return _c -} - -// GetL1InfoRootPerIndex provides a mock function with given fields: ctx, L1InfoTreeIndex -func (_m *SynchronizerInterfaceMock) GetL1InfoRootPerIndex(ctx context.Context, L1InfoTreeIndex uint32) (common.Hash, error) { - ret := _m.Called(ctx, L1InfoTreeIndex) - - if len(ret) == 0 { - panic("no return value specified for GetL1InfoRootPerIndex") - } - - var r0 common.Hash - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, uint32) (common.Hash, error)); ok { - return rf(ctx, L1InfoTreeIndex) - } - if rf, ok := ret.Get(0).(func(context.Context, uint32) common.Hash); ok { - r0 = rf(ctx, L1InfoTreeIndex) - } else { - if 
ret.Get(0) != nil { - r0 = ret.Get(0).(common.Hash) - } - } - - if rf, ok := ret.Get(1).(func(context.Context, uint32) error); ok { - r1 = rf(ctx, L1InfoTreeIndex) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// SynchronizerInterfaceMock_GetL1InfoRootPerIndex_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetL1InfoRootPerIndex' -type SynchronizerInterfaceMock_GetL1InfoRootPerIndex_Call struct { - *mock.Call -} - -// GetL1InfoRootPerIndex is a helper method to define mock.On call -// - ctx context.Context -// - L1InfoTreeIndex uint32 -func (_e *SynchronizerInterfaceMock_Expecter) GetL1InfoRootPerIndex(ctx interface{}, L1InfoTreeIndex interface{}) *SynchronizerInterfaceMock_GetL1InfoRootPerIndex_Call { - return &SynchronizerInterfaceMock_GetL1InfoRootPerIndex_Call{Call: _e.mock.On("GetL1InfoRootPerIndex", ctx, L1InfoTreeIndex)} -} - -func (_c *SynchronizerInterfaceMock_GetL1InfoRootPerIndex_Call) Run(run func(ctx context.Context, L1InfoTreeIndex uint32)) *SynchronizerInterfaceMock_GetL1InfoRootPerIndex_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(uint32)) - }) - return _c -} - -func (_c *SynchronizerInterfaceMock_GetL1InfoRootPerIndex_Call) Return(_a0 common.Hash, _a1 error) *SynchronizerInterfaceMock_GetL1InfoRootPerIndex_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *SynchronizerInterfaceMock_GetL1InfoRootPerIndex_Call) RunAndReturn(run func(context.Context, uint32) (common.Hash, error)) *SynchronizerInterfaceMock_GetL1InfoRootPerIndex_Call { - _c.Call.Return(run) - return _c -} - -// GetL1InfoTreeLeaves provides a mock function with given fields: ctx, indexLeaves -func (_m *SynchronizerInterfaceMock) GetL1InfoTreeLeaves(ctx context.Context, indexLeaves []uint32) (map[uint32]synchronizer.L1InfoTreeLeaf, error) { - ret := _m.Called(ctx, indexLeaves) - - if len(ret) == 0 { - panic("no return value specified for GetL1InfoTreeLeaves") - } - - var r0 
map[uint32]synchronizer.L1InfoTreeLeaf - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, []uint32) (map[uint32]synchronizer.L1InfoTreeLeaf, error)); ok { - return rf(ctx, indexLeaves) - } - if rf, ok := ret.Get(0).(func(context.Context, []uint32) map[uint32]synchronizer.L1InfoTreeLeaf); ok { - r0 = rf(ctx, indexLeaves) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(map[uint32]synchronizer.L1InfoTreeLeaf) - } - } - - if rf, ok := ret.Get(1).(func(context.Context, []uint32) error); ok { - r1 = rf(ctx, indexLeaves) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// SynchronizerInterfaceMock_GetL1InfoTreeLeaves_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetL1InfoTreeLeaves' -type SynchronizerInterfaceMock_GetL1InfoTreeLeaves_Call struct { - *mock.Call -} - -// GetL1InfoTreeLeaves is a helper method to define mock.On call -// - ctx context.Context -// - indexLeaves []uint32 -func (_e *SynchronizerInterfaceMock_Expecter) GetL1InfoTreeLeaves(ctx interface{}, indexLeaves interface{}) *SynchronizerInterfaceMock_GetL1InfoTreeLeaves_Call { - return &SynchronizerInterfaceMock_GetL1InfoTreeLeaves_Call{Call: _e.mock.On("GetL1InfoTreeLeaves", ctx, indexLeaves)} -} - -func (_c *SynchronizerInterfaceMock_GetL1InfoTreeLeaves_Call) Run(run func(ctx context.Context, indexLeaves []uint32)) *SynchronizerInterfaceMock_GetL1InfoTreeLeaves_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].([]uint32)) - }) - return _c -} - -func (_c *SynchronizerInterfaceMock_GetL1InfoTreeLeaves_Call) Return(_a0 map[uint32]synchronizer.L1InfoTreeLeaf, _a1 error) *SynchronizerInterfaceMock_GetL1InfoTreeLeaves_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *SynchronizerInterfaceMock_GetL1InfoTreeLeaves_Call) RunAndReturn(run func(context.Context, []uint32) (map[uint32]synchronizer.L1InfoTreeLeaf, error)) *SynchronizerInterfaceMock_GetL1InfoTreeLeaves_Call { - 
_c.Call.Return(run) - return _c -} - -// GetLastL1Block provides a mock function with given fields: ctx -func (_m *SynchronizerInterfaceMock) GetLastL1Block(ctx context.Context) (*synchronizer.L1Block, error) { - ret := _m.Called(ctx) - - if len(ret) == 0 { - panic("no return value specified for GetLastL1Block") - } - - var r0 *synchronizer.L1Block - var r1 error - if rf, ok := ret.Get(0).(func(context.Context) (*synchronizer.L1Block, error)); ok { - return rf(ctx) - } - if rf, ok := ret.Get(0).(func(context.Context) *synchronizer.L1Block); ok { - r0 = rf(ctx) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(*synchronizer.L1Block) - } - } - - if rf, ok := ret.Get(1).(func(context.Context) error); ok { - r1 = rf(ctx) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// SynchronizerInterfaceMock_GetLastL1Block_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetLastL1Block' -type SynchronizerInterfaceMock_GetLastL1Block_Call struct { - *mock.Call -} - -// GetLastL1Block is a helper method to define mock.On call -// - ctx context.Context -func (_e *SynchronizerInterfaceMock_Expecter) GetLastL1Block(ctx interface{}) *SynchronizerInterfaceMock_GetLastL1Block_Call { - return &SynchronizerInterfaceMock_GetLastL1Block_Call{Call: _e.mock.On("GetLastL1Block", ctx)} -} - -func (_c *SynchronizerInterfaceMock_GetLastL1Block_Call) Run(run func(ctx context.Context)) *SynchronizerInterfaceMock_GetLastL1Block_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context)) - }) - return _c -} - -func (_c *SynchronizerInterfaceMock_GetLastL1Block_Call) Return(_a0 *synchronizer.L1Block, _a1 error) *SynchronizerInterfaceMock_GetLastL1Block_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *SynchronizerInterfaceMock_GetLastL1Block_Call) RunAndReturn(run func(context.Context) (*synchronizer.L1Block, error)) *SynchronizerInterfaceMock_GetLastL1Block_Call { - _c.Call.Return(run) - return _c -} - -// 
GetLastestVirtualBatchNumber provides a mock function with given fields: ctx -func (_m *SynchronizerInterfaceMock) GetLastestVirtualBatchNumber(ctx context.Context) (uint64, error) { - ret := _m.Called(ctx) - - if len(ret) == 0 { - panic("no return value specified for GetLastestVirtualBatchNumber") - } - - var r0 uint64 - var r1 error - if rf, ok := ret.Get(0).(func(context.Context) (uint64, error)); ok { - return rf(ctx) - } - if rf, ok := ret.Get(0).(func(context.Context) uint64); ok { - r0 = rf(ctx) - } else { - r0 = ret.Get(0).(uint64) - } - - if rf, ok := ret.Get(1).(func(context.Context) error); ok { - r1 = rf(ctx) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// SynchronizerInterfaceMock_GetLastestVirtualBatchNumber_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetLastestVirtualBatchNumber' -type SynchronizerInterfaceMock_GetLastestVirtualBatchNumber_Call struct { - *mock.Call -} - -// GetLastestVirtualBatchNumber is a helper method to define mock.On call -// - ctx context.Context -func (_e *SynchronizerInterfaceMock_Expecter) GetLastestVirtualBatchNumber(ctx interface{}) *SynchronizerInterfaceMock_GetLastestVirtualBatchNumber_Call { - return &SynchronizerInterfaceMock_GetLastestVirtualBatchNumber_Call{Call: _e.mock.On("GetLastestVirtualBatchNumber", ctx)} -} - -func (_c *SynchronizerInterfaceMock_GetLastestVirtualBatchNumber_Call) Run(run func(ctx context.Context)) *SynchronizerInterfaceMock_GetLastestVirtualBatchNumber_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context)) - }) - return _c -} - -func (_c *SynchronizerInterfaceMock_GetLastestVirtualBatchNumber_Call) Return(_a0 uint64, _a1 error) *SynchronizerInterfaceMock_GetLastestVirtualBatchNumber_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *SynchronizerInterfaceMock_GetLastestVirtualBatchNumber_Call) RunAndReturn(run func(context.Context) (uint64, error)) 
*SynchronizerInterfaceMock_GetLastestVirtualBatchNumber_Call { - _c.Call.Return(run) - return _c -} - -// GetLeafsByL1InfoRoot provides a mock function with given fields: ctx, l1InfoRoot -func (_m *SynchronizerInterfaceMock) GetLeafsByL1InfoRoot(ctx context.Context, l1InfoRoot common.Hash) ([]synchronizer.L1InfoTreeLeaf, error) { - ret := _m.Called(ctx, l1InfoRoot) - - if len(ret) == 0 { - panic("no return value specified for GetLeafsByL1InfoRoot") - } - - var r0 []synchronizer.L1InfoTreeLeaf - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, common.Hash) ([]synchronizer.L1InfoTreeLeaf, error)); ok { - return rf(ctx, l1InfoRoot) - } - if rf, ok := ret.Get(0).(func(context.Context, common.Hash) []synchronizer.L1InfoTreeLeaf); ok { - r0 = rf(ctx, l1InfoRoot) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).([]synchronizer.L1InfoTreeLeaf) - } - } - - if rf, ok := ret.Get(1).(func(context.Context, common.Hash) error); ok { - r1 = rf(ctx, l1InfoRoot) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// SynchronizerInterfaceMock_GetLeafsByL1InfoRoot_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetLeafsByL1InfoRoot' -type SynchronizerInterfaceMock_GetLeafsByL1InfoRoot_Call struct { - *mock.Call -} - -// GetLeafsByL1InfoRoot is a helper method to define mock.On call -// - ctx context.Context -// - l1InfoRoot common.Hash -func (_e *SynchronizerInterfaceMock_Expecter) GetLeafsByL1InfoRoot(ctx interface{}, l1InfoRoot interface{}) *SynchronizerInterfaceMock_GetLeafsByL1InfoRoot_Call { - return &SynchronizerInterfaceMock_GetLeafsByL1InfoRoot_Call{Call: _e.mock.On("GetLeafsByL1InfoRoot", ctx, l1InfoRoot)} -} - -func (_c *SynchronizerInterfaceMock_GetLeafsByL1InfoRoot_Call) Run(run func(ctx context.Context, l1InfoRoot common.Hash)) *SynchronizerInterfaceMock_GetLeafsByL1InfoRoot_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(common.Hash)) - }) - return _c -} - 
-func (_c *SynchronizerInterfaceMock_GetLeafsByL1InfoRoot_Call) Return(_a0 []synchronizer.L1InfoTreeLeaf, _a1 error) *SynchronizerInterfaceMock_GetLeafsByL1InfoRoot_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *SynchronizerInterfaceMock_GetLeafsByL1InfoRoot_Call) RunAndReturn(run func(context.Context, common.Hash) ([]synchronizer.L1InfoTreeLeaf, error)) *SynchronizerInterfaceMock_GetLeafsByL1InfoRoot_Call { - _c.Call.Return(run) - return _c -} - -// GetSequenceByBatchNumber provides a mock function with given fields: ctx, batchNumber -func (_m *SynchronizerInterfaceMock) GetSequenceByBatchNumber(ctx context.Context, batchNumber uint64) (*synchronizer.SequencedBatches, error) { - ret := _m.Called(ctx, batchNumber) - - if len(ret) == 0 { - panic("no return value specified for GetSequenceByBatchNumber") - } - - var r0 *synchronizer.SequencedBatches - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, uint64) (*synchronizer.SequencedBatches, error)); ok { - return rf(ctx, batchNumber) - } - if rf, ok := ret.Get(0).(func(context.Context, uint64) *synchronizer.SequencedBatches); ok { - r0 = rf(ctx, batchNumber) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(*synchronizer.SequencedBatches) - } - } - - if rf, ok := ret.Get(1).(func(context.Context, uint64) error); ok { - r1 = rf(ctx, batchNumber) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// SynchronizerInterfaceMock_GetSequenceByBatchNumber_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetSequenceByBatchNumber' -type SynchronizerInterfaceMock_GetSequenceByBatchNumber_Call struct { - *mock.Call -} - -// GetSequenceByBatchNumber is a helper method to define mock.On call -// - ctx context.Context -// - batchNumber uint64 -func (_e *SynchronizerInterfaceMock_Expecter) GetSequenceByBatchNumber(ctx interface{}, batchNumber interface{}) *SynchronizerInterfaceMock_GetSequenceByBatchNumber_Call { - return 
&SynchronizerInterfaceMock_GetSequenceByBatchNumber_Call{Call: _e.mock.On("GetSequenceByBatchNumber", ctx, batchNumber)} -} - -func (_c *SynchronizerInterfaceMock_GetSequenceByBatchNumber_Call) Run(run func(ctx context.Context, batchNumber uint64)) *SynchronizerInterfaceMock_GetSequenceByBatchNumber_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(uint64)) - }) - return _c -} - -func (_c *SynchronizerInterfaceMock_GetSequenceByBatchNumber_Call) Return(_a0 *synchronizer.SequencedBatches, _a1 error) *SynchronizerInterfaceMock_GetSequenceByBatchNumber_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *SynchronizerInterfaceMock_GetSequenceByBatchNumber_Call) RunAndReturn(run func(context.Context, uint64) (*synchronizer.SequencedBatches, error)) *SynchronizerInterfaceMock_GetSequenceByBatchNumber_Call { - _c.Call.Return(run) - return _c -} - -// GetVirtualBatchByBatchNumber provides a mock function with given fields: ctx, batchNumber -func (_m *SynchronizerInterfaceMock) GetVirtualBatchByBatchNumber(ctx context.Context, batchNumber uint64) (*synchronizer.VirtualBatch, error) { - ret := _m.Called(ctx, batchNumber) - - if len(ret) == 0 { - panic("no return value specified for GetVirtualBatchByBatchNumber") - } - - var r0 *synchronizer.VirtualBatch - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, uint64) (*synchronizer.VirtualBatch, error)); ok { - return rf(ctx, batchNumber) - } - if rf, ok := ret.Get(0).(func(context.Context, uint64) *synchronizer.VirtualBatch); ok { - r0 = rf(ctx, batchNumber) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(*synchronizer.VirtualBatch) - } - } - - if rf, ok := ret.Get(1).(func(context.Context, uint64) error); ok { - r1 = rf(ctx, batchNumber) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// SynchronizerInterfaceMock_GetVirtualBatchByBatchNumber_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 
'GetVirtualBatchByBatchNumber' -type SynchronizerInterfaceMock_GetVirtualBatchByBatchNumber_Call struct { - *mock.Call -} - -// GetVirtualBatchByBatchNumber is a helper method to define mock.On call -// - ctx context.Context -// - batchNumber uint64 -func (_e *SynchronizerInterfaceMock_Expecter) GetVirtualBatchByBatchNumber(ctx interface{}, batchNumber interface{}) *SynchronizerInterfaceMock_GetVirtualBatchByBatchNumber_Call { - return &SynchronizerInterfaceMock_GetVirtualBatchByBatchNumber_Call{Call: _e.mock.On("GetVirtualBatchByBatchNumber", ctx, batchNumber)} -} - -func (_c *SynchronizerInterfaceMock_GetVirtualBatchByBatchNumber_Call) Run(run func(ctx context.Context, batchNumber uint64)) *SynchronizerInterfaceMock_GetVirtualBatchByBatchNumber_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(uint64)) - }) - return _c -} - -func (_c *SynchronizerInterfaceMock_GetVirtualBatchByBatchNumber_Call) Return(_a0 *synchronizer.VirtualBatch, _a1 error) *SynchronizerInterfaceMock_GetVirtualBatchByBatchNumber_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *SynchronizerInterfaceMock_GetVirtualBatchByBatchNumber_Call) RunAndReturn(run func(context.Context, uint64) (*synchronizer.VirtualBatch, error)) *SynchronizerInterfaceMock_GetVirtualBatchByBatchNumber_Call { - _c.Call.Return(run) - return _c -} - -// IsSynced provides a mock function with no fields -func (_m *SynchronizerInterfaceMock) IsSynced() bool { - ret := _m.Called() - - if len(ret) == 0 { - panic("no return value specified for IsSynced") - } - - var r0 bool - if rf, ok := ret.Get(0).(func() bool); ok { - r0 = rf() - } else { - r0 = ret.Get(0).(bool) - } - - return r0 -} - -// SynchronizerInterfaceMock_IsSynced_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'IsSynced' -type SynchronizerInterfaceMock_IsSynced_Call struct { - *mock.Call -} - -// IsSynced is a helper method to define mock.On call -func (_e 
*SynchronizerInterfaceMock_Expecter) IsSynced() *SynchronizerInterfaceMock_IsSynced_Call { - return &SynchronizerInterfaceMock_IsSynced_Call{Call: _e.mock.On("IsSynced")} -} - -func (_c *SynchronizerInterfaceMock_IsSynced_Call) Run(run func()) *SynchronizerInterfaceMock_IsSynced_Call { - _c.Call.Run(func(args mock.Arguments) { - run() - }) - return _c -} - -func (_c *SynchronizerInterfaceMock_IsSynced_Call) Return(_a0 bool) *SynchronizerInterfaceMock_IsSynced_Call { - _c.Call.Return(_a0) - return _c -} - -func (_c *SynchronizerInterfaceMock_IsSynced_Call) RunAndReturn(run func() bool) *SynchronizerInterfaceMock_IsSynced_Call { - _c.Call.Return(run) - return _c -} - -// SetCallbackOnReorgDone provides a mock function with given fields: callback -func (_m *SynchronizerInterfaceMock) SetCallbackOnReorgDone(callback func(synchronizer.ReorgExecutionResult)) { - _m.Called(callback) -} - -// SynchronizerInterfaceMock_SetCallbackOnReorgDone_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'SetCallbackOnReorgDone' -type SynchronizerInterfaceMock_SetCallbackOnReorgDone_Call struct { - *mock.Call -} - -// SetCallbackOnReorgDone is a helper method to define mock.On call -// - callback func(synchronizer.ReorgExecutionResult) -func (_e *SynchronizerInterfaceMock_Expecter) SetCallbackOnReorgDone(callback interface{}) *SynchronizerInterfaceMock_SetCallbackOnReorgDone_Call { - return &SynchronizerInterfaceMock_SetCallbackOnReorgDone_Call{Call: _e.mock.On("SetCallbackOnReorgDone", callback)} -} - -func (_c *SynchronizerInterfaceMock_SetCallbackOnReorgDone_Call) Run(run func(callback func(synchronizer.ReorgExecutionResult))) *SynchronizerInterfaceMock_SetCallbackOnReorgDone_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(func(synchronizer.ReorgExecutionResult))) - }) - return _c -} - -func (_c *SynchronizerInterfaceMock_SetCallbackOnReorgDone_Call) Return() *SynchronizerInterfaceMock_SetCallbackOnReorgDone_Call { - 
_c.Call.Return() - return _c -} - -func (_c *SynchronizerInterfaceMock_SetCallbackOnReorgDone_Call) RunAndReturn(run func(func(synchronizer.ReorgExecutionResult))) *SynchronizerInterfaceMock_SetCallbackOnReorgDone_Call { - _c.Run(run) - return _c -} - -// SetCallbackOnRollbackBatches provides a mock function with given fields: callback -func (_m *SynchronizerInterfaceMock) SetCallbackOnRollbackBatches(callback func(synchronizer.RollbackBatchesData)) { - _m.Called(callback) -} - -// SynchronizerInterfaceMock_SetCallbackOnRollbackBatches_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'SetCallbackOnRollbackBatches' -type SynchronizerInterfaceMock_SetCallbackOnRollbackBatches_Call struct { - *mock.Call -} - -// SetCallbackOnRollbackBatches is a helper method to define mock.On call -// - callback func(synchronizer.RollbackBatchesData) -func (_e *SynchronizerInterfaceMock_Expecter) SetCallbackOnRollbackBatches(callback interface{}) *SynchronizerInterfaceMock_SetCallbackOnRollbackBatches_Call { - return &SynchronizerInterfaceMock_SetCallbackOnRollbackBatches_Call{Call: _e.mock.On("SetCallbackOnRollbackBatches", callback)} -} - -func (_c *SynchronizerInterfaceMock_SetCallbackOnRollbackBatches_Call) Run(run func(callback func(synchronizer.RollbackBatchesData))) *SynchronizerInterfaceMock_SetCallbackOnRollbackBatches_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(func(synchronizer.RollbackBatchesData))) - }) - return _c -} - -func (_c *SynchronizerInterfaceMock_SetCallbackOnRollbackBatches_Call) Return() *SynchronizerInterfaceMock_SetCallbackOnRollbackBatches_Call { - _c.Call.Return() - return _c -} - -func (_c *SynchronizerInterfaceMock_SetCallbackOnRollbackBatches_Call) RunAndReturn(run func(func(synchronizer.RollbackBatchesData))) *SynchronizerInterfaceMock_SetCallbackOnRollbackBatches_Call { - _c.Run(run) - return _c -} - -// Stop provides a mock function with no fields -func (_m *SynchronizerInterfaceMock) 
Stop() { - _m.Called() -} - -// SynchronizerInterfaceMock_Stop_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Stop' -type SynchronizerInterfaceMock_Stop_Call struct { - *mock.Call -} - -// Stop is a helper method to define mock.On call -func (_e *SynchronizerInterfaceMock_Expecter) Stop() *SynchronizerInterfaceMock_Stop_Call { - return &SynchronizerInterfaceMock_Stop_Call{Call: _e.mock.On("Stop")} -} - -func (_c *SynchronizerInterfaceMock_Stop_Call) Run(run func()) *SynchronizerInterfaceMock_Stop_Call { - _c.Call.Run(func(args mock.Arguments) { - run() - }) - return _c -} - -func (_c *SynchronizerInterfaceMock_Stop_Call) Return() *SynchronizerInterfaceMock_Stop_Call { - _c.Call.Return() - return _c -} - -func (_c *SynchronizerInterfaceMock_Stop_Call) RunAndReturn(run func()) *SynchronizerInterfaceMock_Stop_Call { - _c.Run(run) - return _c -} - -// Sync provides a mock function with given fields: returnOnSync -func (_m *SynchronizerInterfaceMock) Sync(returnOnSync bool) error { - ret := _m.Called(returnOnSync) - - if len(ret) == 0 { - panic("no return value specified for Sync") - } - - var r0 error - if rf, ok := ret.Get(0).(func(bool) error); ok { - r0 = rf(returnOnSync) - } else { - r0 = ret.Error(0) - } - - return r0 -} - -// SynchronizerInterfaceMock_Sync_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Sync' -type SynchronizerInterfaceMock_Sync_Call struct { - *mock.Call -} - -// Sync is a helper method to define mock.On call -// - returnOnSync bool -func (_e *SynchronizerInterfaceMock_Expecter) Sync(returnOnSync interface{}) *SynchronizerInterfaceMock_Sync_Call { - return &SynchronizerInterfaceMock_Sync_Call{Call: _e.mock.On("Sync", returnOnSync)} -} - -func (_c *SynchronizerInterfaceMock_Sync_Call) Run(run func(returnOnSync bool)) *SynchronizerInterfaceMock_Sync_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(bool)) - }) - return _c -} - -func (_c 
*SynchronizerInterfaceMock_Sync_Call) Return(_a0 error) *SynchronizerInterfaceMock_Sync_Call { - _c.Call.Return(_a0) - return _c -} - -func (_c *SynchronizerInterfaceMock_Sync_Call) RunAndReturn(run func(bool) error) *SynchronizerInterfaceMock_Sync_Call { - _c.Call.Return(run) - return _c -} - -// NewSynchronizerInterfaceMock creates a new instance of SynchronizerInterfaceMock. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. -// The first argument is typically a *testing.T value. -func NewSynchronizerInterfaceMock(t interface { - mock.TestingT - Cleanup(func()) -}) *SynchronizerInterfaceMock { - mock := &SynchronizerInterfaceMock{} - mock.Mock.Test(t) - - t.Cleanup(func() { mock.AssertExpectations(t) }) - - return mock -} diff --git a/aggregator/mocks/mock_txer.go b/aggregator/mocks/mock_txer.go deleted file mode 100644 index 39a98d03..00000000 --- a/aggregator/mocks/mock_txer.go +++ /dev/null @@ -1,389 +0,0 @@ -// Code generated by mockery. DO NOT EDIT. 
- -package mocks - -import ( - sql "database/sql" - - mock "github.com/stretchr/testify/mock" -) - -// TxerMock is an autogenerated mock type for the Txer type -type TxerMock struct { - mock.Mock -} - -type TxerMock_Expecter struct { - mock *mock.Mock -} - -func (_m *TxerMock) EXPECT() *TxerMock_Expecter { - return &TxerMock_Expecter{mock: &_m.Mock} -} - -// AddCommitCallback provides a mock function with given fields: cb -func (_m *TxerMock) AddCommitCallback(cb func()) { - _m.Called(cb) -} - -// TxerMock_AddCommitCallback_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'AddCommitCallback' -type TxerMock_AddCommitCallback_Call struct { - *mock.Call -} - -// AddCommitCallback is a helper method to define mock.On call -// - cb func() -func (_e *TxerMock_Expecter) AddCommitCallback(cb interface{}) *TxerMock_AddCommitCallback_Call { - return &TxerMock_AddCommitCallback_Call{Call: _e.mock.On("AddCommitCallback", cb)} -} - -func (_c *TxerMock_AddCommitCallback_Call) Run(run func(cb func())) *TxerMock_AddCommitCallback_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(func())) - }) - return _c -} - -func (_c *TxerMock_AddCommitCallback_Call) Return() *TxerMock_AddCommitCallback_Call { - _c.Call.Return() - return _c -} - -func (_c *TxerMock_AddCommitCallback_Call) RunAndReturn(run func(func())) *TxerMock_AddCommitCallback_Call { - _c.Run(run) - return _c -} - -// AddRollbackCallback provides a mock function with given fields: cb -func (_m *TxerMock) AddRollbackCallback(cb func()) { - _m.Called(cb) -} - -// TxerMock_AddRollbackCallback_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'AddRollbackCallback' -type TxerMock_AddRollbackCallback_Call struct { - *mock.Call -} - -// AddRollbackCallback is a helper method to define mock.On call -// - cb func() -func (_e *TxerMock_Expecter) AddRollbackCallback(cb interface{}) *TxerMock_AddRollbackCallback_Call { - return 
&TxerMock_AddRollbackCallback_Call{Call: _e.mock.On("AddRollbackCallback", cb)} -} - -func (_c *TxerMock_AddRollbackCallback_Call) Run(run func(cb func())) *TxerMock_AddRollbackCallback_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(func())) - }) - return _c -} - -func (_c *TxerMock_AddRollbackCallback_Call) Return() *TxerMock_AddRollbackCallback_Call { - _c.Call.Return() - return _c -} - -func (_c *TxerMock_AddRollbackCallback_Call) RunAndReturn(run func(func())) *TxerMock_AddRollbackCallback_Call { - _c.Run(run) - return _c -} - -// Commit provides a mock function with no fields -func (_m *TxerMock) Commit() error { - ret := _m.Called() - - if len(ret) == 0 { - panic("no return value specified for Commit") - } - - var r0 error - if rf, ok := ret.Get(0).(func() error); ok { - r0 = rf() - } else { - r0 = ret.Error(0) - } - - return r0 -} - -// TxerMock_Commit_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Commit' -type TxerMock_Commit_Call struct { - *mock.Call -} - -// Commit is a helper method to define mock.On call -func (_e *TxerMock_Expecter) Commit() *TxerMock_Commit_Call { - return &TxerMock_Commit_Call{Call: _e.mock.On("Commit")} -} - -func (_c *TxerMock_Commit_Call) Run(run func()) *TxerMock_Commit_Call { - _c.Call.Run(func(args mock.Arguments) { - run() - }) - return _c -} - -func (_c *TxerMock_Commit_Call) Return(_a0 error) *TxerMock_Commit_Call { - _c.Call.Return(_a0) - return _c -} - -func (_c *TxerMock_Commit_Call) RunAndReturn(run func() error) *TxerMock_Commit_Call { - _c.Call.Return(run) - return _c -} - -// Exec provides a mock function with given fields: query, args -func (_m *TxerMock) Exec(query string, args ...interface{}) (sql.Result, error) { - var _ca []interface{} - _ca = append(_ca, query) - _ca = append(_ca, args...) - ret := _m.Called(_ca...) 
- - if len(ret) == 0 { - panic("no return value specified for Exec") - } - - var r0 sql.Result - var r1 error - if rf, ok := ret.Get(0).(func(string, ...interface{}) (sql.Result, error)); ok { - return rf(query, args...) - } - if rf, ok := ret.Get(0).(func(string, ...interface{}) sql.Result); ok { - r0 = rf(query, args...) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(sql.Result) - } - } - - if rf, ok := ret.Get(1).(func(string, ...interface{}) error); ok { - r1 = rf(query, args...) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// TxerMock_Exec_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Exec' -type TxerMock_Exec_Call struct { - *mock.Call -} - -// Exec is a helper method to define mock.On call -// - query string -// - args ...interface{} -func (_e *TxerMock_Expecter) Exec(query interface{}, args ...interface{}) *TxerMock_Exec_Call { - return &TxerMock_Exec_Call{Call: _e.mock.On("Exec", - append([]interface{}{query}, args...)...)} -} - -func (_c *TxerMock_Exec_Call) Run(run func(query string, args ...interface{})) *TxerMock_Exec_Call { - _c.Call.Run(func(args mock.Arguments) { - variadicArgs := make([]interface{}, len(args)-1) - for i, a := range args[1:] { - if a != nil { - variadicArgs[i] = a.(interface{}) - } - } - run(args[0].(string), variadicArgs...) - }) - return _c -} - -func (_c *TxerMock_Exec_Call) Return(_a0 sql.Result, _a1 error) *TxerMock_Exec_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *TxerMock_Exec_Call) RunAndReturn(run func(string, ...interface{}) (sql.Result, error)) *TxerMock_Exec_Call { - _c.Call.Return(run) - return _c -} - -// Query provides a mock function with given fields: query, args -func (_m *TxerMock) Query(query string, args ...interface{}) (*sql.Rows, error) { - var _ca []interface{} - _ca = append(_ca, query) - _ca = append(_ca, args...) - ret := _m.Called(_ca...) 
- - if len(ret) == 0 { - panic("no return value specified for Query") - } - - var r0 *sql.Rows - var r1 error - if rf, ok := ret.Get(0).(func(string, ...interface{}) (*sql.Rows, error)); ok { - return rf(query, args...) - } - if rf, ok := ret.Get(0).(func(string, ...interface{}) *sql.Rows); ok { - r0 = rf(query, args...) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(*sql.Rows) - } - } - - if rf, ok := ret.Get(1).(func(string, ...interface{}) error); ok { - r1 = rf(query, args...) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// TxerMock_Query_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Query' -type TxerMock_Query_Call struct { - *mock.Call -} - -// Query is a helper method to define mock.On call -// - query string -// - args ...interface{} -func (_e *TxerMock_Expecter) Query(query interface{}, args ...interface{}) *TxerMock_Query_Call { - return &TxerMock_Query_Call{Call: _e.mock.On("Query", - append([]interface{}{query}, args...)...)} -} - -func (_c *TxerMock_Query_Call) Run(run func(query string, args ...interface{})) *TxerMock_Query_Call { - _c.Call.Run(func(args mock.Arguments) { - variadicArgs := make([]interface{}, len(args)-1) - for i, a := range args[1:] { - if a != nil { - variadicArgs[i] = a.(interface{}) - } - } - run(args[0].(string), variadicArgs...) - }) - return _c -} - -func (_c *TxerMock_Query_Call) Return(_a0 *sql.Rows, _a1 error) *TxerMock_Query_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *TxerMock_Query_Call) RunAndReturn(run func(string, ...interface{}) (*sql.Rows, error)) *TxerMock_Query_Call { - _c.Call.Return(run) - return _c -} - -// QueryRow provides a mock function with given fields: query, args -func (_m *TxerMock) QueryRow(query string, args ...interface{}) *sql.Row { - var _ca []interface{} - _ca = append(_ca, query) - _ca = append(_ca, args...) - ret := _m.Called(_ca...) 
- - if len(ret) == 0 { - panic("no return value specified for QueryRow") - } - - var r0 *sql.Row - if rf, ok := ret.Get(0).(func(string, ...interface{}) *sql.Row); ok { - r0 = rf(query, args...) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(*sql.Row) - } - } - - return r0 -} - -// TxerMock_QueryRow_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'QueryRow' -type TxerMock_QueryRow_Call struct { - *mock.Call -} - -// QueryRow is a helper method to define mock.On call -// - query string -// - args ...interface{} -func (_e *TxerMock_Expecter) QueryRow(query interface{}, args ...interface{}) *TxerMock_QueryRow_Call { - return &TxerMock_QueryRow_Call{Call: _e.mock.On("QueryRow", - append([]interface{}{query}, args...)...)} -} - -func (_c *TxerMock_QueryRow_Call) Run(run func(query string, args ...interface{})) *TxerMock_QueryRow_Call { - _c.Call.Run(func(args mock.Arguments) { - variadicArgs := make([]interface{}, len(args)-1) - for i, a := range args[1:] { - if a != nil { - variadicArgs[i] = a.(interface{}) - } - } - run(args[0].(string), variadicArgs...) 
- }) - return _c -} - -func (_c *TxerMock_QueryRow_Call) Return(_a0 *sql.Row) *TxerMock_QueryRow_Call { - _c.Call.Return(_a0) - return _c -} - -func (_c *TxerMock_QueryRow_Call) RunAndReturn(run func(string, ...interface{}) *sql.Row) *TxerMock_QueryRow_Call { - _c.Call.Return(run) - return _c -} - -// Rollback provides a mock function with no fields -func (_m *TxerMock) Rollback() error { - ret := _m.Called() - - if len(ret) == 0 { - panic("no return value specified for Rollback") - } - - var r0 error - if rf, ok := ret.Get(0).(func() error); ok { - r0 = rf() - } else { - r0 = ret.Error(0) - } - - return r0 -} - -// TxerMock_Rollback_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Rollback' -type TxerMock_Rollback_Call struct { - *mock.Call -} - -// Rollback is a helper method to define mock.On call -func (_e *TxerMock_Expecter) Rollback() *TxerMock_Rollback_Call { - return &TxerMock_Rollback_Call{Call: _e.mock.On("Rollback")} -} - -func (_c *TxerMock_Rollback_Call) Run(run func()) *TxerMock_Rollback_Call { - _c.Call.Run(func(args mock.Arguments) { - run() - }) - return _c -} - -func (_c *TxerMock_Rollback_Call) Return(_a0 error) *TxerMock_Rollback_Call { - _c.Call.Return(_a0) - return _c -} - -func (_c *TxerMock_Rollback_Call) RunAndReturn(run func() error) *TxerMock_Rollback_Call { - _c.Call.Return(run) - return _c -} - -// NewTxerMock creates a new instance of TxerMock. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. -// The first argument is typically a *testing.T value. 
-func NewTxerMock(t interface { - mock.TestingT - Cleanup(func()) -}) *TxerMock { - mock := &TxerMock{} - mock.Mock.Test(t) - - t.Cleanup(func() { mock.AssertExpectations(t) }) - - return mock -} diff --git a/aggregator/prover/aggregator.pb.go b/aggregator/prover/aggregator.pb.go deleted file mode 100644 index b79d134b..00000000 --- a/aggregator/prover/aggregator.pb.go +++ /dev/null @@ -1,2819 +0,0 @@ -// Code generated by protoc-gen-go. DO NOT EDIT. -// versions: -// protoc-gen-go v1.34.1 -// protoc v5.27.0 -// source: aggregator.proto - -package prover - -import ( - protoreflect "google.golang.org/protobuf/reflect/protoreflect" - protoimpl "google.golang.org/protobuf/runtime/protoimpl" - reflect "reflect" - sync "sync" -) - -const ( - // Verify that this generated code is sufficiently up-to-date. - _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) - // Verify that runtime/protoimpl is sufficiently up-to-date. - _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) -) - -// * -// @dev Result -// - OK: succesfully completed -// - ERROR: request is not correct, i.e. input data is wrong -// - INTERNAL_ERROR: internal server error when delivering the response -type Result int32 - -const ( - Result_RESULT_UNSPECIFIED Result = 0 - Result_RESULT_OK Result = 1 - Result_RESULT_ERROR Result = 2 - Result_RESULT_INTERNAL_ERROR Result = 3 -) - -// Enum value maps for Result. 
-var ( - Result_name = map[int32]string{ - 0: "RESULT_UNSPECIFIED", - 1: "RESULT_OK", - 2: "RESULT_ERROR", - 3: "RESULT_INTERNAL_ERROR", - } - Result_value = map[string]int32{ - "RESULT_UNSPECIFIED": 0, - "RESULT_OK": 1, - "RESULT_ERROR": 2, - "RESULT_INTERNAL_ERROR": 3, - } -) - -func (x Result) Enum() *Result { - p := new(Result) - *p = x - return p -} - -func (x Result) String() string { - return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) -} - -func (Result) Descriptor() protoreflect.EnumDescriptor { - return file_aggregator_proto_enumTypes[0].Descriptor() -} - -func (Result) Type() protoreflect.EnumType { - return &file_aggregator_proto_enumTypes[0] -} - -func (x Result) Number() protoreflect.EnumNumber { - return protoreflect.EnumNumber(x) -} - -// Deprecated: Use Result.Descriptor instead. -func (Result) EnumDescriptor() ([]byte, []int) { - return file_aggregator_proto_rawDescGZIP(), []int{0} -} - -type GetStatusResponse_Status int32 - -const ( - GetStatusResponse_STATUS_UNSPECIFIED GetStatusResponse_Status = 0 - GetStatusResponse_STATUS_BOOTING GetStatusResponse_Status = 1 - GetStatusResponse_STATUS_COMPUTING GetStatusResponse_Status = 2 - GetStatusResponse_STATUS_IDLE GetStatusResponse_Status = 3 - GetStatusResponse_STATUS_HALT GetStatusResponse_Status = 4 -) - -// Enum value maps for GetStatusResponse_Status. 
-var ( - GetStatusResponse_Status_name = map[int32]string{ - 0: "STATUS_UNSPECIFIED", - 1: "STATUS_BOOTING", - 2: "STATUS_COMPUTING", - 3: "STATUS_IDLE", - 4: "STATUS_HALT", - } - GetStatusResponse_Status_value = map[string]int32{ - "STATUS_UNSPECIFIED": 0, - "STATUS_BOOTING": 1, - "STATUS_COMPUTING": 2, - "STATUS_IDLE": 3, - "STATUS_HALT": 4, - } -) - -func (x GetStatusResponse_Status) Enum() *GetStatusResponse_Status { - p := new(GetStatusResponse_Status) - *p = x - return p -} - -func (x GetStatusResponse_Status) String() string { - return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) -} - -func (GetStatusResponse_Status) Descriptor() protoreflect.EnumDescriptor { - return file_aggregator_proto_enumTypes[1].Descriptor() -} - -func (GetStatusResponse_Status) Type() protoreflect.EnumType { - return &file_aggregator_proto_enumTypes[1] -} - -func (x GetStatusResponse_Status) Number() protoreflect.EnumNumber { - return protoreflect.EnumNumber(x) -} - -// Deprecated: Use GetStatusResponse_Status.Descriptor instead. -func (GetStatusResponse_Status) EnumDescriptor() ([]byte, []int) { - return file_aggregator_proto_rawDescGZIP(), []int{10, 0} -} - -type GetProofResponse_Result int32 - -const ( - GetProofResponse_RESULT_UNSPECIFIED GetProofResponse_Result = 0 - GetProofResponse_RESULT_COMPLETED_OK GetProofResponse_Result = 1 - GetProofResponse_RESULT_ERROR GetProofResponse_Result = 2 - GetProofResponse_RESULT_COMPLETED_ERROR GetProofResponse_Result = 3 - GetProofResponse_RESULT_PENDING GetProofResponse_Result = 4 - GetProofResponse_RESULT_INTERNAL_ERROR GetProofResponse_Result = 5 - GetProofResponse_RESULT_CANCEL GetProofResponse_Result = 6 -) - -// Enum value maps for GetProofResponse_Result. 
-var ( - GetProofResponse_Result_name = map[int32]string{ - 0: "RESULT_UNSPECIFIED", - 1: "RESULT_COMPLETED_OK", - 2: "RESULT_ERROR", - 3: "RESULT_COMPLETED_ERROR", - 4: "RESULT_PENDING", - 5: "RESULT_INTERNAL_ERROR", - 6: "RESULT_CANCEL", - } - GetProofResponse_Result_value = map[string]int32{ - "RESULT_UNSPECIFIED": 0, - "RESULT_COMPLETED_OK": 1, - "RESULT_ERROR": 2, - "RESULT_COMPLETED_ERROR": 3, - "RESULT_PENDING": 4, - "RESULT_INTERNAL_ERROR": 5, - "RESULT_CANCEL": 6, - } -) - -func (x GetProofResponse_Result) Enum() *GetProofResponse_Result { - p := new(GetProofResponse_Result) - *p = x - return p -} - -func (x GetProofResponse_Result) String() string { - return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) -} - -func (GetProofResponse_Result) Descriptor() protoreflect.EnumDescriptor { - return file_aggregator_proto_enumTypes[2].Descriptor() -} - -func (GetProofResponse_Result) Type() protoreflect.EnumType { - return &file_aggregator_proto_enumTypes[2] -} - -func (x GetProofResponse_Result) Number() protoreflect.EnumNumber { - return protoreflect.EnumNumber(x) -} - -// Deprecated: Use GetProofResponse_Result.Descriptor instead. 
-func (GetProofResponse_Result) EnumDescriptor() ([]byte, []int) { - return file_aggregator_proto_rawDescGZIP(), []int{15, 0} -} - -type Version struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - V0_0_1 string `protobuf:"bytes,1,opt,name=v0_0_1,json=v001,proto3" json:"v0_0_1,omitempty"` -} - -func (x *Version) Reset() { - *x = Version{} - if protoimpl.UnsafeEnabled { - mi := &file_aggregator_proto_msgTypes[0] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *Version) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*Version) ProtoMessage() {} - -func (x *Version) ProtoReflect() protoreflect.Message { - mi := &file_aggregator_proto_msgTypes[0] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use Version.ProtoReflect.Descriptor instead. 
-func (*Version) Descriptor() ([]byte, []int) { - return file_aggregator_proto_rawDescGZIP(), []int{0} -} - -func (x *Version) GetV0_0_1() string { - if x != nil { - return x.V0_0_1 - } - return "" -} - -type AggregatorMessage struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` - // Types that are assignable to Request: - // - // *AggregatorMessage_GetStatusRequest - // *AggregatorMessage_GenBatchProofRequest - // *AggregatorMessage_GenAggregatedProofRequest - // *AggregatorMessage_GenFinalProofRequest - // *AggregatorMessage_CancelRequest - // *AggregatorMessage_GetProofRequest - // *AggregatorMessage_GenStatelessBatchProofRequest - Request isAggregatorMessage_Request `protobuf_oneof:"request"` -} - -func (x *AggregatorMessage) Reset() { - *x = AggregatorMessage{} - if protoimpl.UnsafeEnabled { - mi := &file_aggregator_proto_msgTypes[1] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *AggregatorMessage) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*AggregatorMessage) ProtoMessage() {} - -func (x *AggregatorMessage) ProtoReflect() protoreflect.Message { - mi := &file_aggregator_proto_msgTypes[1] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use AggregatorMessage.ProtoReflect.Descriptor instead. 
-func (*AggregatorMessage) Descriptor() ([]byte, []int) { - return file_aggregator_proto_rawDescGZIP(), []int{1} -} - -func (x *AggregatorMessage) GetId() string { - if x != nil { - return x.Id - } - return "" -} - -func (m *AggregatorMessage) GetRequest() isAggregatorMessage_Request { - if m != nil { - return m.Request - } - return nil -} - -func (x *AggregatorMessage) GetGetStatusRequest() *GetStatusRequest { - if x, ok := x.GetRequest().(*AggregatorMessage_GetStatusRequest); ok { - return x.GetStatusRequest - } - return nil -} - -func (x *AggregatorMessage) GetGenBatchProofRequest() *GenBatchProofRequest { - if x, ok := x.GetRequest().(*AggregatorMessage_GenBatchProofRequest); ok { - return x.GenBatchProofRequest - } - return nil -} - -func (x *AggregatorMessage) GetGenAggregatedProofRequest() *GenAggregatedProofRequest { - if x, ok := x.GetRequest().(*AggregatorMessage_GenAggregatedProofRequest); ok { - return x.GenAggregatedProofRequest - } - return nil -} - -func (x *AggregatorMessage) GetGenFinalProofRequest() *GenFinalProofRequest { - if x, ok := x.GetRequest().(*AggregatorMessage_GenFinalProofRequest); ok { - return x.GenFinalProofRequest - } - return nil -} - -func (x *AggregatorMessage) GetCancelRequest() *CancelRequest { - if x, ok := x.GetRequest().(*AggregatorMessage_CancelRequest); ok { - return x.CancelRequest - } - return nil -} - -func (x *AggregatorMessage) GetGetProofRequest() *GetProofRequest { - if x, ok := x.GetRequest().(*AggregatorMessage_GetProofRequest); ok { - return x.GetProofRequest - } - return nil -} - -func (x *AggregatorMessage) GetGenStatelessBatchProofRequest() *GenStatelessBatchProofRequest { - if x, ok := x.GetRequest().(*AggregatorMessage_GenStatelessBatchProofRequest); ok { - return x.GenStatelessBatchProofRequest - } - return nil -} - -type isAggregatorMessage_Request interface { - isAggregatorMessage_Request() -} - -type AggregatorMessage_GetStatusRequest struct { - GetStatusRequest *GetStatusRequest 
`protobuf:"bytes,2,opt,name=get_status_request,json=getStatusRequest,proto3,oneof"` -} - -type AggregatorMessage_GenBatchProofRequest struct { - GenBatchProofRequest *GenBatchProofRequest `protobuf:"bytes,3,opt,name=gen_batch_proof_request,json=genBatchProofRequest,proto3,oneof"` -} - -type AggregatorMessage_GenAggregatedProofRequest struct { - GenAggregatedProofRequest *GenAggregatedProofRequest `protobuf:"bytes,4,opt,name=gen_aggregated_proof_request,json=genAggregatedProofRequest,proto3,oneof"` -} - -type AggregatorMessage_GenFinalProofRequest struct { - GenFinalProofRequest *GenFinalProofRequest `protobuf:"bytes,5,opt,name=gen_final_proof_request,json=genFinalProofRequest,proto3,oneof"` -} - -type AggregatorMessage_CancelRequest struct { - CancelRequest *CancelRequest `protobuf:"bytes,6,opt,name=cancel_request,json=cancelRequest,proto3,oneof"` -} - -type AggregatorMessage_GetProofRequest struct { - GetProofRequest *GetProofRequest `protobuf:"bytes,7,opt,name=get_proof_request,json=getProofRequest,proto3,oneof"` -} - -type AggregatorMessage_GenStatelessBatchProofRequest struct { - GenStatelessBatchProofRequest *GenStatelessBatchProofRequest `protobuf:"bytes,8,opt,name=gen_stateless_batch_proof_request,json=genStatelessBatchProofRequest,proto3,oneof"` -} - -func (*AggregatorMessage_GetStatusRequest) isAggregatorMessage_Request() {} - -func (*AggregatorMessage_GenBatchProofRequest) isAggregatorMessage_Request() {} - -func (*AggregatorMessage_GenAggregatedProofRequest) isAggregatorMessage_Request() {} - -func (*AggregatorMessage_GenFinalProofRequest) isAggregatorMessage_Request() {} - -func (*AggregatorMessage_CancelRequest) isAggregatorMessage_Request() {} - -func (*AggregatorMessage_GetProofRequest) isAggregatorMessage_Request() {} - -func (*AggregatorMessage_GenStatelessBatchProofRequest) isAggregatorMessage_Request() {} - -type ProverMessage struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Id 
string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` - // Types that are assignable to Response: - // - // *ProverMessage_GetStatusResponse - // *ProverMessage_GenBatchProofResponse - // *ProverMessage_GenAggregatedProofResponse - // *ProverMessage_GenFinalProofResponse - // *ProverMessage_CancelResponse - // *ProverMessage_GetProofResponse - Response isProverMessage_Response `protobuf_oneof:"response"` -} - -func (x *ProverMessage) Reset() { - *x = ProverMessage{} - if protoimpl.UnsafeEnabled { - mi := &file_aggregator_proto_msgTypes[2] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *ProverMessage) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*ProverMessage) ProtoMessage() {} - -func (x *ProverMessage) ProtoReflect() protoreflect.Message { - mi := &file_aggregator_proto_msgTypes[2] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use ProverMessage.ProtoReflect.Descriptor instead. 
-func (*ProverMessage) Descriptor() ([]byte, []int) { - return file_aggregator_proto_rawDescGZIP(), []int{2} -} - -func (x *ProverMessage) GetId() string { - if x != nil { - return x.Id - } - return "" -} - -func (m *ProverMessage) GetResponse() isProverMessage_Response { - if m != nil { - return m.Response - } - return nil -} - -func (x *ProverMessage) GetGetStatusResponse() *GetStatusResponse { - if x, ok := x.GetResponse().(*ProverMessage_GetStatusResponse); ok { - return x.GetStatusResponse - } - return nil -} - -func (x *ProverMessage) GetGenBatchProofResponse() *GenBatchProofResponse { - if x, ok := x.GetResponse().(*ProverMessage_GenBatchProofResponse); ok { - return x.GenBatchProofResponse - } - return nil -} - -func (x *ProverMessage) GetGenAggregatedProofResponse() *GenAggregatedProofResponse { - if x, ok := x.GetResponse().(*ProverMessage_GenAggregatedProofResponse); ok { - return x.GenAggregatedProofResponse - } - return nil -} - -func (x *ProverMessage) GetGenFinalProofResponse() *GenFinalProofResponse { - if x, ok := x.GetResponse().(*ProverMessage_GenFinalProofResponse); ok { - return x.GenFinalProofResponse - } - return nil -} - -func (x *ProverMessage) GetCancelResponse() *CancelResponse { - if x, ok := x.GetResponse().(*ProverMessage_CancelResponse); ok { - return x.CancelResponse - } - return nil -} - -func (x *ProverMessage) GetGetProofResponse() *GetProofResponse { - if x, ok := x.GetResponse().(*ProverMessage_GetProofResponse); ok { - return x.GetProofResponse - } - return nil -} - -type isProverMessage_Response interface { - isProverMessage_Response() -} - -type ProverMessage_GetStatusResponse struct { - GetStatusResponse *GetStatusResponse `protobuf:"bytes,2,opt,name=get_status_response,json=getStatusResponse,proto3,oneof"` -} - -type ProverMessage_GenBatchProofResponse struct { - GenBatchProofResponse *GenBatchProofResponse `protobuf:"bytes,3,opt,name=gen_batch_proof_response,json=genBatchProofResponse,proto3,oneof"` -} - -type 
ProverMessage_GenAggregatedProofResponse struct { - GenAggregatedProofResponse *GenAggregatedProofResponse `protobuf:"bytes,4,opt,name=gen_aggregated_proof_response,json=genAggregatedProofResponse,proto3,oneof"` -} - -type ProverMessage_GenFinalProofResponse struct { - GenFinalProofResponse *GenFinalProofResponse `protobuf:"bytes,5,opt,name=gen_final_proof_response,json=genFinalProofResponse,proto3,oneof"` -} - -type ProverMessage_CancelResponse struct { - CancelResponse *CancelResponse `protobuf:"bytes,6,opt,name=cancel_response,json=cancelResponse,proto3,oneof"` -} - -type ProverMessage_GetProofResponse struct { - GetProofResponse *GetProofResponse `protobuf:"bytes,7,opt,name=get_proof_response,json=getProofResponse,proto3,oneof"` -} - -func (*ProverMessage_GetStatusResponse) isProverMessage_Response() {} - -func (*ProverMessage_GenBatchProofResponse) isProverMessage_Response() {} - -func (*ProverMessage_GenAggregatedProofResponse) isProverMessage_Response() {} - -func (*ProverMessage_GenFinalProofResponse) isProverMessage_Response() {} - -func (*ProverMessage_CancelResponse) isProverMessage_Response() {} - -func (*ProverMessage_GetProofResponse) isProverMessage_Response() {} - -// * -// @dev GetStatusRequest -type GetStatusRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields -} - -func (x *GetStatusRequest) Reset() { - *x = GetStatusRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_aggregator_proto_msgTypes[3] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *GetStatusRequest) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*GetStatusRequest) ProtoMessage() {} - -func (x *GetStatusRequest) ProtoReflect() protoreflect.Message { - mi := &file_aggregator_proto_msgTypes[3] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - 
ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use GetStatusRequest.ProtoReflect.Descriptor instead. -func (*GetStatusRequest) Descriptor() ([]byte, []int) { - return file_aggregator_proto_rawDescGZIP(), []int{3} -} - -// * -// @dev GenBatchProofRequest -// @param {input} - input prover -type GenBatchProofRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Input *InputProver `protobuf:"bytes,1,opt,name=input,proto3" json:"input,omitempty"` -} - -func (x *GenBatchProofRequest) Reset() { - *x = GenBatchProofRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_aggregator_proto_msgTypes[4] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *GenBatchProofRequest) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*GenBatchProofRequest) ProtoMessage() {} - -func (x *GenBatchProofRequest) ProtoReflect() protoreflect.Message { - mi := &file_aggregator_proto_msgTypes[4] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use GenBatchProofRequest.ProtoReflect.Descriptor instead. 
-func (*GenBatchProofRequest) Descriptor() ([]byte, []int) { - return file_aggregator_proto_rawDescGZIP(), []int{4} -} - -func (x *GenBatchProofRequest) GetInput() *InputProver { - if x != nil { - return x.Input - } - return nil -} - -type GenStatelessBatchProofRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Input *StatelessInputProver `protobuf:"bytes,1,opt,name=input,proto3" json:"input,omitempty"` -} - -func (x *GenStatelessBatchProofRequest) Reset() { - *x = GenStatelessBatchProofRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_aggregator_proto_msgTypes[5] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *GenStatelessBatchProofRequest) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*GenStatelessBatchProofRequest) ProtoMessage() {} - -func (x *GenStatelessBatchProofRequest) ProtoReflect() protoreflect.Message { - mi := &file_aggregator_proto_msgTypes[5] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use GenStatelessBatchProofRequest.ProtoReflect.Descriptor instead. 
-func (*GenStatelessBatchProofRequest) Descriptor() ([]byte, []int) { - return file_aggregator_proto_rawDescGZIP(), []int{5} -} - -func (x *GenStatelessBatchProofRequest) GetInput() *StatelessInputProver { - if x != nil { - return x.Input - } - return nil -} - -// * -// @dev GenAggregatedProofRequest -// @param {recursive_proof_1} - proof json of the first batch to aggregate -// @param {recursive_proof_2} - proof json of the second batch to aggregate -type GenAggregatedProofRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - RecursiveProof_1 string `protobuf:"bytes,1,opt,name=recursive_proof_1,json=recursiveProof1,proto3" json:"recursive_proof_1,omitempty"` - RecursiveProof_2 string `protobuf:"bytes,2,opt,name=recursive_proof_2,json=recursiveProof2,proto3" json:"recursive_proof_2,omitempty"` -} - -func (x *GenAggregatedProofRequest) Reset() { - *x = GenAggregatedProofRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_aggregator_proto_msgTypes[6] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *GenAggregatedProofRequest) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*GenAggregatedProofRequest) ProtoMessage() {} - -func (x *GenAggregatedProofRequest) ProtoReflect() protoreflect.Message { - mi := &file_aggregator_proto_msgTypes[6] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use GenAggregatedProofRequest.ProtoReflect.Descriptor instead. 
-func (*GenAggregatedProofRequest) Descriptor() ([]byte, []int) { - return file_aggregator_proto_rawDescGZIP(), []int{6} -} - -func (x *GenAggregatedProofRequest) GetRecursiveProof_1() string { - if x != nil { - return x.RecursiveProof_1 - } - return "" -} - -func (x *GenAggregatedProofRequest) GetRecursiveProof_2() string { - if x != nil { - return x.RecursiveProof_2 - } - return "" -} - -// * -// @dev GenFinalProofRequest -// @param {recursive_proof} - proof json of the batch or aggregated proof to finalise -// @param {aggregator_addr} - address of the aggregator -type GenFinalProofRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - RecursiveProof string `protobuf:"bytes,1,opt,name=recursive_proof,json=recursiveProof,proto3" json:"recursive_proof,omitempty"` - AggregatorAddr string `protobuf:"bytes,2,opt,name=aggregator_addr,json=aggregatorAddr,proto3" json:"aggregator_addr,omitempty"` -} - -func (x *GenFinalProofRequest) Reset() { - *x = GenFinalProofRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_aggregator_proto_msgTypes[7] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *GenFinalProofRequest) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*GenFinalProofRequest) ProtoMessage() {} - -func (x *GenFinalProofRequest) ProtoReflect() protoreflect.Message { - mi := &file_aggregator_proto_msgTypes[7] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use GenFinalProofRequest.ProtoReflect.Descriptor instead. 
-func (*GenFinalProofRequest) Descriptor() ([]byte, []int) { - return file_aggregator_proto_rawDescGZIP(), []int{7} -} - -func (x *GenFinalProofRequest) GetRecursiveProof() string { - if x != nil { - return x.RecursiveProof - } - return "" -} - -func (x *GenFinalProofRequest) GetAggregatorAddr() string { - if x != nil { - return x.AggregatorAddr - } - return "" -} - -// * -// @dev CancelRequest -// @param {id} - identifier of the proof request to cancel -type CancelRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` -} - -func (x *CancelRequest) Reset() { - *x = CancelRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_aggregator_proto_msgTypes[8] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *CancelRequest) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*CancelRequest) ProtoMessage() {} - -func (x *CancelRequest) ProtoReflect() protoreflect.Message { - mi := &file_aggregator_proto_msgTypes[8] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use CancelRequest.ProtoReflect.Descriptor instead. 
-func (*CancelRequest) Descriptor() ([]byte, []int) { - return file_aggregator_proto_rawDescGZIP(), []int{8} -} - -func (x *CancelRequest) GetId() string { - if x != nil { - return x.Id - } - return "" -} - -// * -// @dev Request GetProof -// @param {id} - proof identifier of the proof request -// @param {timeout} - time to wait until the service responds -type GetProofRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` - Timeout uint64 `protobuf:"varint,2,opt,name=timeout,proto3" json:"timeout,omitempty"` -} - -func (x *GetProofRequest) Reset() { - *x = GetProofRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_aggregator_proto_msgTypes[9] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *GetProofRequest) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*GetProofRequest) ProtoMessage() {} - -func (x *GetProofRequest) ProtoReflect() protoreflect.Message { - mi := &file_aggregator_proto_msgTypes[9] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use GetProofRequest.ProtoReflect.Descriptor instead. 
-func (*GetProofRequest) Descriptor() ([]byte, []int) { - return file_aggregator_proto_rawDescGZIP(), []int{9} -} - -func (x *GetProofRequest) GetId() string { - if x != nil { - return x.Id - } - return "" -} - -func (x *GetProofRequest) GetTimeout() uint64 { - if x != nil { - return x.Timeout - } - return 0 -} - -// * -// @dev Response GetStatus -// @param {status} - server status -// - BOOTING: being ready to compute proofs -// - COMPUTING: busy computing a proof -// - IDLE: waiting for a proof to compute -// - HALT: stop -// @param {last_computed_request_id} - last proof identifier that has been computed -// @param {last_computed_end_time} - last proof timestamp when it was finished -// @param {current_computing_request_id} - id of the proof that is being computed -// @param {current_computing_start_time} - timestamp when the proof that is being computed started -// @param {version_proto} - .proto verion -// @param {version_server} - server version -// @param {pending_request_queue_ids} - list of identifierss of proof requests that are in the pending queue -// @param {prover_name} - id of this prover server, normally specified via config.json, or UNSPECIFIED otherwise; it does not change if prover reboots -// @param {prover_id} - id of this prover instance or reboot; it changes if prover reboots; it is a UUID, automatically generated during the initialization -// @param {number_of_cores} - number of cores in the system where the prover is running -// @param {total_memory} - total memory in the system where the prover is running -// @param {free_memory} - free memory in the system where the prover is running -type GetStatusResponse struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Status GetStatusResponse_Status `protobuf:"varint,1,opt,name=status,proto3,enum=aggregator.v1.GetStatusResponse_Status" json:"status,omitempty"` - LastComputedRequestId string 
`protobuf:"bytes,2,opt,name=last_computed_request_id,json=lastComputedRequestId,proto3" json:"last_computed_request_id,omitempty"` - LastComputedEndTime uint64 `protobuf:"varint,3,opt,name=last_computed_end_time,json=lastComputedEndTime,proto3" json:"last_computed_end_time,omitempty"` - CurrentComputingRequestId string `protobuf:"bytes,4,opt,name=current_computing_request_id,json=currentComputingRequestId,proto3" json:"current_computing_request_id,omitempty"` - CurrentComputingStartTime uint64 `protobuf:"varint,5,opt,name=current_computing_start_time,json=currentComputingStartTime,proto3" json:"current_computing_start_time,omitempty"` - VersionProto string `protobuf:"bytes,6,opt,name=version_proto,json=versionProto,proto3" json:"version_proto,omitempty"` - VersionServer string `protobuf:"bytes,7,opt,name=version_server,json=versionServer,proto3" json:"version_server,omitempty"` - PendingRequestQueueIds []string `protobuf:"bytes,8,rep,name=pending_request_queue_ids,json=pendingRequestQueueIds,proto3" json:"pending_request_queue_ids,omitempty"` - ProverName string `protobuf:"bytes,9,opt,name=prover_name,json=proverName,proto3" json:"prover_name,omitempty"` - ProverId string `protobuf:"bytes,10,opt,name=prover_id,json=proverId,proto3" json:"prover_id,omitempty"` - NumberOfCores uint64 `protobuf:"varint,11,opt,name=number_of_cores,json=numberOfCores,proto3" json:"number_of_cores,omitempty"` - TotalMemory uint64 `protobuf:"varint,12,opt,name=total_memory,json=totalMemory,proto3" json:"total_memory,omitempty"` - FreeMemory uint64 `protobuf:"varint,13,opt,name=free_memory,json=freeMemory,proto3" json:"free_memory,omitempty"` - ForkId uint64 `protobuf:"varint,14,opt,name=fork_id,json=forkId,proto3" json:"fork_id,omitempty"` -} - -func (x *GetStatusResponse) Reset() { - *x = GetStatusResponse{} - if protoimpl.UnsafeEnabled { - mi := &file_aggregator_proto_msgTypes[10] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x 
*GetStatusResponse) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*GetStatusResponse) ProtoMessage() {} - -func (x *GetStatusResponse) ProtoReflect() protoreflect.Message { - mi := &file_aggregator_proto_msgTypes[10] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use GetStatusResponse.ProtoReflect.Descriptor instead. -func (*GetStatusResponse) Descriptor() ([]byte, []int) { - return file_aggregator_proto_rawDescGZIP(), []int{10} -} - -func (x *GetStatusResponse) GetStatus() GetStatusResponse_Status { - if x != nil { - return x.Status - } - return GetStatusResponse_STATUS_UNSPECIFIED -} - -func (x *GetStatusResponse) GetLastComputedRequestId() string { - if x != nil { - return x.LastComputedRequestId - } - return "" -} - -func (x *GetStatusResponse) GetLastComputedEndTime() uint64 { - if x != nil { - return x.LastComputedEndTime - } - return 0 -} - -func (x *GetStatusResponse) GetCurrentComputingRequestId() string { - if x != nil { - return x.CurrentComputingRequestId - } - return "" -} - -func (x *GetStatusResponse) GetCurrentComputingStartTime() uint64 { - if x != nil { - return x.CurrentComputingStartTime - } - return 0 -} - -func (x *GetStatusResponse) GetVersionProto() string { - if x != nil { - return x.VersionProto - } - return "" -} - -func (x *GetStatusResponse) GetVersionServer() string { - if x != nil { - return x.VersionServer - } - return "" -} - -func (x *GetStatusResponse) GetPendingRequestQueueIds() []string { - if x != nil { - return x.PendingRequestQueueIds - } - return nil -} - -func (x *GetStatusResponse) GetProverName() string { - if x != nil { - return x.ProverName - } - return "" -} - -func (x *GetStatusResponse) GetProverId() string { - if x != nil { - return x.ProverId - } - return "" -} - -func (x *GetStatusResponse) GetNumberOfCores() 
uint64 { - if x != nil { - return x.NumberOfCores - } - return 0 -} - -func (x *GetStatusResponse) GetTotalMemory() uint64 { - if x != nil { - return x.TotalMemory - } - return 0 -} - -func (x *GetStatusResponse) GetFreeMemory() uint64 { - if x != nil { - return x.FreeMemory - } - return 0 -} - -func (x *GetStatusResponse) GetForkId() uint64 { - if x != nil { - return x.ForkId - } - return 0 -} - -// * -// @dev GenBatchProofResponse -// @param {id} - proof identifier, to be used in GetProofRequest() -// @param {result} - request result -type GenBatchProofResponse struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` - Result Result `protobuf:"varint,2,opt,name=result,proto3,enum=aggregator.v1.Result" json:"result,omitempty"` -} - -func (x *GenBatchProofResponse) Reset() { - *x = GenBatchProofResponse{} - if protoimpl.UnsafeEnabled { - mi := &file_aggregator_proto_msgTypes[11] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *GenBatchProofResponse) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*GenBatchProofResponse) ProtoMessage() {} - -func (x *GenBatchProofResponse) ProtoReflect() protoreflect.Message { - mi := &file_aggregator_proto_msgTypes[11] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use GenBatchProofResponse.ProtoReflect.Descriptor instead. 
-func (*GenBatchProofResponse) Descriptor() ([]byte, []int) { - return file_aggregator_proto_rawDescGZIP(), []int{11} -} - -func (x *GenBatchProofResponse) GetId() string { - if x != nil { - return x.Id - } - return "" -} - -func (x *GenBatchProofResponse) GetResult() Result { - if x != nil { - return x.Result - } - return Result_RESULT_UNSPECIFIED -} - -// * -// @dev GenAggregatedProofResponse -// @param {id} - proof identifier, to be used in GetProofRequest() -// @param {result} - request result -type GenAggregatedProofResponse struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` - Result Result `protobuf:"varint,2,opt,name=result,proto3,enum=aggregator.v1.Result" json:"result,omitempty"` -} - -func (x *GenAggregatedProofResponse) Reset() { - *x = GenAggregatedProofResponse{} - if protoimpl.UnsafeEnabled { - mi := &file_aggregator_proto_msgTypes[12] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *GenAggregatedProofResponse) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*GenAggregatedProofResponse) ProtoMessage() {} - -func (x *GenAggregatedProofResponse) ProtoReflect() protoreflect.Message { - mi := &file_aggregator_proto_msgTypes[12] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use GenAggregatedProofResponse.ProtoReflect.Descriptor instead. 
-func (*GenAggregatedProofResponse) Descriptor() ([]byte, []int) { - return file_aggregator_proto_rawDescGZIP(), []int{12} -} - -func (x *GenAggregatedProofResponse) GetId() string { - if x != nil { - return x.Id - } - return "" -} - -func (x *GenAggregatedProofResponse) GetResult() Result { - if x != nil { - return x.Result - } - return Result_RESULT_UNSPECIFIED -} - -// * -// @dev Response GenFinalProof -// @param {id} - proof identifier, to be used in GetProofRequest() -// @param {result} - request result -type GenFinalProofResponse struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` - Result Result `protobuf:"varint,2,opt,name=result,proto3,enum=aggregator.v1.Result" json:"result,omitempty"` -} - -func (x *GenFinalProofResponse) Reset() { - *x = GenFinalProofResponse{} - if protoimpl.UnsafeEnabled { - mi := &file_aggregator_proto_msgTypes[13] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *GenFinalProofResponse) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*GenFinalProofResponse) ProtoMessage() {} - -func (x *GenFinalProofResponse) ProtoReflect() protoreflect.Message { - mi := &file_aggregator_proto_msgTypes[13] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use GenFinalProofResponse.ProtoReflect.Descriptor instead. 
-func (*GenFinalProofResponse) Descriptor() ([]byte, []int) { - return file_aggregator_proto_rawDescGZIP(), []int{13} -} - -func (x *GenFinalProofResponse) GetId() string { - if x != nil { - return x.Id - } - return "" -} - -func (x *GenFinalProofResponse) GetResult() Result { - if x != nil { - return x.Result - } - return Result_RESULT_UNSPECIFIED -} - -// * -// @dev CancelResponse -// @param {result} - request result -type CancelResponse struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Result Result `protobuf:"varint,1,opt,name=result,proto3,enum=aggregator.v1.Result" json:"result,omitempty"` -} - -func (x *CancelResponse) Reset() { - *x = CancelResponse{} - if protoimpl.UnsafeEnabled { - mi := &file_aggregator_proto_msgTypes[14] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *CancelResponse) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*CancelResponse) ProtoMessage() {} - -func (x *CancelResponse) ProtoReflect() protoreflect.Message { - mi := &file_aggregator_proto_msgTypes[14] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use CancelResponse.ProtoReflect.Descriptor instead. 
-func (*CancelResponse) Descriptor() ([]byte, []int) { - return file_aggregator_proto_rawDescGZIP(), []int{14} -} - -func (x *CancelResponse) GetResult() Result { - if x != nil { - return x.Result - } - return Result_RESULT_UNSPECIFIED -} - -// * -// @dev GetProofResponse -// @param {id} - proof identifier -// @param {final_proof} - groth16 proof + public circuit inputs -// @param {recursive_proof} - recursive proof json -// @param {result} - proof result -// - COMPLETED_OK: proof has been computed successfully and it is valid -// - ERROR: request error -// - COMPLETED_ERROR: proof has been computed successfully and it is not valid -// - PENDING: proof is being computed -// - INTERNAL_ERROR: server error during proof computation -// - CANCEL: proof has been cancelled -// -// @param {result_string} - extends result information -type GetProofResponse struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` - // Types that are assignable to Proof: - // - // *GetProofResponse_FinalProof - // *GetProofResponse_RecursiveProof - Proof isGetProofResponse_Proof `protobuf_oneof:"proof"` - Result GetProofResponse_Result `protobuf:"varint,4,opt,name=result,proto3,enum=aggregator.v1.GetProofResponse_Result" json:"result,omitempty"` - ResultString string `protobuf:"bytes,5,opt,name=result_string,json=resultString,proto3" json:"result_string,omitempty"` -} - -func (x *GetProofResponse) Reset() { - *x = GetProofResponse{} - if protoimpl.UnsafeEnabled { - mi := &file_aggregator_proto_msgTypes[15] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *GetProofResponse) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*GetProofResponse) ProtoMessage() {} - -func (x *GetProofResponse) ProtoReflect() protoreflect.Message { - mi := &file_aggregator_proto_msgTypes[15] - if 
protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use GetProofResponse.ProtoReflect.Descriptor instead. -func (*GetProofResponse) Descriptor() ([]byte, []int) { - return file_aggregator_proto_rawDescGZIP(), []int{15} -} - -func (x *GetProofResponse) GetId() string { - if x != nil { - return x.Id - } - return "" -} - -func (m *GetProofResponse) GetProof() isGetProofResponse_Proof { - if m != nil { - return m.Proof - } - return nil -} - -func (x *GetProofResponse) GetFinalProof() *FinalProof { - if x, ok := x.GetProof().(*GetProofResponse_FinalProof); ok { - return x.FinalProof - } - return nil -} - -func (x *GetProofResponse) GetRecursiveProof() string { - if x, ok := x.GetProof().(*GetProofResponse_RecursiveProof); ok { - return x.RecursiveProof - } - return "" -} - -func (x *GetProofResponse) GetResult() GetProofResponse_Result { - if x != nil { - return x.Result - } - return GetProofResponse_RESULT_UNSPECIFIED -} - -func (x *GetProofResponse) GetResultString() string { - if x != nil { - return x.ResultString - } - return "" -} - -type isGetProofResponse_Proof interface { - isGetProofResponse_Proof() -} - -type GetProofResponse_FinalProof struct { - FinalProof *FinalProof `protobuf:"bytes,2,opt,name=final_proof,json=finalProof,proto3,oneof"` -} - -type GetProofResponse_RecursiveProof struct { - RecursiveProof string `protobuf:"bytes,3,opt,name=recursive_proof,json=recursiveProof,proto3,oneof"` -} - -func (*GetProofResponse_FinalProof) isGetProofResponse_Proof() {} - -func (*GetProofResponse_RecursiveProof) isGetProofResponse_Proof() {} - -// @dev FinalProof -// @param {proof} - groth16 proof -// @param {public} - public circuit inputs -type FinalProof struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Proof string 
`protobuf:"bytes,1,opt,name=proof,proto3" json:"proof,omitempty"` - Public *PublicInputsExtended `protobuf:"bytes,2,opt,name=public,proto3" json:"public,omitempty"` -} - -func (x *FinalProof) Reset() { - *x = FinalProof{} - if protoimpl.UnsafeEnabled { - mi := &file_aggregator_proto_msgTypes[16] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *FinalProof) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*FinalProof) ProtoMessage() {} - -func (x *FinalProof) ProtoReflect() protoreflect.Message { - mi := &file_aggregator_proto_msgTypes[16] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use FinalProof.ProtoReflect.Descriptor instead. -func (*FinalProof) Descriptor() ([]byte, []int) { - return file_aggregator_proto_rawDescGZIP(), []int{16} -} - -func (x *FinalProof) GetProof() string { - if x != nil { - return x.Proof - } - return "" -} - -func (x *FinalProof) GetPublic() *PublicInputsExtended { - if x != nil { - return x.Public - } - return nil -} - -// @dev PublicInputs -// @param {old_state_root} -// @param {old_acc_input_hash} -// @param {old_batch_num} -// @param {chain_id} -// @param {batch_l2_data} -// @param {global_exit_root} -// @param {sequencer_addr} -// @param {aggregator_addr} -type PublicInputs struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - OldStateRoot []byte `protobuf:"bytes,1,opt,name=old_state_root,json=oldStateRoot,proto3" json:"old_state_root,omitempty"` - OldAccInputHash []byte `protobuf:"bytes,2,opt,name=old_acc_input_hash,json=oldAccInputHash,proto3" json:"old_acc_input_hash,omitempty"` - OldBatchNum uint64 `protobuf:"varint,3,opt,name=old_batch_num,json=oldBatchNum,proto3" json:"old_batch_num,omitempty"` - ChainId uint64 
`protobuf:"varint,4,opt,name=chain_id,json=chainId,proto3" json:"chain_id,omitempty"` - ForkId uint64 `protobuf:"varint,5,opt,name=fork_id,json=forkId,proto3" json:"fork_id,omitempty"` - BatchL2Data []byte `protobuf:"bytes,6,opt,name=batch_l2_data,json=batchL2Data,proto3" json:"batch_l2_data,omitempty"` - L1InfoRoot []byte `protobuf:"bytes,7,opt,name=l1_info_root,json=l1InfoRoot,proto3" json:"l1_info_root,omitempty"` - TimestampLimit uint64 `protobuf:"varint,8,opt,name=timestamp_limit,json=timestampLimit,proto3" json:"timestamp_limit,omitempty"` - SequencerAddr string `protobuf:"bytes,9,opt,name=sequencer_addr,json=sequencerAddr,proto3" json:"sequencer_addr,omitempty"` - ForcedBlockhashL1 []byte `protobuf:"bytes,10,opt,name=forced_blockhash_l1,json=forcedBlockhashL1,proto3" json:"forced_blockhash_l1,omitempty"` - AggregatorAddr string `protobuf:"bytes,12,opt,name=aggregator_addr,json=aggregatorAddr,proto3" json:"aggregator_addr,omitempty"` - L1InfoTreeData map[uint32]*L1Data `protobuf:"bytes,16,rep,name=l1_info_tree_data,json=l1InfoTreeData,proto3" json:"l1_info_tree_data,omitempty" protobuf_key:"varint,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` -} - -func (x *PublicInputs) Reset() { - *x = PublicInputs{} - if protoimpl.UnsafeEnabled { - mi := &file_aggregator_proto_msgTypes[17] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *PublicInputs) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*PublicInputs) ProtoMessage() {} - -func (x *PublicInputs) ProtoReflect() protoreflect.Message { - mi := &file_aggregator_proto_msgTypes[17] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use PublicInputs.ProtoReflect.Descriptor instead. 
-func (*PublicInputs) Descriptor() ([]byte, []int) { - return file_aggregator_proto_rawDescGZIP(), []int{17} -} - -func (x *PublicInputs) GetOldStateRoot() []byte { - if x != nil { - return x.OldStateRoot - } - return nil -} - -func (x *PublicInputs) GetOldAccInputHash() []byte { - if x != nil { - return x.OldAccInputHash - } - return nil -} - -func (x *PublicInputs) GetOldBatchNum() uint64 { - if x != nil { - return x.OldBatchNum - } - return 0 -} - -func (x *PublicInputs) GetChainId() uint64 { - if x != nil { - return x.ChainId - } - return 0 -} - -func (x *PublicInputs) GetForkId() uint64 { - if x != nil { - return x.ForkId - } - return 0 -} - -func (x *PublicInputs) GetBatchL2Data() []byte { - if x != nil { - return x.BatchL2Data - } - return nil -} - -func (x *PublicInputs) GetL1InfoRoot() []byte { - if x != nil { - return x.L1InfoRoot - } - return nil -} - -func (x *PublicInputs) GetTimestampLimit() uint64 { - if x != nil { - return x.TimestampLimit - } - return 0 -} - -func (x *PublicInputs) GetSequencerAddr() string { - if x != nil { - return x.SequencerAddr - } - return "" -} - -func (x *PublicInputs) GetForcedBlockhashL1() []byte { - if x != nil { - return x.ForcedBlockhashL1 - } - return nil -} - -func (x *PublicInputs) GetAggregatorAddr() string { - if x != nil { - return x.AggregatorAddr - } - return "" -} - -func (x *PublicInputs) GetL1InfoTreeData() map[uint32]*L1Data { - if x != nil { - return x.L1InfoTreeData - } - return nil -} - -type StatelessPublicInputs struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Witness []byte `protobuf:"bytes,1,opt,name=witness,proto3" json:"witness,omitempty"` - OldAccInputHash []byte `protobuf:"bytes,2,opt,name=old_acc_input_hash,json=oldAccInputHash,proto3" json:"old_acc_input_hash,omitempty"` - OldBatchNum uint64 `protobuf:"varint,3,opt,name=old_batch_num,json=oldBatchNum,proto3" json:"old_batch_num,omitempty"` - ChainId uint64 
`protobuf:"varint,4,opt,name=chain_id,json=chainId,proto3" json:"chain_id,omitempty"` - ForkId uint64 `protobuf:"varint,5,opt,name=fork_id,json=forkId,proto3" json:"fork_id,omitempty"` - BatchL2Data []byte `protobuf:"bytes,6,opt,name=batch_l2_data,json=batchL2Data,proto3" json:"batch_l2_data,omitempty"` - L1InfoRoot []byte `protobuf:"bytes,7,opt,name=l1_info_root,json=l1InfoRoot,proto3" json:"l1_info_root,omitempty"` - TimestampLimit uint64 `protobuf:"varint,8,opt,name=timestamp_limit,json=timestampLimit,proto3" json:"timestamp_limit,omitempty"` - SequencerAddr string `protobuf:"bytes,9,opt,name=sequencer_addr,json=sequencerAddr,proto3" json:"sequencer_addr,omitempty"` - ForcedBlockhashL1 []byte `protobuf:"bytes,10,opt,name=forced_blockhash_l1,json=forcedBlockhashL1,proto3" json:"forced_blockhash_l1,omitempty"` - AggregatorAddr string `protobuf:"bytes,11,opt,name=aggregator_addr,json=aggregatorAddr,proto3" json:"aggregator_addr,omitempty"` - L1InfoTreeData map[uint32]*L1Data `protobuf:"bytes,12,rep,name=l1_info_tree_data,json=l1InfoTreeData,proto3" json:"l1_info_tree_data,omitempty" protobuf_key:"varint,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` -} - -func (x *StatelessPublicInputs) Reset() { - *x = StatelessPublicInputs{} - if protoimpl.UnsafeEnabled { - mi := &file_aggregator_proto_msgTypes[18] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *StatelessPublicInputs) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*StatelessPublicInputs) ProtoMessage() {} - -func (x *StatelessPublicInputs) ProtoReflect() protoreflect.Message { - mi := &file_aggregator_proto_msgTypes[18] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use StatelessPublicInputs.ProtoReflect.Descriptor instead. 
-func (*StatelessPublicInputs) Descriptor() ([]byte, []int) { - return file_aggregator_proto_rawDescGZIP(), []int{18} -} - -func (x *StatelessPublicInputs) GetWitness() []byte { - if x != nil { - return x.Witness - } - return nil -} - -func (x *StatelessPublicInputs) GetOldAccInputHash() []byte { - if x != nil { - return x.OldAccInputHash - } - return nil -} - -func (x *StatelessPublicInputs) GetOldBatchNum() uint64 { - if x != nil { - return x.OldBatchNum - } - return 0 -} - -func (x *StatelessPublicInputs) GetChainId() uint64 { - if x != nil { - return x.ChainId - } - return 0 -} - -func (x *StatelessPublicInputs) GetForkId() uint64 { - if x != nil { - return x.ForkId - } - return 0 -} - -func (x *StatelessPublicInputs) GetBatchL2Data() []byte { - if x != nil { - return x.BatchL2Data - } - return nil -} - -func (x *StatelessPublicInputs) GetL1InfoRoot() []byte { - if x != nil { - return x.L1InfoRoot - } - return nil -} - -func (x *StatelessPublicInputs) GetTimestampLimit() uint64 { - if x != nil { - return x.TimestampLimit - } - return 0 -} - -func (x *StatelessPublicInputs) GetSequencerAddr() string { - if x != nil { - return x.SequencerAddr - } - return "" -} - -func (x *StatelessPublicInputs) GetForcedBlockhashL1() []byte { - if x != nil { - return x.ForcedBlockhashL1 - } - return nil -} - -func (x *StatelessPublicInputs) GetAggregatorAddr() string { - if x != nil { - return x.AggregatorAddr - } - return "" -} - -func (x *StatelessPublicInputs) GetL1InfoTreeData() map[uint32]*L1Data { - if x != nil { - return x.L1InfoTreeData - } - return nil -} - -// l1InfoTree leaf values -type L1Data struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - GlobalExitRoot []byte `protobuf:"bytes,1,opt,name=global_exit_root,json=globalExitRoot,proto3" json:"global_exit_root,omitempty"` - BlockhashL1 []byte `protobuf:"bytes,2,opt,name=blockhash_l1,json=blockhashL1,proto3" json:"blockhash_l1,omitempty"` - MinTimestamp 
uint32 `protobuf:"varint,3,opt,name=min_timestamp,json=minTimestamp,proto3" json:"min_timestamp,omitempty"` - SmtProof [][]byte `protobuf:"bytes,4,rep,name=smt_proof,json=smtProof,proto3" json:"smt_proof,omitempty"` -} - -func (x *L1Data) Reset() { - *x = L1Data{} - if protoimpl.UnsafeEnabled { - mi := &file_aggregator_proto_msgTypes[19] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *L1Data) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*L1Data) ProtoMessage() {} - -func (x *L1Data) ProtoReflect() protoreflect.Message { - mi := &file_aggregator_proto_msgTypes[19] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use L1Data.ProtoReflect.Descriptor instead. -func (*L1Data) Descriptor() ([]byte, []int) { - return file_aggregator_proto_rawDescGZIP(), []int{19} -} - -func (x *L1Data) GetGlobalExitRoot() []byte { - if x != nil { - return x.GlobalExitRoot - } - return nil -} - -func (x *L1Data) GetBlockhashL1() []byte { - if x != nil { - return x.BlockhashL1 - } - return nil -} - -func (x *L1Data) GetMinTimestamp() uint32 { - if x != nil { - return x.MinTimestamp - } - return 0 -} - -func (x *L1Data) GetSmtProof() [][]byte { - if x != nil { - return x.SmtProof - } - return nil -} - -// * -// @dev InputProver -// @param {public_inputs} - public inputs -// @param {db} - database containing all key-values in smt matching the old state root -// @param {contracts_bytecode} - key is the hash(contractBytecode), value is the bytecode itself -type InputProver struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - PublicInputs *PublicInputs `protobuf:"bytes,1,opt,name=public_inputs,json=publicInputs,proto3" json:"public_inputs,omitempty"` - Db map[string]string 
`protobuf:"bytes,4,rep,name=db,proto3" json:"db,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` // For debug/testing purpposes only. Don't fill this on production - ContractsBytecode map[string]string `protobuf:"bytes,5,rep,name=contracts_bytecode,json=contractsBytecode,proto3" json:"contracts_bytecode,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` // For debug/testing purpposes only. Don't fill this on production -} - -func (x *InputProver) Reset() { - *x = InputProver{} - if protoimpl.UnsafeEnabled { - mi := &file_aggregator_proto_msgTypes[20] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *InputProver) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*InputProver) ProtoMessage() {} - -func (x *InputProver) ProtoReflect() protoreflect.Message { - mi := &file_aggregator_proto_msgTypes[20] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use InputProver.ProtoReflect.Descriptor instead. 
-func (*InputProver) Descriptor() ([]byte, []int) { - return file_aggregator_proto_rawDescGZIP(), []int{20} -} - -func (x *InputProver) GetPublicInputs() *PublicInputs { - if x != nil { - return x.PublicInputs - } - return nil -} - -func (x *InputProver) GetDb() map[string]string { - if x != nil { - return x.Db - } - return nil -} - -func (x *InputProver) GetContractsBytecode() map[string]string { - if x != nil { - return x.ContractsBytecode - } - return nil -} - -type StatelessInputProver struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - PublicInputs *StatelessPublicInputs `protobuf:"bytes,1,opt,name=public_inputs,json=publicInputs,proto3" json:"public_inputs,omitempty"` -} - -func (x *StatelessInputProver) Reset() { - *x = StatelessInputProver{} - if protoimpl.UnsafeEnabled { - mi := &file_aggregator_proto_msgTypes[21] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *StatelessInputProver) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*StatelessInputProver) ProtoMessage() {} - -func (x *StatelessInputProver) ProtoReflect() protoreflect.Message { - mi := &file_aggregator_proto_msgTypes[21] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use StatelessInputProver.ProtoReflect.Descriptor instead. -func (*StatelessInputProver) Descriptor() ([]byte, []int) { - return file_aggregator_proto_rawDescGZIP(), []int{21} -} - -func (x *StatelessInputProver) GetPublicInputs() *StatelessPublicInputs { - if x != nil { - return x.PublicInputs - } - return nil -} - -// * -// @dev PublicInputsExtended -// @param {public_inputs} - public inputs -// @param {new_state_root} - final state root. Used as a sanity check. 
-// @param {new_acc_input_hash} - final accumulate input hash. Used as a sanity check. -// @param {new_local_exit_root} - new local exit root. Used as a sanity check. -// @param {new_batch_num} - final num batch. Used as a sanity check. -type PublicInputsExtended struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - PublicInputs *PublicInputs `protobuf:"bytes,1,opt,name=public_inputs,json=publicInputs,proto3" json:"public_inputs,omitempty"` - NewStateRoot []byte `protobuf:"bytes,2,opt,name=new_state_root,json=newStateRoot,proto3" json:"new_state_root,omitempty"` - NewAccInputHash []byte `protobuf:"bytes,3,opt,name=new_acc_input_hash,json=newAccInputHash,proto3" json:"new_acc_input_hash,omitempty"` - NewLocalExitRoot []byte `protobuf:"bytes,4,opt,name=new_local_exit_root,json=newLocalExitRoot,proto3" json:"new_local_exit_root,omitempty"` - NewBatchNum uint64 `protobuf:"varint,5,opt,name=new_batch_num,json=newBatchNum,proto3" json:"new_batch_num,omitempty"` -} - -func (x *PublicInputsExtended) Reset() { - *x = PublicInputsExtended{} - if protoimpl.UnsafeEnabled { - mi := &file_aggregator_proto_msgTypes[22] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *PublicInputsExtended) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*PublicInputsExtended) ProtoMessage() {} - -func (x *PublicInputsExtended) ProtoReflect() protoreflect.Message { - mi := &file_aggregator_proto_msgTypes[22] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use PublicInputsExtended.ProtoReflect.Descriptor instead. 
-func (*PublicInputsExtended) Descriptor() ([]byte, []int) { - return file_aggregator_proto_rawDescGZIP(), []int{22} -} - -func (x *PublicInputsExtended) GetPublicInputs() *PublicInputs { - if x != nil { - return x.PublicInputs - } - return nil -} - -func (x *PublicInputsExtended) GetNewStateRoot() []byte { - if x != nil { - return x.NewStateRoot - } - return nil -} - -func (x *PublicInputsExtended) GetNewAccInputHash() []byte { - if x != nil { - return x.NewAccInputHash - } - return nil -} - -func (x *PublicInputsExtended) GetNewLocalExitRoot() []byte { - if x != nil { - return x.NewLocalExitRoot - } - return nil -} - -func (x *PublicInputsExtended) GetNewBatchNum() uint64 { - if x != nil { - return x.NewBatchNum - } - return 0 -} - -var File_aggregator_proto protoreflect.FileDescriptor - -var file_aggregator_proto_rawDesc = []byte{ - 0x0a, 0x10, 0x61, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, - 0x74, 0x6f, 0x12, 0x0d, 0x61, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x6f, 0x72, 0x2e, 0x76, - 0x31, 0x22, 0x1f, 0x0a, 0x07, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x14, 0x0a, 0x06, - 0x76, 0x30, 0x5f, 0x30, 0x5f, 0x31, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x76, 0x30, - 0x30, 0x31, 0x22, 0xb7, 0x05, 0x0a, 0x11, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x6f, - 0x72, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x4f, 0x0a, 0x12, 0x67, 0x65, 0x74, 0x5f, - 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x5f, 0x72, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x18, 0x02, - 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x61, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x6f, - 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x47, 0x65, 0x74, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x65, - 0x71, 0x75, 0x65, 0x73, 0x74, 0x48, 0x00, 0x52, 0x10, 0x67, 0x65, 0x74, 0x53, 0x74, 0x61, 0x74, - 0x75, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x5c, 0x0a, 
0x17, 0x67, 0x65, 0x6e, - 0x5f, 0x62, 0x61, 0x74, 0x63, 0x68, 0x5f, 0x70, 0x72, 0x6f, 0x6f, 0x66, 0x5f, 0x72, 0x65, 0x71, - 0x75, 0x65, 0x73, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x23, 0x2e, 0x61, 0x67, 0x67, - 0x72, 0x65, 0x67, 0x61, 0x74, 0x6f, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x47, 0x65, 0x6e, 0x42, 0x61, - 0x74, 0x63, 0x68, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x48, - 0x00, 0x52, 0x14, 0x67, 0x65, 0x6e, 0x42, 0x61, 0x74, 0x63, 0x68, 0x50, 0x72, 0x6f, 0x6f, 0x66, - 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x6b, 0x0a, 0x1c, 0x67, 0x65, 0x6e, 0x5f, 0x61, - 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x6f, 0x66, 0x5f, - 0x72, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x28, 0x2e, - 0x61, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x6f, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x47, 0x65, - 0x6e, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, 0x64, 0x50, 0x72, 0x6f, 0x6f, 0x66, - 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x48, 0x00, 0x52, 0x19, 0x67, 0x65, 0x6e, 0x41, 0x67, - 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, 0x64, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x52, 0x65, 0x71, - 0x75, 0x65, 0x73, 0x74, 0x12, 0x5c, 0x0a, 0x17, 0x67, 0x65, 0x6e, 0x5f, 0x66, 0x69, 0x6e, 0x61, - 0x6c, 0x5f, 0x70, 0x72, 0x6f, 0x6f, 0x66, 0x5f, 0x72, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x18, - 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x23, 0x2e, 0x61, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, - 0x6f, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x47, 0x65, 0x6e, 0x46, 0x69, 0x6e, 0x61, 0x6c, 0x50, 0x72, - 0x6f, 0x6f, 0x66, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x48, 0x00, 0x52, 0x14, 0x67, 0x65, - 0x6e, 0x46, 0x69, 0x6e, 0x61, 0x6c, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x52, 0x65, 0x71, 0x75, 0x65, - 0x73, 0x74, 0x12, 0x45, 0x0a, 0x0e, 0x63, 0x61, 0x6e, 0x63, 0x65, 0x6c, 0x5f, 0x72, 0x65, 0x71, - 0x75, 0x65, 0x73, 0x74, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x61, 0x67, 0x67, - 0x72, 0x65, 
0x67, 0x61, 0x74, 0x6f, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x43, 0x61, 0x6e, 0x63, 0x65, - 0x6c, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x48, 0x00, 0x52, 0x0d, 0x63, 0x61, 0x6e, 0x63, - 0x65, 0x6c, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x4c, 0x0a, 0x11, 0x67, 0x65, 0x74, - 0x5f, 0x70, 0x72, 0x6f, 0x6f, 0x66, 0x5f, 0x72, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x18, 0x07, - 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x61, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x6f, - 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x47, 0x65, 0x74, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x52, 0x65, 0x71, - 0x75, 0x65, 0x73, 0x74, 0x48, 0x00, 0x52, 0x0f, 0x67, 0x65, 0x74, 0x50, 0x72, 0x6f, 0x6f, 0x66, - 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x78, 0x0a, 0x21, 0x67, 0x65, 0x6e, 0x5f, 0x73, - 0x74, 0x61, 0x74, 0x65, 0x6c, 0x65, 0x73, 0x73, 0x5f, 0x62, 0x61, 0x74, 0x63, 0x68, 0x5f, 0x70, - 0x72, 0x6f, 0x6f, 0x66, 0x5f, 0x72, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x18, 0x08, 0x20, 0x01, - 0x28, 0x0b, 0x32, 0x2c, 0x2e, 0x61, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x6f, 0x72, 0x2e, - 0x76, 0x31, 0x2e, 0x47, 0x65, 0x6e, 0x53, 0x74, 0x61, 0x74, 0x65, 0x6c, 0x65, 0x73, 0x73, 0x42, - 0x61, 0x74, 0x63, 0x68, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, - 0x48, 0x00, 0x52, 0x1d, 0x67, 0x65, 0x6e, 0x53, 0x74, 0x61, 0x74, 0x65, 0x6c, 0x65, 0x73, 0x73, - 0x42, 0x61, 0x74, 0x63, 0x68, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, - 0x74, 0x42, 0x09, 0x0a, 0x07, 0x72, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x22, 0xcc, 0x04, 0x0a, - 0x0d, 0x50, 0x72, 0x6f, 0x76, 0x65, 0x72, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x12, 0x0e, - 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x52, - 0x0a, 0x13, 0x67, 0x65, 0x74, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x5f, 0x72, 0x65, 0x73, - 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x61, 0x67, - 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x6f, 0x72, 
0x2e, 0x76, 0x31, 0x2e, 0x47, 0x65, 0x74, 0x53, - 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x48, 0x00, 0x52, - 0x11, 0x67, 0x65, 0x74, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, - 0x73, 0x65, 0x12, 0x5f, 0x0a, 0x18, 0x67, 0x65, 0x6e, 0x5f, 0x62, 0x61, 0x74, 0x63, 0x68, 0x5f, - 0x70, 0x72, 0x6f, 0x6f, 0x66, 0x5f, 0x72, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x18, 0x03, - 0x20, 0x01, 0x28, 0x0b, 0x32, 0x24, 0x2e, 0x61, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x6f, - 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x47, 0x65, 0x6e, 0x42, 0x61, 0x74, 0x63, 0x68, 0x50, 0x72, 0x6f, - 0x6f, 0x66, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x48, 0x00, 0x52, 0x15, 0x67, 0x65, - 0x6e, 0x42, 0x61, 0x74, 0x63, 0x68, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x52, 0x65, 0x73, 0x70, 0x6f, - 0x6e, 0x73, 0x65, 0x12, 0x6e, 0x0a, 0x1d, 0x67, 0x65, 0x6e, 0x5f, 0x61, 0x67, 0x67, 0x72, 0x65, - 0x67, 0x61, 0x74, 0x65, 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x6f, 0x66, 0x5f, 0x72, 0x65, 0x73, 0x70, - 0x6f, 0x6e, 0x73, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x29, 0x2e, 0x61, 0x67, 0x67, - 0x72, 0x65, 0x67, 0x61, 0x74, 0x6f, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x47, 0x65, 0x6e, 0x41, 0x67, - 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, 0x64, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x52, 0x65, 0x73, - 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x48, 0x00, 0x52, 0x1a, 0x67, 0x65, 0x6e, 0x41, 0x67, 0x67, 0x72, - 0x65, 0x67, 0x61, 0x74, 0x65, 0x64, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x52, 0x65, 0x73, 0x70, 0x6f, - 0x6e, 0x73, 0x65, 0x12, 0x5f, 0x0a, 0x18, 0x67, 0x65, 0x6e, 0x5f, 0x66, 0x69, 0x6e, 0x61, 0x6c, - 0x5f, 0x70, 0x72, 0x6f, 0x6f, 0x66, 0x5f, 0x72, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x18, - 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x24, 0x2e, 0x61, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, - 0x6f, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x47, 0x65, 0x6e, 0x46, 0x69, 0x6e, 0x61, 0x6c, 0x50, 0x72, - 0x6f, 0x6f, 0x66, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x48, 0x00, 0x52, 
0x15, 0x67, - 0x65, 0x6e, 0x46, 0x69, 0x6e, 0x61, 0x6c, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x52, 0x65, 0x73, 0x70, - 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x48, 0x0a, 0x0f, 0x63, 0x61, 0x6e, 0x63, 0x65, 0x6c, 0x5f, 0x72, - 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, - 0x61, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x6f, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x43, 0x61, - 0x6e, 0x63, 0x65, 0x6c, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x48, 0x00, 0x52, 0x0e, - 0x63, 0x61, 0x6e, 0x63, 0x65, 0x6c, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x4f, - 0x0a, 0x12, 0x67, 0x65, 0x74, 0x5f, 0x70, 0x72, 0x6f, 0x6f, 0x66, 0x5f, 0x72, 0x65, 0x73, 0x70, - 0x6f, 0x6e, 0x73, 0x65, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x61, 0x67, 0x67, - 0x72, 0x65, 0x67, 0x61, 0x74, 0x6f, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x47, 0x65, 0x74, 0x50, 0x72, - 0x6f, 0x6f, 0x66, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x48, 0x00, 0x52, 0x10, 0x67, - 0x65, 0x74, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x42, - 0x0a, 0x0a, 0x08, 0x72, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x12, 0x0a, 0x10, 0x47, - 0x65, 0x74, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x22, - 0x48, 0x0a, 0x14, 0x47, 0x65, 0x6e, 0x42, 0x61, 0x74, 0x63, 0x68, 0x50, 0x72, 0x6f, 0x6f, 0x66, - 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x30, 0x0a, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, - 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x61, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, - 0x74, 0x6f, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x50, 0x72, 0x6f, 0x76, - 0x65, 0x72, 0x52, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x22, 0x5a, 0x0a, 0x1d, 0x47, 0x65, 0x6e, - 0x53, 0x74, 0x61, 0x74, 0x65, 0x6c, 0x65, 0x73, 0x73, 0x42, 0x61, 0x74, 0x63, 0x68, 0x50, 0x72, - 0x6f, 0x6f, 0x66, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x39, 0x0a, 0x05, 0x69, 0x6e, - 0x70, 0x75, 0x74, 0x18, 
0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x23, 0x2e, 0x61, 0x67, 0x67, 0x72, - 0x65, 0x67, 0x61, 0x74, 0x6f, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x65, 0x6c, - 0x65, 0x73, 0x73, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x50, 0x72, 0x6f, 0x76, 0x65, 0x72, 0x52, 0x05, - 0x69, 0x6e, 0x70, 0x75, 0x74, 0x22, 0x73, 0x0a, 0x19, 0x47, 0x65, 0x6e, 0x41, 0x67, 0x67, 0x72, - 0x65, 0x67, 0x61, 0x74, 0x65, 0x64, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x52, 0x65, 0x71, 0x75, 0x65, - 0x73, 0x74, 0x12, 0x2a, 0x0a, 0x11, 0x72, 0x65, 0x63, 0x75, 0x72, 0x73, 0x69, 0x76, 0x65, 0x5f, - 0x70, 0x72, 0x6f, 0x6f, 0x66, 0x5f, 0x31, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0f, 0x72, - 0x65, 0x63, 0x75, 0x72, 0x73, 0x69, 0x76, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x31, 0x12, 0x2a, - 0x0a, 0x11, 0x72, 0x65, 0x63, 0x75, 0x72, 0x73, 0x69, 0x76, 0x65, 0x5f, 0x70, 0x72, 0x6f, 0x6f, - 0x66, 0x5f, 0x32, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0f, 0x72, 0x65, 0x63, 0x75, 0x72, - 0x73, 0x69, 0x76, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x32, 0x22, 0x68, 0x0a, 0x14, 0x47, 0x65, - 0x6e, 0x46, 0x69, 0x6e, 0x61, 0x6c, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x52, 0x65, 0x71, 0x75, 0x65, - 0x73, 0x74, 0x12, 0x27, 0x0a, 0x0f, 0x72, 0x65, 0x63, 0x75, 0x72, 0x73, 0x69, 0x76, 0x65, 0x5f, - 0x70, 0x72, 0x6f, 0x6f, 0x66, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0e, 0x72, 0x65, 0x63, - 0x75, 0x72, 0x73, 0x69, 0x76, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x12, 0x27, 0x0a, 0x0f, 0x61, - 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x6f, 0x72, 0x5f, 0x61, 0x64, 0x64, 0x72, 0x18, 0x02, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x0e, 0x61, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x6f, 0x72, - 0x41, 0x64, 0x64, 0x72, 0x22, 0x1f, 0x0a, 0x0d, 0x43, 0x61, 0x6e, 0x63, 0x65, 0x6c, 0x52, 0x65, - 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x02, 0x69, 0x64, 0x22, 0x3b, 0x0a, 0x0f, 0x47, 0x65, 0x74, 0x50, 0x72, 0x6f, 0x6f, - 0x66, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x0e, 
0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x18, 0x0a, 0x07, 0x74, 0x69, 0x6d, 0x65, - 0x6f, 0x75, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x52, 0x07, 0x74, 0x69, 0x6d, 0x65, 0x6f, - 0x75, 0x74, 0x22, 0xfc, 0x05, 0x0a, 0x11, 0x47, 0x65, 0x74, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, - 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x3f, 0x0a, 0x06, 0x73, 0x74, 0x61, 0x74, - 0x75, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x27, 0x2e, 0x61, 0x67, 0x67, 0x72, 0x65, - 0x67, 0x61, 0x74, 0x6f, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x47, 0x65, 0x74, 0x53, 0x74, 0x61, 0x74, - 0x75, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x75, - 0x73, 0x52, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x37, 0x0a, 0x18, 0x6c, 0x61, 0x73, - 0x74, 0x5f, 0x63, 0x6f, 0x6d, 0x70, 0x75, 0x74, 0x65, 0x64, 0x5f, 0x72, 0x65, 0x71, 0x75, 0x65, - 0x73, 0x74, 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x15, 0x6c, 0x61, 0x73, - 0x74, 0x43, 0x6f, 0x6d, 0x70, 0x75, 0x74, 0x65, 0x64, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, - 0x49, 0x64, 0x12, 0x33, 0x0a, 0x16, 0x6c, 0x61, 0x73, 0x74, 0x5f, 0x63, 0x6f, 0x6d, 0x70, 0x75, - 0x74, 0x65, 0x64, 0x5f, 0x65, 0x6e, 0x64, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x18, 0x03, 0x20, 0x01, - 0x28, 0x04, 0x52, 0x13, 0x6c, 0x61, 0x73, 0x74, 0x43, 0x6f, 0x6d, 0x70, 0x75, 0x74, 0x65, 0x64, - 0x45, 0x6e, 0x64, 0x54, 0x69, 0x6d, 0x65, 0x12, 0x3f, 0x0a, 0x1c, 0x63, 0x75, 0x72, 0x72, 0x65, - 0x6e, 0x74, 0x5f, 0x63, 0x6f, 0x6d, 0x70, 0x75, 0x74, 0x69, 0x6e, 0x67, 0x5f, 0x72, 0x65, 0x71, - 0x75, 0x65, 0x73, 0x74, 0x5f, 0x69, 0x64, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x19, 0x63, - 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x43, 0x6f, 0x6d, 0x70, 0x75, 0x74, 0x69, 0x6e, 0x67, 0x52, - 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x49, 0x64, 0x12, 0x3f, 0x0a, 0x1c, 0x63, 0x75, 0x72, 0x72, - 0x65, 0x6e, 0x74, 0x5f, 0x63, 0x6f, 0x6d, 0x70, 0x75, 0x74, 0x69, 0x6e, 0x67, 0x5f, 0x73, 0x74, - 
0x61, 0x72, 0x74, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x04, 0x52, 0x19, - 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x43, 0x6f, 0x6d, 0x70, 0x75, 0x74, 0x69, 0x6e, 0x67, - 0x53, 0x74, 0x61, 0x72, 0x74, 0x54, 0x69, 0x6d, 0x65, 0x12, 0x23, 0x0a, 0x0d, 0x76, 0x65, 0x72, - 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x0c, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x25, - 0x0a, 0x0e, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, - 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0d, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x53, - 0x65, 0x72, 0x76, 0x65, 0x72, 0x12, 0x39, 0x0a, 0x19, 0x70, 0x65, 0x6e, 0x64, 0x69, 0x6e, 0x67, - 0x5f, 0x72, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x5f, 0x71, 0x75, 0x65, 0x75, 0x65, 0x5f, 0x69, - 0x64, 0x73, 0x18, 0x08, 0x20, 0x03, 0x28, 0x09, 0x52, 0x16, 0x70, 0x65, 0x6e, 0x64, 0x69, 0x6e, - 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x51, 0x75, 0x65, 0x75, 0x65, 0x49, 0x64, 0x73, - 0x12, 0x1f, 0x0a, 0x0b, 0x70, 0x72, 0x6f, 0x76, 0x65, 0x72, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, - 0x09, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x70, 0x72, 0x6f, 0x76, 0x65, 0x72, 0x4e, 0x61, 0x6d, - 0x65, 0x12, 0x1b, 0x0a, 0x09, 0x70, 0x72, 0x6f, 0x76, 0x65, 0x72, 0x5f, 0x69, 0x64, 0x18, 0x0a, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x70, 0x72, 0x6f, 0x76, 0x65, 0x72, 0x49, 0x64, 0x12, 0x26, - 0x0a, 0x0f, 0x6e, 0x75, 0x6d, 0x62, 0x65, 0x72, 0x5f, 0x6f, 0x66, 0x5f, 0x63, 0x6f, 0x72, 0x65, - 0x73, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x04, 0x52, 0x0d, 0x6e, 0x75, 0x6d, 0x62, 0x65, 0x72, 0x4f, - 0x66, 0x43, 0x6f, 0x72, 0x65, 0x73, 0x12, 0x21, 0x0a, 0x0c, 0x74, 0x6f, 0x74, 0x61, 0x6c, 0x5f, - 0x6d, 0x65, 0x6d, 0x6f, 0x72, 0x79, 0x18, 0x0c, 0x20, 0x01, 0x28, 0x04, 0x52, 0x0b, 0x74, 0x6f, - 0x74, 0x61, 0x6c, 0x4d, 0x65, 0x6d, 0x6f, 0x72, 0x79, 0x12, 0x1f, 0x0a, 0x0b, 0x66, 0x72, 0x65, - 0x65, 0x5f, 0x6d, 0x65, 0x6d, 0x6f, 
0x72, 0x79, 0x18, 0x0d, 0x20, 0x01, 0x28, 0x04, 0x52, 0x0a, - 0x66, 0x72, 0x65, 0x65, 0x4d, 0x65, 0x6d, 0x6f, 0x72, 0x79, 0x12, 0x17, 0x0a, 0x07, 0x66, 0x6f, - 0x72, 0x6b, 0x5f, 0x69, 0x64, 0x18, 0x0e, 0x20, 0x01, 0x28, 0x04, 0x52, 0x06, 0x66, 0x6f, 0x72, - 0x6b, 0x49, 0x64, 0x22, 0x6c, 0x0a, 0x06, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x16, 0x0a, - 0x12, 0x53, 0x54, 0x41, 0x54, 0x55, 0x53, 0x5f, 0x55, 0x4e, 0x53, 0x50, 0x45, 0x43, 0x49, 0x46, - 0x49, 0x45, 0x44, 0x10, 0x00, 0x12, 0x12, 0x0a, 0x0e, 0x53, 0x54, 0x41, 0x54, 0x55, 0x53, 0x5f, - 0x42, 0x4f, 0x4f, 0x54, 0x49, 0x4e, 0x47, 0x10, 0x01, 0x12, 0x14, 0x0a, 0x10, 0x53, 0x54, 0x41, - 0x54, 0x55, 0x53, 0x5f, 0x43, 0x4f, 0x4d, 0x50, 0x55, 0x54, 0x49, 0x4e, 0x47, 0x10, 0x02, 0x12, - 0x0f, 0x0a, 0x0b, 0x53, 0x54, 0x41, 0x54, 0x55, 0x53, 0x5f, 0x49, 0x44, 0x4c, 0x45, 0x10, 0x03, - 0x12, 0x0f, 0x0a, 0x0b, 0x53, 0x54, 0x41, 0x54, 0x55, 0x53, 0x5f, 0x48, 0x41, 0x4c, 0x54, 0x10, - 0x04, 0x22, 0x56, 0x0a, 0x15, 0x47, 0x65, 0x6e, 0x42, 0x61, 0x74, 0x63, 0x68, 0x50, 0x72, 0x6f, - 0x6f, 0x66, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, - 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x2d, 0x0a, 0x06, 0x72, 0x65, - 0x73, 0x75, 0x6c, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x15, 0x2e, 0x61, 0x67, 0x67, - 0x72, 0x65, 0x67, 0x61, 0x74, 0x6f, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x52, 0x65, 0x73, 0x75, 0x6c, - 0x74, 0x52, 0x06, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x22, 0x5b, 0x0a, 0x1a, 0x47, 0x65, 0x6e, - 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, 0x64, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x52, - 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x2d, 0x0a, 0x06, 0x72, 0x65, 0x73, 0x75, 0x6c, - 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x15, 0x2e, 0x61, 0x67, 0x67, 0x72, 0x65, 0x67, - 0x61, 0x74, 0x6f, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x52, 0x65, 0x73, 0x75, 
0x6c, 0x74, 0x52, 0x06, - 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x22, 0x56, 0x0a, 0x15, 0x47, 0x65, 0x6e, 0x46, 0x69, 0x6e, - 0x61, 0x6c, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, - 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, - 0x2d, 0x0a, 0x06, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, - 0x15, 0x2e, 0x61, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x6f, 0x72, 0x2e, 0x76, 0x31, 0x2e, - 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x52, 0x06, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x22, 0x3f, - 0x0a, 0x0e, 0x43, 0x61, 0x6e, 0x63, 0x65, 0x6c, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, - 0x12, 0x2d, 0x0a, 0x06, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, - 0x32, 0x15, 0x2e, 0x61, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x6f, 0x72, 0x2e, 0x76, 0x31, - 0x2e, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x52, 0x06, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x22, - 0xa5, 0x03, 0x0a, 0x10, 0x47, 0x65, 0x74, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x52, 0x65, 0x73, 0x70, - 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x02, 0x69, 0x64, 0x12, 0x3c, 0x0a, 0x0b, 0x66, 0x69, 0x6e, 0x61, 0x6c, 0x5f, 0x70, 0x72, - 0x6f, 0x6f, 0x66, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x61, 0x67, 0x67, 0x72, - 0x65, 0x67, 0x61, 0x74, 0x6f, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x46, 0x69, 0x6e, 0x61, 0x6c, 0x50, - 0x72, 0x6f, 0x6f, 0x66, 0x48, 0x00, 0x52, 0x0a, 0x66, 0x69, 0x6e, 0x61, 0x6c, 0x50, 0x72, 0x6f, - 0x6f, 0x66, 0x12, 0x29, 0x0a, 0x0f, 0x72, 0x65, 0x63, 0x75, 0x72, 0x73, 0x69, 0x76, 0x65, 0x5f, - 0x70, 0x72, 0x6f, 0x6f, 0x66, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x0e, 0x72, - 0x65, 0x63, 0x75, 0x72, 0x73, 0x69, 0x76, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x12, 0x3e, 0x0a, - 0x06, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x26, 0x2e, - 0x61, 0x67, 
0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x6f, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x47, 0x65, - 0x74, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x52, - 0x65, 0x73, 0x75, 0x6c, 0x74, 0x52, 0x06, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x12, 0x23, 0x0a, - 0x0d, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x5f, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x18, 0x05, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x53, 0x74, 0x72, 0x69, - 0x6e, 0x67, 0x22, 0xa9, 0x01, 0x0a, 0x06, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x12, 0x16, 0x0a, - 0x12, 0x52, 0x45, 0x53, 0x55, 0x4c, 0x54, 0x5f, 0x55, 0x4e, 0x53, 0x50, 0x45, 0x43, 0x49, 0x46, - 0x49, 0x45, 0x44, 0x10, 0x00, 0x12, 0x17, 0x0a, 0x13, 0x52, 0x45, 0x53, 0x55, 0x4c, 0x54, 0x5f, - 0x43, 0x4f, 0x4d, 0x50, 0x4c, 0x45, 0x54, 0x45, 0x44, 0x5f, 0x4f, 0x4b, 0x10, 0x01, 0x12, 0x10, - 0x0a, 0x0c, 0x52, 0x45, 0x53, 0x55, 0x4c, 0x54, 0x5f, 0x45, 0x52, 0x52, 0x4f, 0x52, 0x10, 0x02, - 0x12, 0x1a, 0x0a, 0x16, 0x52, 0x45, 0x53, 0x55, 0x4c, 0x54, 0x5f, 0x43, 0x4f, 0x4d, 0x50, 0x4c, - 0x45, 0x54, 0x45, 0x44, 0x5f, 0x45, 0x52, 0x52, 0x4f, 0x52, 0x10, 0x03, 0x12, 0x12, 0x0a, 0x0e, - 0x52, 0x45, 0x53, 0x55, 0x4c, 0x54, 0x5f, 0x50, 0x45, 0x4e, 0x44, 0x49, 0x4e, 0x47, 0x10, 0x04, - 0x12, 0x19, 0x0a, 0x15, 0x52, 0x45, 0x53, 0x55, 0x4c, 0x54, 0x5f, 0x49, 0x4e, 0x54, 0x45, 0x52, - 0x4e, 0x41, 0x4c, 0x5f, 0x45, 0x52, 0x52, 0x4f, 0x52, 0x10, 0x05, 0x12, 0x11, 0x0a, 0x0d, 0x52, - 0x45, 0x53, 0x55, 0x4c, 0x54, 0x5f, 0x43, 0x41, 0x4e, 0x43, 0x45, 0x4c, 0x10, 0x06, 0x42, 0x07, - 0x0a, 0x05, 0x70, 0x72, 0x6f, 0x6f, 0x66, 0x22, 0x5f, 0x0a, 0x0a, 0x46, 0x69, 0x6e, 0x61, 0x6c, - 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x12, 0x14, 0x0a, 0x05, 0x70, 0x72, 0x6f, 0x6f, 0x66, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x70, 0x72, 0x6f, 0x6f, 0x66, 0x12, 0x3b, 0x0a, 0x06, 0x70, - 0x75, 0x62, 0x6c, 0x69, 0x63, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x23, 0x2e, 0x61, 0x67, - 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x6f, 0x72, 
0x2e, 0x76, 0x31, 0x2e, 0x50, 0x75, 0x62, 0x6c, - 0x69, 0x63, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x73, 0x45, 0x78, 0x74, 0x65, 0x6e, 0x64, 0x65, 0x64, - 0x52, 0x06, 0x70, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x22, 0xde, 0x04, 0x0a, 0x0c, 0x50, 0x75, 0x62, - 0x6c, 0x69, 0x63, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x73, 0x12, 0x24, 0x0a, 0x0e, 0x6f, 0x6c, 0x64, - 0x5f, 0x73, 0x74, 0x61, 0x74, 0x65, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, - 0x0c, 0x52, 0x0c, 0x6f, 0x6c, 0x64, 0x53, 0x74, 0x61, 0x74, 0x65, 0x52, 0x6f, 0x6f, 0x74, 0x12, - 0x2b, 0x0a, 0x12, 0x6f, 0x6c, 0x64, 0x5f, 0x61, 0x63, 0x63, 0x5f, 0x69, 0x6e, 0x70, 0x75, 0x74, - 0x5f, 0x68, 0x61, 0x73, 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0f, 0x6f, 0x6c, 0x64, - 0x41, 0x63, 0x63, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x48, 0x61, 0x73, 0x68, 0x12, 0x22, 0x0a, 0x0d, - 0x6f, 0x6c, 0x64, 0x5f, 0x62, 0x61, 0x74, 0x63, 0x68, 0x5f, 0x6e, 0x75, 0x6d, 0x18, 0x03, 0x20, - 0x01, 0x28, 0x04, 0x52, 0x0b, 0x6f, 0x6c, 0x64, 0x42, 0x61, 0x74, 0x63, 0x68, 0x4e, 0x75, 0x6d, - 0x12, 0x19, 0x0a, 0x08, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x04, 0x20, 0x01, - 0x28, 0x04, 0x52, 0x07, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x49, 0x64, 0x12, 0x17, 0x0a, 0x07, 0x66, - 0x6f, 0x72, 0x6b, 0x5f, 0x69, 0x64, 0x18, 0x05, 0x20, 0x01, 0x28, 0x04, 0x52, 0x06, 0x66, 0x6f, - 0x72, 0x6b, 0x49, 0x64, 0x12, 0x22, 0x0a, 0x0d, 0x62, 0x61, 0x74, 0x63, 0x68, 0x5f, 0x6c, 0x32, - 0x5f, 0x64, 0x61, 0x74, 0x61, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0b, 0x62, 0x61, 0x74, - 0x63, 0x68, 0x4c, 0x32, 0x44, 0x61, 0x74, 0x61, 0x12, 0x20, 0x0a, 0x0c, 0x6c, 0x31, 0x5f, 0x69, - 0x6e, 0x66, 0x6f, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0a, - 0x6c, 0x31, 0x49, 0x6e, 0x66, 0x6f, 0x52, 0x6f, 0x6f, 0x74, 0x12, 0x27, 0x0a, 0x0f, 0x74, 0x69, - 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x5f, 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x18, 0x08, 0x20, - 0x01, 0x28, 0x04, 0x52, 0x0e, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 
0x4c, 0x69, - 0x6d, 0x69, 0x74, 0x12, 0x25, 0x0a, 0x0e, 0x73, 0x65, 0x71, 0x75, 0x65, 0x6e, 0x63, 0x65, 0x72, - 0x5f, 0x61, 0x64, 0x64, 0x72, 0x18, 0x09, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0d, 0x73, 0x65, 0x71, - 0x75, 0x65, 0x6e, 0x63, 0x65, 0x72, 0x41, 0x64, 0x64, 0x72, 0x12, 0x2e, 0x0a, 0x13, 0x66, 0x6f, - 0x72, 0x63, 0x65, 0x64, 0x5f, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x68, 0x61, 0x73, 0x68, 0x5f, 0x6c, - 0x31, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x11, 0x66, 0x6f, 0x72, 0x63, 0x65, 0x64, 0x42, - 0x6c, 0x6f, 0x63, 0x6b, 0x68, 0x61, 0x73, 0x68, 0x4c, 0x31, 0x12, 0x27, 0x0a, 0x0f, 0x61, 0x67, - 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x6f, 0x72, 0x5f, 0x61, 0x64, 0x64, 0x72, 0x18, 0x0c, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x0e, 0x61, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x6f, 0x72, 0x41, - 0x64, 0x64, 0x72, 0x12, 0x5a, 0x0a, 0x11, 0x6c, 0x31, 0x5f, 0x69, 0x6e, 0x66, 0x6f, 0x5f, 0x74, - 0x72, 0x65, 0x65, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x18, 0x10, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2f, - 0x2e, 0x61, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x6f, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x50, - 0x75, 0x62, 0x6c, 0x69, 0x63, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x73, 0x2e, 0x4c, 0x31, 0x49, 0x6e, - 0x66, 0x6f, 0x54, 0x72, 0x65, 0x65, 0x44, 0x61, 0x74, 0x61, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, - 0x0e, 0x6c, 0x31, 0x49, 0x6e, 0x66, 0x6f, 0x54, 0x72, 0x65, 0x65, 0x44, 0x61, 0x74, 0x61, 0x1a, - 0x58, 0x0a, 0x13, 0x4c, 0x31, 0x49, 0x6e, 0x66, 0x6f, 0x54, 0x72, 0x65, 0x65, 0x44, 0x61, 0x74, - 0x61, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, - 0x01, 0x28, 0x0d, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x2b, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x61, 0x67, 0x67, 0x72, 0x65, 0x67, - 0x61, 0x74, 0x6f, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x4c, 0x31, 0x44, 0x61, 0x74, 0x61, 0x52, 0x05, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0xe4, 0x04, 0x0a, 0x15, 0x53, 0x74, - 0x61, 0x74, 0x65, 0x6c, 
0x65, 0x73, 0x73, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x49, 0x6e, 0x70, - 0x75, 0x74, 0x73, 0x12, 0x18, 0x0a, 0x07, 0x77, 0x69, 0x74, 0x6e, 0x65, 0x73, 0x73, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x0c, 0x52, 0x07, 0x77, 0x69, 0x74, 0x6e, 0x65, 0x73, 0x73, 0x12, 0x2b, 0x0a, - 0x12, 0x6f, 0x6c, 0x64, 0x5f, 0x61, 0x63, 0x63, 0x5f, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x5f, 0x68, - 0x61, 0x73, 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0f, 0x6f, 0x6c, 0x64, 0x41, 0x63, - 0x63, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x48, 0x61, 0x73, 0x68, 0x12, 0x22, 0x0a, 0x0d, 0x6f, 0x6c, - 0x64, 0x5f, 0x62, 0x61, 0x74, 0x63, 0x68, 0x5f, 0x6e, 0x75, 0x6d, 0x18, 0x03, 0x20, 0x01, 0x28, - 0x04, 0x52, 0x0b, 0x6f, 0x6c, 0x64, 0x42, 0x61, 0x74, 0x63, 0x68, 0x4e, 0x75, 0x6d, 0x12, 0x19, - 0x0a, 0x08, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x04, 0x20, 0x01, 0x28, 0x04, - 0x52, 0x07, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x49, 0x64, 0x12, 0x17, 0x0a, 0x07, 0x66, 0x6f, 0x72, - 0x6b, 0x5f, 0x69, 0x64, 0x18, 0x05, 0x20, 0x01, 0x28, 0x04, 0x52, 0x06, 0x66, 0x6f, 0x72, 0x6b, - 0x49, 0x64, 0x12, 0x22, 0x0a, 0x0d, 0x62, 0x61, 0x74, 0x63, 0x68, 0x5f, 0x6c, 0x32, 0x5f, 0x64, - 0x61, 0x74, 0x61, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0b, 0x62, 0x61, 0x74, 0x63, 0x68, - 0x4c, 0x32, 0x44, 0x61, 0x74, 0x61, 0x12, 0x20, 0x0a, 0x0c, 0x6c, 0x31, 0x5f, 0x69, 0x6e, 0x66, - 0x6f, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0a, 0x6c, 0x31, - 0x49, 0x6e, 0x66, 0x6f, 0x52, 0x6f, 0x6f, 0x74, 0x12, 0x27, 0x0a, 0x0f, 0x74, 0x69, 0x6d, 0x65, - 0x73, 0x74, 0x61, 0x6d, 0x70, 0x5f, 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x18, 0x08, 0x20, 0x01, 0x28, - 0x04, 0x52, 0x0e, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x4c, 0x69, 0x6d, 0x69, - 0x74, 0x12, 0x25, 0x0a, 0x0e, 0x73, 0x65, 0x71, 0x75, 0x65, 0x6e, 0x63, 0x65, 0x72, 0x5f, 0x61, - 0x64, 0x64, 0x72, 0x18, 0x09, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0d, 0x73, 0x65, 0x71, 0x75, 0x65, - 0x6e, 0x63, 0x65, 0x72, 0x41, 0x64, 0x64, 0x72, 0x12, 0x2e, 
0x0a, 0x13, 0x66, 0x6f, 0x72, 0x63, - 0x65, 0x64, 0x5f, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x68, 0x61, 0x73, 0x68, 0x5f, 0x6c, 0x31, 0x18, - 0x0a, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x11, 0x66, 0x6f, 0x72, 0x63, 0x65, 0x64, 0x42, 0x6c, 0x6f, - 0x63, 0x6b, 0x68, 0x61, 0x73, 0x68, 0x4c, 0x31, 0x12, 0x27, 0x0a, 0x0f, 0x61, 0x67, 0x67, 0x72, - 0x65, 0x67, 0x61, 0x74, 0x6f, 0x72, 0x5f, 0x61, 0x64, 0x64, 0x72, 0x18, 0x0b, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x0e, 0x61, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x6f, 0x72, 0x41, 0x64, 0x64, - 0x72, 0x12, 0x63, 0x0a, 0x11, 0x6c, 0x31, 0x5f, 0x69, 0x6e, 0x66, 0x6f, 0x5f, 0x74, 0x72, 0x65, - 0x65, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x18, 0x0c, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x38, 0x2e, 0x61, - 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x6f, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x74, 0x61, - 0x74, 0x65, 0x6c, 0x65, 0x73, 0x73, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x49, 0x6e, 0x70, 0x75, - 0x74, 0x73, 0x2e, 0x4c, 0x31, 0x49, 0x6e, 0x66, 0x6f, 0x54, 0x72, 0x65, 0x65, 0x44, 0x61, 0x74, - 0x61, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x0e, 0x6c, 0x31, 0x49, 0x6e, 0x66, 0x6f, 0x54, 0x72, - 0x65, 0x65, 0x44, 0x61, 0x74, 0x61, 0x1a, 0x58, 0x0a, 0x13, 0x4c, 0x31, 0x49, 0x6e, 0x66, 0x6f, - 0x54, 0x72, 0x65, 0x65, 0x44, 0x61, 0x74, 0x61, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, - 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, - 0x2b, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, - 0x2e, 0x61, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x6f, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x4c, - 0x31, 0x44, 0x61, 0x74, 0x61, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, - 0x22, 0x97, 0x01, 0x0a, 0x06, 0x4c, 0x31, 0x44, 0x61, 0x74, 0x61, 0x12, 0x28, 0x0a, 0x10, 0x67, - 0x6c, 0x6f, 0x62, 0x61, 0x6c, 0x5f, 0x65, 0x78, 0x69, 0x74, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x18, - 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0e, 0x67, 0x6c, 0x6f, 0x62, 0x61, 0x6c, 0x45, 0x78, 0x69, - 
0x74, 0x52, 0x6f, 0x6f, 0x74, 0x12, 0x21, 0x0a, 0x0c, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x68, 0x61, - 0x73, 0x68, 0x5f, 0x6c, 0x31, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0b, 0x62, 0x6c, 0x6f, - 0x63, 0x6b, 0x68, 0x61, 0x73, 0x68, 0x4c, 0x31, 0x12, 0x23, 0x0a, 0x0d, 0x6d, 0x69, 0x6e, 0x5f, - 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0d, 0x52, - 0x0c, 0x6d, 0x69, 0x6e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x12, 0x1b, 0x0a, - 0x09, 0x73, 0x6d, 0x74, 0x5f, 0x70, 0x72, 0x6f, 0x6f, 0x66, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0c, - 0x52, 0x08, 0x73, 0x6d, 0x74, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x22, 0xe2, 0x02, 0x0a, 0x0b, 0x49, - 0x6e, 0x70, 0x75, 0x74, 0x50, 0x72, 0x6f, 0x76, 0x65, 0x72, 0x12, 0x40, 0x0a, 0x0d, 0x70, 0x75, - 0x62, 0x6c, 0x69, 0x63, 0x5f, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, - 0x0b, 0x32, 0x1b, 0x2e, 0x61, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x6f, 0x72, 0x2e, 0x76, - 0x31, 0x2e, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x73, 0x52, 0x0c, - 0x70, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x73, 0x12, 0x32, 0x0a, 0x02, - 0x64, 0x62, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x61, 0x67, 0x67, 0x72, 0x65, - 0x67, 0x61, 0x74, 0x6f, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x50, 0x72, - 0x6f, 0x76, 0x65, 0x72, 0x2e, 0x44, 0x62, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x02, 0x64, 0x62, - 0x12, 0x60, 0x0a, 0x12, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x73, 0x5f, 0x62, 0x79, - 0x74, 0x65, 0x63, 0x6f, 0x64, 0x65, 0x18, 0x05, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x31, 0x2e, 0x61, - 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x6f, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x49, 0x6e, 0x70, - 0x75, 0x74, 0x50, 0x72, 0x6f, 0x76, 0x65, 0x72, 0x2e, 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, - 0x74, 0x73, 0x42, 0x79, 0x74, 0x65, 0x63, 0x6f, 0x64, 0x65, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, - 0x11, 0x63, 0x6f, 0x6e, 0x74, 0x72, 
0x61, 0x63, 0x74, 0x73, 0x42, 0x79, 0x74, 0x65, 0x63, 0x6f, - 0x64, 0x65, 0x1a, 0x35, 0x0a, 0x07, 0x44, 0x62, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, - 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, - 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x1a, 0x44, 0x0a, 0x16, 0x43, 0x6f, 0x6e, - 0x74, 0x72, 0x61, 0x63, 0x74, 0x73, 0x42, 0x79, 0x74, 0x65, 0x63, 0x6f, 0x64, 0x65, 0x45, 0x6e, - 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, - 0x61, 0x0a, 0x14, 0x53, 0x74, 0x61, 0x74, 0x65, 0x6c, 0x65, 0x73, 0x73, 0x49, 0x6e, 0x70, 0x75, - 0x74, 0x50, 0x72, 0x6f, 0x76, 0x65, 0x72, 0x12, 0x49, 0x0a, 0x0d, 0x70, 0x75, 0x62, 0x6c, 0x69, - 0x63, 0x5f, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x24, - 0x2e, 0x61, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x6f, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x53, - 0x74, 0x61, 0x74, 0x65, 0x6c, 0x65, 0x73, 0x73, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x49, 0x6e, - 0x70, 0x75, 0x74, 0x73, 0x52, 0x0c, 0x70, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x49, 0x6e, 0x70, 0x75, - 0x74, 0x73, 0x22, 0xfe, 0x01, 0x0a, 0x14, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x49, 0x6e, 0x70, - 0x75, 0x74, 0x73, 0x45, 0x78, 0x74, 0x65, 0x6e, 0x64, 0x65, 0x64, 0x12, 0x40, 0x0a, 0x0d, 0x70, - 0x75, 0x62, 0x6c, 0x69, 0x63, 0x5f, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x73, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x61, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x6f, 0x72, 0x2e, - 0x76, 0x31, 0x2e, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x73, 0x52, - 0x0c, 0x70, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x49, 0x6e, 0x70, 0x75, 0x74, 
0x73, 0x12, 0x24, 0x0a, - 0x0e, 0x6e, 0x65, 0x77, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x65, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x18, - 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0c, 0x6e, 0x65, 0x77, 0x53, 0x74, 0x61, 0x74, 0x65, 0x52, - 0x6f, 0x6f, 0x74, 0x12, 0x2b, 0x0a, 0x12, 0x6e, 0x65, 0x77, 0x5f, 0x61, 0x63, 0x63, 0x5f, 0x69, - 0x6e, 0x70, 0x75, 0x74, 0x5f, 0x68, 0x61, 0x73, 0x68, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x52, - 0x0f, 0x6e, 0x65, 0x77, 0x41, 0x63, 0x63, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x48, 0x61, 0x73, 0x68, - 0x12, 0x2d, 0x0a, 0x13, 0x6e, 0x65, 0x77, 0x5f, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x5f, 0x65, 0x78, - 0x69, 0x74, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x10, 0x6e, - 0x65, 0x77, 0x4c, 0x6f, 0x63, 0x61, 0x6c, 0x45, 0x78, 0x69, 0x74, 0x52, 0x6f, 0x6f, 0x74, 0x12, - 0x22, 0x0a, 0x0d, 0x6e, 0x65, 0x77, 0x5f, 0x62, 0x61, 0x74, 0x63, 0x68, 0x5f, 0x6e, 0x75, 0x6d, - 0x18, 0x05, 0x20, 0x01, 0x28, 0x04, 0x52, 0x0b, 0x6e, 0x65, 0x77, 0x42, 0x61, 0x74, 0x63, 0x68, - 0x4e, 0x75, 0x6d, 0x2a, 0x5c, 0x0a, 0x06, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x12, 0x16, 0x0a, - 0x12, 0x52, 0x45, 0x53, 0x55, 0x4c, 0x54, 0x5f, 0x55, 0x4e, 0x53, 0x50, 0x45, 0x43, 0x49, 0x46, - 0x49, 0x45, 0x44, 0x10, 0x00, 0x12, 0x0d, 0x0a, 0x09, 0x52, 0x45, 0x53, 0x55, 0x4c, 0x54, 0x5f, - 0x4f, 0x4b, 0x10, 0x01, 0x12, 0x10, 0x0a, 0x0c, 0x52, 0x45, 0x53, 0x55, 0x4c, 0x54, 0x5f, 0x45, - 0x52, 0x52, 0x4f, 0x52, 0x10, 0x02, 0x12, 0x19, 0x0a, 0x15, 0x52, 0x45, 0x53, 0x55, 0x4c, 0x54, - 0x5f, 0x49, 0x4e, 0x54, 0x45, 0x52, 0x4e, 0x41, 0x4c, 0x5f, 0x45, 0x52, 0x52, 0x4f, 0x52, 0x10, - 0x03, 0x32, 0x64, 0x0a, 0x11, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x6f, 0x72, 0x53, - 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x4f, 0x0a, 0x07, 0x43, 0x68, 0x61, 0x6e, 0x6e, 0x65, - 0x6c, 0x12, 0x1c, 0x2e, 0x61, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x6f, 0x72, 0x2e, 0x76, - 0x31, 0x2e, 0x50, 0x72, 0x6f, 0x76, 0x65, 0x72, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x1a, - 0x20, 0x2e, 
0x61, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x6f, 0x72, 0x2e, 0x76, 0x31, 0x2e, - 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x6f, 0x72, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, - 0x65, 0x22, 0x00, 0x28, 0x01, 0x30, 0x01, 0x42, 0x41, 0x5a, 0x3f, 0x67, 0x69, 0x74, 0x68, 0x75, - 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x30, 0x78, 0x50, 0x6f, 0x6c, 0x79, 0x67, 0x6f, 0x6e, 0x48, - 0x65, 0x72, 0x6d, 0x65, 0x7a, 0x2f, 0x7a, 0x6b, 0x65, 0x76, 0x6d, 0x2d, 0x61, 0x67, 0x67, 0x72, - 0x65, 0x67, 0x61, 0x74, 0x6f, 0x72, 0x2f, 0x70, 0x72, 0x6f, 0x76, 0x65, 0x72, 0x63, 0x6c, 0x69, - 0x65, 0x6e, 0x74, 0x2f, 0x70, 0x72, 0x6f, 0x76, 0x65, 0x72, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, - 0x6f, 0x33, -} - -var ( - file_aggregator_proto_rawDescOnce sync.Once - file_aggregator_proto_rawDescData = file_aggregator_proto_rawDesc -) - -func file_aggregator_proto_rawDescGZIP() []byte { - file_aggregator_proto_rawDescOnce.Do(func() { - file_aggregator_proto_rawDescData = protoimpl.X.CompressGZIP(file_aggregator_proto_rawDescData) - }) - return file_aggregator_proto_rawDescData -} - -var file_aggregator_proto_enumTypes = make([]protoimpl.EnumInfo, 3) -var file_aggregator_proto_msgTypes = make([]protoimpl.MessageInfo, 27) -var file_aggregator_proto_goTypes = []interface{}{ - (Result)(0), // 0: aggregator.v1.Result - (GetStatusResponse_Status)(0), // 1: aggregator.v1.GetStatusResponse.Status - (GetProofResponse_Result)(0), // 2: aggregator.v1.GetProofResponse.Result - (*Version)(nil), // 3: aggregator.v1.Version - (*AggregatorMessage)(nil), // 4: aggregator.v1.AggregatorMessage - (*ProverMessage)(nil), // 5: aggregator.v1.ProverMessage - (*GetStatusRequest)(nil), // 6: aggregator.v1.GetStatusRequest - (*GenBatchProofRequest)(nil), // 7: aggregator.v1.GenBatchProofRequest - (*GenStatelessBatchProofRequest)(nil), // 8: aggregator.v1.GenStatelessBatchProofRequest - (*GenAggregatedProofRequest)(nil), // 9: aggregator.v1.GenAggregatedProofRequest - (*GenFinalProofRequest)(nil), // 10: 
aggregator.v1.GenFinalProofRequest - (*CancelRequest)(nil), // 11: aggregator.v1.CancelRequest - (*GetProofRequest)(nil), // 12: aggregator.v1.GetProofRequest - (*GetStatusResponse)(nil), // 13: aggregator.v1.GetStatusResponse - (*GenBatchProofResponse)(nil), // 14: aggregator.v1.GenBatchProofResponse - (*GenAggregatedProofResponse)(nil), // 15: aggregator.v1.GenAggregatedProofResponse - (*GenFinalProofResponse)(nil), // 16: aggregator.v1.GenFinalProofResponse - (*CancelResponse)(nil), // 17: aggregator.v1.CancelResponse - (*GetProofResponse)(nil), // 18: aggregator.v1.GetProofResponse - (*FinalProof)(nil), // 19: aggregator.v1.FinalProof - (*PublicInputs)(nil), // 20: aggregator.v1.PublicInputs - (*StatelessPublicInputs)(nil), // 21: aggregator.v1.StatelessPublicInputs - (*L1Data)(nil), // 22: aggregator.v1.L1Data - (*InputProver)(nil), // 23: aggregator.v1.InputProver - (*StatelessInputProver)(nil), // 24: aggregator.v1.StatelessInputProver - (*PublicInputsExtended)(nil), // 25: aggregator.v1.PublicInputsExtended - nil, // 26: aggregator.v1.PublicInputs.L1InfoTreeDataEntry - nil, // 27: aggregator.v1.StatelessPublicInputs.L1InfoTreeDataEntry - nil, // 28: aggregator.v1.InputProver.DbEntry - nil, // 29: aggregator.v1.InputProver.ContractsBytecodeEntry -} -var file_aggregator_proto_depIdxs = []int32{ - 6, // 0: aggregator.v1.AggregatorMessage.get_status_request:type_name -> aggregator.v1.GetStatusRequest - 7, // 1: aggregator.v1.AggregatorMessage.gen_batch_proof_request:type_name -> aggregator.v1.GenBatchProofRequest - 9, // 2: aggregator.v1.AggregatorMessage.gen_aggregated_proof_request:type_name -> aggregator.v1.GenAggregatedProofRequest - 10, // 3: aggregator.v1.AggregatorMessage.gen_final_proof_request:type_name -> aggregator.v1.GenFinalProofRequest - 11, // 4: aggregator.v1.AggregatorMessage.cancel_request:type_name -> aggregator.v1.CancelRequest - 12, // 5: aggregator.v1.AggregatorMessage.get_proof_request:type_name -> aggregator.v1.GetProofRequest - 8, // 6: 
aggregator.v1.AggregatorMessage.gen_stateless_batch_proof_request:type_name -> aggregator.v1.GenStatelessBatchProofRequest - 13, // 7: aggregator.v1.ProverMessage.get_status_response:type_name -> aggregator.v1.GetStatusResponse - 14, // 8: aggregator.v1.ProverMessage.gen_batch_proof_response:type_name -> aggregator.v1.GenBatchProofResponse - 15, // 9: aggregator.v1.ProverMessage.gen_aggregated_proof_response:type_name -> aggregator.v1.GenAggregatedProofResponse - 16, // 10: aggregator.v1.ProverMessage.gen_final_proof_response:type_name -> aggregator.v1.GenFinalProofResponse - 17, // 11: aggregator.v1.ProverMessage.cancel_response:type_name -> aggregator.v1.CancelResponse - 18, // 12: aggregator.v1.ProverMessage.get_proof_response:type_name -> aggregator.v1.GetProofResponse - 23, // 13: aggregator.v1.GenBatchProofRequest.input:type_name -> aggregator.v1.InputProver - 24, // 14: aggregator.v1.GenStatelessBatchProofRequest.input:type_name -> aggregator.v1.StatelessInputProver - 1, // 15: aggregator.v1.GetStatusResponse.status:type_name -> aggregator.v1.GetStatusResponse.Status - 0, // 16: aggregator.v1.GenBatchProofResponse.result:type_name -> aggregator.v1.Result - 0, // 17: aggregator.v1.GenAggregatedProofResponse.result:type_name -> aggregator.v1.Result - 0, // 18: aggregator.v1.GenFinalProofResponse.result:type_name -> aggregator.v1.Result - 0, // 19: aggregator.v1.CancelResponse.result:type_name -> aggregator.v1.Result - 19, // 20: aggregator.v1.GetProofResponse.final_proof:type_name -> aggregator.v1.FinalProof - 2, // 21: aggregator.v1.GetProofResponse.result:type_name -> aggregator.v1.GetProofResponse.Result - 25, // 22: aggregator.v1.FinalProof.public:type_name -> aggregator.v1.PublicInputsExtended - 26, // 23: aggregator.v1.PublicInputs.l1_info_tree_data:type_name -> aggregator.v1.PublicInputs.L1InfoTreeDataEntry - 27, // 24: aggregator.v1.StatelessPublicInputs.l1_info_tree_data:type_name -> aggregator.v1.StatelessPublicInputs.L1InfoTreeDataEntry - 20, // 25: 
aggregator.v1.InputProver.public_inputs:type_name -> aggregator.v1.PublicInputs - 28, // 26: aggregator.v1.InputProver.db:type_name -> aggregator.v1.InputProver.DbEntry - 29, // 27: aggregator.v1.InputProver.contracts_bytecode:type_name -> aggregator.v1.InputProver.ContractsBytecodeEntry - 21, // 28: aggregator.v1.StatelessInputProver.public_inputs:type_name -> aggregator.v1.StatelessPublicInputs - 20, // 29: aggregator.v1.PublicInputsExtended.public_inputs:type_name -> aggregator.v1.PublicInputs - 22, // 30: aggregator.v1.PublicInputs.L1InfoTreeDataEntry.value:type_name -> aggregator.v1.L1Data - 22, // 31: aggregator.v1.StatelessPublicInputs.L1InfoTreeDataEntry.value:type_name -> aggregator.v1.L1Data - 5, // 32: aggregator.v1.AggregatorService.Channel:input_type -> aggregator.v1.ProverMessage - 4, // 33: aggregator.v1.AggregatorService.Channel:output_type -> aggregator.v1.AggregatorMessage - 33, // [33:34] is the sub-list for method output_type - 32, // [32:33] is the sub-list for method input_type - 32, // [32:32] is the sub-list for extension type_name - 32, // [32:32] is the sub-list for extension extendee - 0, // [0:32] is the sub-list for field type_name -} - -func init() { file_aggregator_proto_init() } -func file_aggregator_proto_init() { - if File_aggregator_proto != nil { - return - } - if !protoimpl.UnsafeEnabled { - file_aggregator_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Version); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_aggregator_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*AggregatorMessage); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_aggregator_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ProverMessage); i { - case 0: - return 
&v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_aggregator_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GetStatusRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_aggregator_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GenBatchProofRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_aggregator_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GenStatelessBatchProofRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_aggregator_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GenAggregatedProofRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_aggregator_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GenFinalProofRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_aggregator_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*CancelRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_aggregator_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GetProofRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_aggregator_proto_msgTypes[10].Exporter = func(v 
interface{}, i int) interface{} { - switch v := v.(*GetStatusResponse); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_aggregator_proto_msgTypes[11].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GenBatchProofResponse); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_aggregator_proto_msgTypes[12].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GenAggregatedProofResponse); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_aggregator_proto_msgTypes[13].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GenFinalProofResponse); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_aggregator_proto_msgTypes[14].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*CancelResponse); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_aggregator_proto_msgTypes[15].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GetProofResponse); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_aggregator_proto_msgTypes[16].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*FinalProof); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_aggregator_proto_msgTypes[17].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*PublicInputs); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - 
return nil - } - } - file_aggregator_proto_msgTypes[18].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*StatelessPublicInputs); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_aggregator_proto_msgTypes[19].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*L1Data); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_aggregator_proto_msgTypes[20].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*InputProver); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_aggregator_proto_msgTypes[21].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*StatelessInputProver); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_aggregator_proto_msgTypes[22].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*PublicInputsExtended); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - } - file_aggregator_proto_msgTypes[1].OneofWrappers = []interface{}{ - (*AggregatorMessage_GetStatusRequest)(nil), - (*AggregatorMessage_GenBatchProofRequest)(nil), - (*AggregatorMessage_GenAggregatedProofRequest)(nil), - (*AggregatorMessage_GenFinalProofRequest)(nil), - (*AggregatorMessage_CancelRequest)(nil), - (*AggregatorMessage_GetProofRequest)(nil), - (*AggregatorMessage_GenStatelessBatchProofRequest)(nil), - } - file_aggregator_proto_msgTypes[2].OneofWrappers = []interface{}{ - (*ProverMessage_GetStatusResponse)(nil), - (*ProverMessage_GenBatchProofResponse)(nil), - (*ProverMessage_GenAggregatedProofResponse)(nil), - (*ProverMessage_GenFinalProofResponse)(nil), - 
(*ProverMessage_CancelResponse)(nil), - (*ProverMessage_GetProofResponse)(nil), - } - file_aggregator_proto_msgTypes[15].OneofWrappers = []interface{}{ - (*GetProofResponse_FinalProof)(nil), - (*GetProofResponse_RecursiveProof)(nil), - } - type x struct{} - out := protoimpl.TypeBuilder{ - File: protoimpl.DescBuilder{ - GoPackagePath: reflect.TypeOf(x{}).PkgPath(), - RawDescriptor: file_aggregator_proto_rawDesc, - NumEnums: 3, - NumMessages: 27, - NumExtensions: 0, - NumServices: 1, - }, - GoTypes: file_aggregator_proto_goTypes, - DependencyIndexes: file_aggregator_proto_depIdxs, - EnumInfos: file_aggregator_proto_enumTypes, - MessageInfos: file_aggregator_proto_msgTypes, - }.Build() - File_aggregator_proto = out.File - file_aggregator_proto_rawDesc = nil - file_aggregator_proto_goTypes = nil - file_aggregator_proto_depIdxs = nil -} diff --git a/aggregator/prover/aggregator_grpc.pb.go b/aggregator/prover/aggregator_grpc.pb.go deleted file mode 100644 index 32bdb107..00000000 --- a/aggregator/prover/aggregator_grpc.pb.go +++ /dev/null @@ -1,150 +0,0 @@ -// Code generated by protoc-gen-go-grpc. DO NOT EDIT. -// versions: -// - protoc-gen-go-grpc v1.4.0 -// - protoc v5.27.0 -// source: aggregator.proto - -package prover - -import ( - context "context" - grpc "google.golang.org/grpc" - codes "google.golang.org/grpc/codes" - status "google.golang.org/grpc/status" -) - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the grpc package it is being compiled against. -// Requires gRPC-Go v1.62.0 or later. -const _ = grpc.SupportPackageIsVersion8 - -const ( - AggregatorService_Channel_FullMethodName = "/aggregator.v1.AggregatorService/Channel" -) - -// AggregatorServiceClient is the client API for AggregatorService service. -// -// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream. 
-// -// * -// Define all methods implementes by the gRPC -// Channel: prover receives aggregator messages and returns prover messages with the same id -type AggregatorServiceClient interface { - Channel(ctx context.Context, opts ...grpc.CallOption) (AggregatorService_ChannelClient, error) -} - -type aggregatorServiceClient struct { - cc grpc.ClientConnInterface -} - -func NewAggregatorServiceClient(cc grpc.ClientConnInterface) AggregatorServiceClient { - return &aggregatorServiceClient{cc} -} - -func (c *aggregatorServiceClient) Channel(ctx context.Context, opts ...grpc.CallOption) (AggregatorService_ChannelClient, error) { - cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) - stream, err := c.cc.NewStream(ctx, &AggregatorService_ServiceDesc.Streams[0], AggregatorService_Channel_FullMethodName, cOpts...) - if err != nil { - return nil, err - } - x := &aggregatorServiceChannelClient{ClientStream: stream} - return x, nil -} - -type AggregatorService_ChannelClient interface { - Send(*ProverMessage) error - Recv() (*AggregatorMessage, error) - grpc.ClientStream -} - -type aggregatorServiceChannelClient struct { - grpc.ClientStream -} - -func (x *aggregatorServiceChannelClient) Send(m *ProverMessage) error { - return x.ClientStream.SendMsg(m) -} - -func (x *aggregatorServiceChannelClient) Recv() (*AggregatorMessage, error) { - m := new(AggregatorMessage) - if err := x.ClientStream.RecvMsg(m); err != nil { - return nil, err - } - return m, nil -} - -// AggregatorServiceServer is the server API for AggregatorService service. 
-// All implementations must embed UnimplementedAggregatorServiceServer -// for forward compatibility -// -// * -// Define all methods implementes by the gRPC -// Channel: prover receives aggregator messages and returns prover messages with the same id -type AggregatorServiceServer interface { - Channel(AggregatorService_ChannelServer) error - mustEmbedUnimplementedAggregatorServiceServer() -} - -// UnimplementedAggregatorServiceServer must be embedded to have forward compatible implementations. -type UnimplementedAggregatorServiceServer struct { -} - -func (UnimplementedAggregatorServiceServer) Channel(AggregatorService_ChannelServer) error { - return status.Errorf(codes.Unimplemented, "method Channel not implemented") -} -func (UnimplementedAggregatorServiceServer) mustEmbedUnimplementedAggregatorServiceServer() {} - -// UnsafeAggregatorServiceServer may be embedded to opt out of forward compatibility for this service. -// Use of this interface is not recommended, as added methods to AggregatorServiceServer will -// result in compilation errors. 
-type UnsafeAggregatorServiceServer interface { - mustEmbedUnimplementedAggregatorServiceServer() -} - -func RegisterAggregatorServiceServer(s grpc.ServiceRegistrar, srv AggregatorServiceServer) { - s.RegisterService(&AggregatorService_ServiceDesc, srv) -} - -func _AggregatorService_Channel_Handler(srv interface{}, stream grpc.ServerStream) error { - return srv.(AggregatorServiceServer).Channel(&aggregatorServiceChannelServer{ServerStream: stream}) -} - -type AggregatorService_ChannelServer interface { - Send(*AggregatorMessage) error - Recv() (*ProverMessage, error) - grpc.ServerStream -} - -type aggregatorServiceChannelServer struct { - grpc.ServerStream -} - -func (x *aggregatorServiceChannelServer) Send(m *AggregatorMessage) error { - return x.ServerStream.SendMsg(m) -} - -func (x *aggregatorServiceChannelServer) Recv() (*ProverMessage, error) { - m := new(ProverMessage) - if err := x.ServerStream.RecvMsg(m); err != nil { - return nil, err - } - return m, nil -} - -// AggregatorService_ServiceDesc is the grpc.ServiceDesc for AggregatorService service. -// It's only intended for direct use with grpc.RegisterService, -// and not to be introspected or modified (even as a copy) -var AggregatorService_ServiceDesc = grpc.ServiceDesc{ - ServiceName: "aggregator.v1.AggregatorService", - HandlerType: (*AggregatorServiceServer)(nil), - Methods: []grpc.MethodDesc{}, - Streams: []grpc.StreamDesc{ - { - StreamName: "Channel", - Handler: _AggregatorService_Channel_Handler, - ServerStreams: true, - ClientStreams: true, - }, - }, - Metadata: "aggregator.proto", -} diff --git a/aggregator/prover/mocks/mock_channel.go b/aggregator/prover/mocks/mock_channel.go deleted file mode 100644 index 20638d69..00000000 --- a/aggregator/prover/mocks/mock_channel.go +++ /dev/null @@ -1,406 +0,0 @@ -// Code generated by mockery. DO NOT EDIT. 
- -package mocks - -import ( - context "context" - - mock "github.com/stretchr/testify/mock" - metadata "google.golang.org/grpc/metadata" - - prover "github.com/agglayer/aggkit/aggregator/prover" -) - -// ChannelMock is an autogenerated mock type for the AggregatorService_ChannelServer type -type ChannelMock struct { - mock.Mock -} - -type ChannelMock_Expecter struct { - mock *mock.Mock -} - -func (_m *ChannelMock) EXPECT() *ChannelMock_Expecter { - return &ChannelMock_Expecter{mock: &_m.Mock} -} - -// Context provides a mock function with no fields -func (_m *ChannelMock) Context() context.Context { - ret := _m.Called() - - if len(ret) == 0 { - panic("no return value specified for Context") - } - - var r0 context.Context - if rf, ok := ret.Get(0).(func() context.Context); ok { - r0 = rf() - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(context.Context) - } - } - - return r0 -} - -// ChannelMock_Context_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Context' -type ChannelMock_Context_Call struct { - *mock.Call -} - -// Context is a helper method to define mock.On call -func (_e *ChannelMock_Expecter) Context() *ChannelMock_Context_Call { - return &ChannelMock_Context_Call{Call: _e.mock.On("Context")} -} - -func (_c *ChannelMock_Context_Call) Run(run func()) *ChannelMock_Context_Call { - _c.Call.Run(func(args mock.Arguments) { - run() - }) - return _c -} - -func (_c *ChannelMock_Context_Call) Return(_a0 context.Context) *ChannelMock_Context_Call { - _c.Call.Return(_a0) - return _c -} - -func (_c *ChannelMock_Context_Call) RunAndReturn(run func() context.Context) *ChannelMock_Context_Call { - _c.Call.Return(run) - return _c -} - -// Recv provides a mock function with no fields -func (_m *ChannelMock) Recv() (*prover.ProverMessage, error) { - ret := _m.Called() - - if len(ret) == 0 { - panic("no return value specified for Recv") - } - - var r0 *prover.ProverMessage - var r1 error - if rf, ok := ret.Get(0).(func() 
(*prover.ProverMessage, error)); ok { - return rf() - } - if rf, ok := ret.Get(0).(func() *prover.ProverMessage); ok { - r0 = rf() - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(*prover.ProverMessage) - } - } - - if rf, ok := ret.Get(1).(func() error); ok { - r1 = rf() - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// ChannelMock_Recv_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Recv' -type ChannelMock_Recv_Call struct { - *mock.Call -} - -// Recv is a helper method to define mock.On call -func (_e *ChannelMock_Expecter) Recv() *ChannelMock_Recv_Call { - return &ChannelMock_Recv_Call{Call: _e.mock.On("Recv")} -} - -func (_c *ChannelMock_Recv_Call) Run(run func()) *ChannelMock_Recv_Call { - _c.Call.Run(func(args mock.Arguments) { - run() - }) - return _c -} - -func (_c *ChannelMock_Recv_Call) Return(_a0 *prover.ProverMessage, _a1 error) *ChannelMock_Recv_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *ChannelMock_Recv_Call) RunAndReturn(run func() (*prover.ProverMessage, error)) *ChannelMock_Recv_Call { - _c.Call.Return(run) - return _c -} - -// RecvMsg provides a mock function with given fields: m -func (_m *ChannelMock) RecvMsg(m interface{}) error { - ret := _m.Called(m) - - if len(ret) == 0 { - panic("no return value specified for RecvMsg") - } - - var r0 error - if rf, ok := ret.Get(0).(func(interface{}) error); ok { - r0 = rf(m) - } else { - r0 = ret.Error(0) - } - - return r0 -} - -// ChannelMock_RecvMsg_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'RecvMsg' -type ChannelMock_RecvMsg_Call struct { - *mock.Call -} - -// RecvMsg is a helper method to define mock.On call -// - m interface{} -func (_e *ChannelMock_Expecter) RecvMsg(m interface{}) *ChannelMock_RecvMsg_Call { - return &ChannelMock_RecvMsg_Call{Call: _e.mock.On("RecvMsg", m)} -} - -func (_c *ChannelMock_RecvMsg_Call) Run(run func(m interface{})) *ChannelMock_RecvMsg_Call { - 
_c.Call.Run(func(args mock.Arguments) { - run(args[0].(interface{})) - }) - return _c -} - -func (_c *ChannelMock_RecvMsg_Call) Return(_a0 error) *ChannelMock_RecvMsg_Call { - _c.Call.Return(_a0) - return _c -} - -func (_c *ChannelMock_RecvMsg_Call) RunAndReturn(run func(interface{}) error) *ChannelMock_RecvMsg_Call { - _c.Call.Return(run) - return _c -} - -// Send provides a mock function with given fields: _a0 -func (_m *ChannelMock) Send(_a0 *prover.AggregatorMessage) error { - ret := _m.Called(_a0) - - if len(ret) == 0 { - panic("no return value specified for Send") - } - - var r0 error - if rf, ok := ret.Get(0).(func(*prover.AggregatorMessage) error); ok { - r0 = rf(_a0) - } else { - r0 = ret.Error(0) - } - - return r0 -} - -// ChannelMock_Send_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Send' -type ChannelMock_Send_Call struct { - *mock.Call -} - -// Send is a helper method to define mock.On call -// - _a0 *prover.AggregatorMessage -func (_e *ChannelMock_Expecter) Send(_a0 interface{}) *ChannelMock_Send_Call { - return &ChannelMock_Send_Call{Call: _e.mock.On("Send", _a0)} -} - -func (_c *ChannelMock_Send_Call) Run(run func(_a0 *prover.AggregatorMessage)) *ChannelMock_Send_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(*prover.AggregatorMessage)) - }) - return _c -} - -func (_c *ChannelMock_Send_Call) Return(_a0 error) *ChannelMock_Send_Call { - _c.Call.Return(_a0) - return _c -} - -func (_c *ChannelMock_Send_Call) RunAndReturn(run func(*prover.AggregatorMessage) error) *ChannelMock_Send_Call { - _c.Call.Return(run) - return _c -} - -// SendHeader provides a mock function with given fields: _a0 -func (_m *ChannelMock) SendHeader(_a0 metadata.MD) error { - ret := _m.Called(_a0) - - if len(ret) == 0 { - panic("no return value specified for SendHeader") - } - - var r0 error - if rf, ok := ret.Get(0).(func(metadata.MD) error); ok { - r0 = rf(_a0) - } else { - r0 = ret.Error(0) - } - - return r0 -} - 
-// ChannelMock_SendHeader_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'SendHeader' -type ChannelMock_SendHeader_Call struct { - *mock.Call -} - -// SendHeader is a helper method to define mock.On call -// - _a0 metadata.MD -func (_e *ChannelMock_Expecter) SendHeader(_a0 interface{}) *ChannelMock_SendHeader_Call { - return &ChannelMock_SendHeader_Call{Call: _e.mock.On("SendHeader", _a0)} -} - -func (_c *ChannelMock_SendHeader_Call) Run(run func(_a0 metadata.MD)) *ChannelMock_SendHeader_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(metadata.MD)) - }) - return _c -} - -func (_c *ChannelMock_SendHeader_Call) Return(_a0 error) *ChannelMock_SendHeader_Call { - _c.Call.Return(_a0) - return _c -} - -func (_c *ChannelMock_SendHeader_Call) RunAndReturn(run func(metadata.MD) error) *ChannelMock_SendHeader_Call { - _c.Call.Return(run) - return _c -} - -// SendMsg provides a mock function with given fields: m -func (_m *ChannelMock) SendMsg(m interface{}) error { - ret := _m.Called(m) - - if len(ret) == 0 { - panic("no return value specified for SendMsg") - } - - var r0 error - if rf, ok := ret.Get(0).(func(interface{}) error); ok { - r0 = rf(m) - } else { - r0 = ret.Error(0) - } - - return r0 -} - -// ChannelMock_SendMsg_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'SendMsg' -type ChannelMock_SendMsg_Call struct { - *mock.Call -} - -// SendMsg is a helper method to define mock.On call -// - m interface{} -func (_e *ChannelMock_Expecter) SendMsg(m interface{}) *ChannelMock_SendMsg_Call { - return &ChannelMock_SendMsg_Call{Call: _e.mock.On("SendMsg", m)} -} - -func (_c *ChannelMock_SendMsg_Call) Run(run func(m interface{})) *ChannelMock_SendMsg_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(interface{})) - }) - return _c -} - -func (_c *ChannelMock_SendMsg_Call) Return(_a0 error) *ChannelMock_SendMsg_Call { - _c.Call.Return(_a0) - return _c -} - -func 
(_c *ChannelMock_SendMsg_Call) RunAndReturn(run func(interface{}) error) *ChannelMock_SendMsg_Call { - _c.Call.Return(run) - return _c -} - -// SetHeader provides a mock function with given fields: _a0 -func (_m *ChannelMock) SetHeader(_a0 metadata.MD) error { - ret := _m.Called(_a0) - - if len(ret) == 0 { - panic("no return value specified for SetHeader") - } - - var r0 error - if rf, ok := ret.Get(0).(func(metadata.MD) error); ok { - r0 = rf(_a0) - } else { - r0 = ret.Error(0) - } - - return r0 -} - -// ChannelMock_SetHeader_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'SetHeader' -type ChannelMock_SetHeader_Call struct { - *mock.Call -} - -// SetHeader is a helper method to define mock.On call -// - _a0 metadata.MD -func (_e *ChannelMock_Expecter) SetHeader(_a0 interface{}) *ChannelMock_SetHeader_Call { - return &ChannelMock_SetHeader_Call{Call: _e.mock.On("SetHeader", _a0)} -} - -func (_c *ChannelMock_SetHeader_Call) Run(run func(_a0 metadata.MD)) *ChannelMock_SetHeader_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(metadata.MD)) - }) - return _c -} - -func (_c *ChannelMock_SetHeader_Call) Return(_a0 error) *ChannelMock_SetHeader_Call { - _c.Call.Return(_a0) - return _c -} - -func (_c *ChannelMock_SetHeader_Call) RunAndReturn(run func(metadata.MD) error) *ChannelMock_SetHeader_Call { - _c.Call.Return(run) - return _c -} - -// SetTrailer provides a mock function with given fields: _a0 -func (_m *ChannelMock) SetTrailer(_a0 metadata.MD) { - _m.Called(_a0) -} - -// ChannelMock_SetTrailer_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'SetTrailer' -type ChannelMock_SetTrailer_Call struct { - *mock.Call -} - -// SetTrailer is a helper method to define mock.On call -// - _a0 metadata.MD -func (_e *ChannelMock_Expecter) SetTrailer(_a0 interface{}) *ChannelMock_SetTrailer_Call { - return &ChannelMock_SetTrailer_Call{Call: _e.mock.On("SetTrailer", _a0)} -} - -func 
(_c *ChannelMock_SetTrailer_Call) Run(run func(_a0 metadata.MD)) *ChannelMock_SetTrailer_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(metadata.MD)) - }) - return _c -} - -func (_c *ChannelMock_SetTrailer_Call) Return() *ChannelMock_SetTrailer_Call { - _c.Call.Return() - return _c -} - -func (_c *ChannelMock_SetTrailer_Call) RunAndReturn(run func(metadata.MD)) *ChannelMock_SetTrailer_Call { - _c.Run(run) - return _c -} - -// NewChannelMock creates a new instance of ChannelMock. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. -// The first argument is typically a *testing.T value. -func NewChannelMock(t interface { - mock.TestingT - Cleanup(func()) -}) *ChannelMock { - mock := &ChannelMock{} - mock.Mock.Test(t) - - t.Cleanup(func() { mock.AssertExpectations(t) }) - - return mock -} diff --git a/aggregator/prover/prover.go b/aggregator/prover/prover.go deleted file mode 100644 index 00141581..00000000 --- a/aggregator/prover/prover.go +++ /dev/null @@ -1,460 +0,0 @@ -package prover - -import ( - "context" - "encoding/json" - "errors" - "fmt" - "math/big" - "net" - "strconv" - "strings" - "time" - - "github.com/agglayer/aggkit/config/types" - "github.com/agglayer/aggkit/log" - "github.com/ethereum/go-ethereum/common" - "github.com/iden3/go-iden3-crypto/poseidon" -) - -const ( - StateRootStartIndex = 19 - StateRootFinalIndex = StateRootStartIndex + 8 - AccInputHashStartIndex = 27 - AccInputHashFinalIndex = AccInputHashStartIndex + 8 -) - -var ( - ErrBadProverResponse = errors.New("prover returned wrong type for response") //nolint:revive - ErrProverInternalError = errors.New("prover returned INTERNAL_ERROR response") //nolint:revive - ErrProverCompletedError = errors.New("prover returned COMPLETED_ERROR response") //nolint:revive - ErrBadRequest = errors.New("prover returned ERROR for a bad request") //nolint:revive - ErrUnspecified = errors.New("prover returned an UNSPECIFIED response") 
//nolint:revive - ErrUnknown = errors.New("prover returned an unknown response") //nolint:revive - ErrProofCanceled = errors.New("proof has been canceled") //nolint:revive -) - -// Prover abstraction of the grpc prover client. -type Prover struct { - logger *log.Logger - name string - id string - address net.Addr - proofStatePollingInterval types.Duration - stream AggregatorService_ChannelServer -} - -// New returns a new Prover instance. -func New(logger *log.Logger, stream AggregatorService_ChannelServer, - addr net.Addr, proofStatePollingInterval types.Duration) (*Prover, error) { - p := &Prover{ - logger: logger, - stream: stream, - address: addr, - proofStatePollingInterval: proofStatePollingInterval, - } - - status, err := p.Status() - if err != nil { - return nil, fmt.Errorf("failed to retrieve prover id %w", err) - } - p.name = status.ProverName - p.id = status.ProverId - - return p, nil -} - -// Name returns the Prover name. -func (p *Prover) Name() string { return p.name } - -// ID returns the Prover ID. -func (p *Prover) ID() string { return p.id } - -// Addr returns the prover IP address. -func (p *Prover) Addr() string { - if p.address == nil { - return "" - } - - return p.address.String() -} - -// Status gets the prover status. -func (p *Prover) Status() (*GetStatusResponse, error) { - req := &AggregatorMessage{ - Request: &AggregatorMessage_GetStatusRequest{ - GetStatusRequest: &GetStatusRequest{}, - }, - } - res, err := p.call(req) - if err != nil { - return nil, err - } - if msg, ok := res.Response.(*ProverMessage_GetStatusResponse); ok { - return msg.GetStatusResponse, nil - } - - return nil, fmt.Errorf("%w, wanted %T, got %T", ErrBadProverResponse, &ProverMessage_GetStatusResponse{}, res.Response) -} - -// IsIdle returns true if the prover is idling. 
-func (p *Prover) IsIdle() (bool, error) { - status, err := p.Status() - if err != nil { - return false, err - } - - return status.Status == GetStatusResponse_STATUS_IDLE, nil -} - -// SupportsForkID returns true if the prover supports the given fork id. -func (p *Prover) SupportsForkID(forkID uint64) bool { - status, err := p.Status() - if err != nil { - p.logger.Warnf("Error asking status for prover ID %s: %v", p.ID(), err) - return false - } - - p.logger.Debugf("Prover %s supports fork ID %d", p.ID(), status.ForkId) - - return status.ForkId == forkID -} - -// BatchProof instructs the prover to generate a batch proof for the provided -// input. It returns the ID of the proof being computed. -func (p *Prover) BatchProof(input *StatelessInputProver) (*string, error) { - req := &AggregatorMessage{ - Request: &AggregatorMessage_GenStatelessBatchProofRequest{ - GenStatelessBatchProofRequest: &GenStatelessBatchProofRequest{Input: input}, - }, - } - res, err := p.call(req) - if err != nil { - return nil, err - } - - if msg, ok := res.Response.(*ProverMessage_GenBatchProofResponse); ok { - switch msg.GenBatchProofResponse.Result { - case Result_RESULT_UNSPECIFIED: - return nil, fmt.Errorf( - "failed to generate proof %s, %w, input %v", - msg.GenBatchProofResponse.String(), ErrUnspecified, input, - ) - case Result_RESULT_OK: - return &msg.GenBatchProofResponse.Id, nil - case Result_RESULT_ERROR: - return nil, fmt.Errorf( - "failed to generate proof %s, %w, input %v", - msg.GenBatchProofResponse.String(), ErrBadRequest, input, - ) - case Result_RESULT_INTERNAL_ERROR: - return nil, fmt.Errorf( - "failed to generate proof %s, %w, input %v", - msg.GenBatchProofResponse.String(), ErrProverInternalError, input, - ) - default: - return nil, fmt.Errorf( - "failed to generate proof %s, %w,input %v", - msg.GenBatchProofResponse.String(), ErrUnknown, input, - ) - } - } - - return nil, fmt.Errorf( - "%w, wanted %T, got %T", - ErrBadProverResponse, 
&ProverMessage_GenBatchProofResponse{}, res.Response, - ) -} - -// AggregatedProof instructs the prover to generate an aggregated proof from -// the two inputs provided. It returns the ID of the proof being computed. -func (p *Prover) AggregatedProof(inputProof1, inputProof2 string) (*string, error) { - req := &AggregatorMessage{ - Request: &AggregatorMessage_GenAggregatedProofRequest{ - GenAggregatedProofRequest: &GenAggregatedProofRequest{ - RecursiveProof_1: inputProof1, - RecursiveProof_2: inputProof2, - }, - }, - } - res, err := p.call(req) - if err != nil { - return nil, err - } - - if msg, ok := res.Response.(*ProverMessage_GenAggregatedProofResponse); ok { - switch msg.GenAggregatedProofResponse.Result { - case Result_RESULT_UNSPECIFIED: - return nil, fmt.Errorf("failed to aggregate proofs %s, %w, input 1 %s, input 2 %s", - msg.GenAggregatedProofResponse.String(), ErrUnspecified, inputProof1, inputProof2) - case Result_RESULT_OK: - return &msg.GenAggregatedProofResponse.Id, nil - case Result_RESULT_ERROR: - return nil, fmt.Errorf("failed to aggregate proofs %s, %w, input 1 %s, input 2 %s", - msg.GenAggregatedProofResponse.String(), ErrBadRequest, inputProof1, inputProof2) - case Result_RESULT_INTERNAL_ERROR: - return nil, fmt.Errorf("failed to aggregate proofs %s, %w, input 1 %s, input 2 %s", - msg.GenAggregatedProofResponse.String(), ErrProverInternalError, inputProof1, inputProof2) - default: - return nil, fmt.Errorf("failed to aggregate proofs %s, %w, input 1 %s, input 2 %s", - msg.GenAggregatedProofResponse.String(), ErrUnknown, inputProof1, inputProof2) - } - } - - return nil, fmt.Errorf( - "%w, wanted %T, got %T", - ErrBadProverResponse, &ProverMessage_GenAggregatedProofResponse{}, res.Response, - ) -} - -// FinalProof instructs the prover to generate a final proof for the given -// input. It returns the ID of the proof being computed. 
-func (p *Prover) FinalProof(inputProof string, aggregatorAddr string) (*string, error) { - req := &AggregatorMessage{ - Request: &AggregatorMessage_GenFinalProofRequest{ - GenFinalProofRequest: &GenFinalProofRequest{ - RecursiveProof: inputProof, - AggregatorAddr: aggregatorAddr, - }, - }, - } - res, err := p.call(req) - if err != nil { - return nil, err - } - - if msg, ok := res.Response.(*ProverMessage_GenFinalProofResponse); ok { - switch msg.GenFinalProofResponse.Result { - case Result_RESULT_UNSPECIFIED: - return nil, fmt.Errorf("failed to generate final proof %s, %w, input %s", - msg.GenFinalProofResponse.String(), ErrUnspecified, inputProof) - case Result_RESULT_OK: - return &msg.GenFinalProofResponse.Id, nil - case Result_RESULT_ERROR: - return nil, fmt.Errorf("failed to generate final proof %s, %w, input %s", - msg.GenFinalProofResponse.String(), ErrBadRequest, inputProof) - case Result_RESULT_INTERNAL_ERROR: - return nil, fmt.Errorf("failed to generate final proof %s, %w, input %s", - msg.GenFinalProofResponse.String(), ErrProverInternalError, inputProof) - default: - return nil, fmt.Errorf("failed to generate final proof %s, %w, input %s", - msg.GenFinalProofResponse.String(), ErrUnknown, inputProof) - } - } - - return nil, fmt.Errorf( - "%w, wanted %T, got %T", - ErrBadProverResponse, &ProverMessage_GenFinalProofResponse{}, res.Response, - ) -} - -// CancelProofRequest asks the prover to stop the generation of the proof -// matching the provided proofID. 
-func (p *Prover) CancelProofRequest(proofID string) error { - req := &AggregatorMessage{ - Request: &AggregatorMessage_CancelRequest{ - CancelRequest: &CancelRequest{Id: proofID}, - }, - } - res, err := p.call(req) - if err != nil { - return err - } - if msg, ok := res.Response.(*ProverMessage_CancelResponse); ok { - switch msg.CancelResponse.Result { - case Result_RESULT_UNSPECIFIED: - return fmt.Errorf("failed to cancel proof id [%s], %w, %s", - proofID, ErrUnspecified, msg.CancelResponse.String()) - case Result_RESULT_OK: - return nil - case Result_RESULT_ERROR: - return fmt.Errorf("failed to cancel proof id [%s], %w, %s", - proofID, ErrBadRequest, msg.CancelResponse.String()) - case Result_RESULT_INTERNAL_ERROR: - return fmt.Errorf("failed to cancel proof id [%s], %w, %s", - proofID, ErrProverInternalError, msg.CancelResponse.String()) - default: - return fmt.Errorf("failed to cancel proof id [%s], %w, %s", - proofID, ErrUnknown, msg.CancelResponse.String()) - } - } - - return fmt.Errorf("%w, wanted %T, got %T", ErrBadProverResponse, &ProverMessage_CancelResponse{}, res.Response) -} - -// WaitRecursiveProof waits for a recursive proof to be generated by the prover -// and returns it. 
-func (p *Prover) WaitRecursiveProof(ctx context.Context, proofID string) (string, common.Hash, common.Hash, error) { - res, err := p.waitProof(ctx, proofID) - if err != nil { - return "", common.Hash{}, common.Hash{}, err - } - - resProof, ok := res.Proof.(*GetProofResponse_RecursiveProof) - if !ok { - return "", common.Hash{}, common.Hash{}, fmt.Errorf( - "%w, wanted %T, got %T", - ErrBadProverResponse, &GetProofResponse_RecursiveProof{}, res.Proof, - ) - } - - sr, err := GetSanityCheckHashFromProof(p.logger, resProof.RecursiveProof, StateRootStartIndex, StateRootFinalIndex) - if err != nil && sr != (common.Hash{}) { - p.logger.Errorf("Error getting state root from proof: %v", err) - } - - accInputHash, err := GetSanityCheckHashFromProof(p.logger, resProof.RecursiveProof, - AccInputHashStartIndex, AccInputHashFinalIndex) - if err != nil && accInputHash != (common.Hash{}) { - p.logger.Errorf("Error getting acc input hash from proof: %v", err) - } - - if sr == (common.Hash{}) { - p.logger.Info("Recursive proof does not contain state root. Possibly mock prover is in use.") - } - - return resProof.RecursiveProof, sr, accInputHash, nil -} - -// WaitFinalProof waits for the final proof to be generated by the prover and -// returns it. -func (p *Prover) WaitFinalProof(ctx context.Context, proofID string) (*FinalProof, error) { - res, err := p.waitProof(ctx, proofID) - if err != nil { - return nil, err - } - resProof, ok := res.Proof.(*GetProofResponse_FinalProof) - if !ok { - return nil, fmt.Errorf("%w, wanted %T, got %T", ErrBadProverResponse, &GetProofResponse_FinalProof{}, res.Proof) - } - - return resProof.FinalProof, nil -} - -// waitProof waits for a proof to be generated by the prover and returns the -// prover response. -func (p *Prover) waitProof(ctx context.Context, proofID string) (*GetProofResponse, error) { - req := &AggregatorMessage{ - Request: &AggregatorMessage_GetProofRequest{ - GetProofRequest: &GetProofRequest{ - // TODO(pg): set Timeout field? 
- Id: proofID, - }, - }, - } - - for { - select { - case <-ctx.Done(): - return nil, ctx.Err() - default: - res, err := p.call(req) - if err != nil { - return nil, err - } - if msg, ok := res.Response.(*ProverMessage_GetProofResponse); ok { - switch msg.GetProofResponse.Result { - case GetProofResponse_RESULT_PENDING: - time.Sleep(p.proofStatePollingInterval.Duration) - - continue - case GetProofResponse_RESULT_UNSPECIFIED: - return nil, fmt.Errorf("failed to get proof ID: %s, %w, prover response: %s", - proofID, ErrUnspecified, msg.GetProofResponse.String()) - case GetProofResponse_RESULT_COMPLETED_OK: - return msg.GetProofResponse, nil - case GetProofResponse_RESULT_ERROR: - return nil, fmt.Errorf("failed to get proof with ID %s, %w, prover response: %s", - proofID, ErrBadRequest, msg.GetProofResponse.String()) - case GetProofResponse_RESULT_COMPLETED_ERROR: - return nil, fmt.Errorf("failed to get proof with ID %s, %w, prover response: %s", - proofID, ErrProverCompletedError, msg.GetProofResponse.String()) - case GetProofResponse_RESULT_INTERNAL_ERROR: - return nil, fmt.Errorf("failed to get proof ID: %s, %w, prover response: %s", - proofID, ErrProverInternalError, msg.GetProofResponse.String()) - case GetProofResponse_RESULT_CANCEL: - return nil, fmt.Errorf("proof generation was cancelled for proof ID %s, %w, prover response: %s", - proofID, ErrProofCanceled, msg.GetProofResponse.String()) - default: - return nil, fmt.Errorf("failed to get proof ID: %s, %w, prover response: %s", - proofID, ErrUnknown, msg.GetProofResponse.String()) - } - } - - return nil, fmt.Errorf( - "%w, wanted %T, got %T", - ErrBadProverResponse, &ProverMessage_GetProofResponse{}, res.Response, - ) - } - } -} - -// call sends a message to the prover and waits to receive the response over -// the connection stream. 
-func (p *Prover) call(req *AggregatorMessage) (*ProverMessage, error) { - if err := p.stream.Send(req); err != nil { - return nil, err - } - res, err := p.stream.Recv() - if err != nil { - return nil, err - } - - return res, nil -} - -// GetSanityCheckHashFromProof returns info from the proof -func GetSanityCheckHashFromProof(logger *log.Logger, proof string, startIndex, endIndex int) (common.Hash, error) { - type Publics struct { - Publics []string `mapstructure:"publics"` - } - - // Check if the proof contains the SR - if !strings.Contains(proof, "publics") { - return common.Hash{}, nil - } - - var publics Publics - err := json.Unmarshal([]byte(proof), &publics) - if err != nil { - logger.Errorf("Error unmarshalling proof: %v", err) - return common.Hash{}, err - } - - var ( - v [8]uint64 - j = 0 - ) - for i := startIndex; i < endIndex; i++ { - u64, err := strconv.ParseInt(publics.Publics[i], 10, 64) - if err != nil { - logger.Fatal(err) - } - v[j] = uint64(u64) - j++ - } - bigSR := fea2scalar(v[:]) - hexSR := fmt.Sprintf("%x", bigSR) - if len(hexSR)%2 != 0 { - hexSR = "0" + hexSR - } - - return common.HexToHash(hexSR), nil -} - -// fea2scalar converts array of uint64 values into one *big.Int. 
-func fea2scalar(v []uint64) *big.Int { - if len(v) != poseidon.NROUNDSF { - return big.NewInt(0) - } - res := new(big.Int).SetUint64(v[0]) - res.Add(res, new(big.Int).Lsh(new(big.Int).SetUint64(v[1]), 32)) //nolint:mnd - res.Add(res, new(big.Int).Lsh(new(big.Int).SetUint64(v[2]), 64)) //nolint:mnd - res.Add(res, new(big.Int).Lsh(new(big.Int).SetUint64(v[3]), 96)) //nolint:mnd - res.Add(res, new(big.Int).Lsh(new(big.Int).SetUint64(v[4]), 128)) //nolint:mnd - res.Add(res, new(big.Int).Lsh(new(big.Int).SetUint64(v[5]), 160)) //nolint:mnd - res.Add(res, new(big.Int).Lsh(new(big.Int).SetUint64(v[6]), 192)) //nolint:mnd - res.Add(res, new(big.Int).Lsh(new(big.Int).SetUint64(v[7]), 224)) //nolint:mnd - - return res -} diff --git a/aggregator/prover/prover_test.go b/aggregator/prover/prover_test.go deleted file mode 100644 index 9e97981f..00000000 --- a/aggregator/prover/prover_test.go +++ /dev/null @@ -1,109 +0,0 @@ -package prover_test - -import ( - "context" - "fmt" - "net" - "os" - "testing" - "time" - - "github.com/agglayer/aggkit/aggregator/prover" - "github.com/agglayer/aggkit/aggregator/prover/mocks" - "github.com/agglayer/aggkit/config/types" - "github.com/agglayer/aggkit/log" - "github.com/ethereum/go-ethereum/common" - "github.com/stretchr/testify/mock" - "github.com/stretchr/testify/require" -) - -const ( - dir = "../../test/vectors/proofs" -) - -type TestStateRoot struct { - Publics []string `mapstructure:"publics"` -} - -func TestProver(t *testing.T) { - mockChannel := mocks.ChannelMock{} - var addr net.Addr - - mockChannel.On("Send", mock.Anything).Return(nil) - mockChannel.On("Recv").Return(&prover.ProverMessage{ - Id: "test", - Response: &prover.ProverMessage_GetStatusResponse{ - GetStatusResponse: &prover.GetStatusResponse{ - Status: prover.GetStatusResponse_STATUS_IDLE, - ProverName: "testName", - ProverId: "testId", - }, - }, - }, nil).Times(1) - - p, err := prover.New(log.GetDefaultLogger(), &mockChannel, addr, types.Duration{Duration: time.Second * 
5}) - require.NoError(t, err) - name := p.Name() - require.Equal(t, "testName", name, "name does not match") - address := p.Addr() - require.Equal(t, "", address, "address does not match") - id := p.ID() - require.Equal(t, "testId", id, "id does not match") - - mockChannel.On("Recv").Return(&prover.ProverMessage{ - Id: "test", - Response: &prover.ProverMessage_GetProofResponse{ - GetProofResponse: &prover.GetProofResponse{ - Proof: &prover.GetProofResponse_RecursiveProof{ - RecursiveProof: "this is a proof", - }, - Result: prover.GetProofResponse_RESULT_COMPLETED_OK, - }, - }, - }, nil) - - proof, sr, accinputHash, err := p.WaitRecursiveProof(context.Background(), "proofID") - require.NoError(t, err) - - require.NotNil(t, proof, "proof is nil") - require.NotNil(t, sr, "state root is nil") - require.Equal(t, common.Hash{}, accinputHash, "state root is not empty") -} -func TestCalculateStateRoots(t *testing.T) { - var expectedStateRoots = map[string]string{ - "1871.json": "0x0ed594d8bc0bb38f3190ff25fb1e5b4fe1baf0e2e0c1d7bf3307f07a55d3a60f", - "1872.json": "0xb6aac97ebb0eb2d4a3bdd40cfe49b6a22d42fe7deff1a8fae182a9c11cc8a7b1", - "1873.json": "0x6f88be87a2ad2928a655bbd38c6f1b59ca8c0f53fd8e9e9d5806e90783df701f", - "1874.json": "0x6f88be87a2ad2928a655bbd38c6f1b59ca8c0f53fd8e9e9d5806e90783df701f", - "1875.json": "0xf4a439c5642a182d9e27c8ab82c64b44418ba5fa04c175a013bed452c19908c9"} - - // Read all files in the directory - files, err := os.ReadDir(dir) - require.NoError(t, err) - - for _, file := range files { - if file.IsDir() { - continue - } - - // Read the file - data, err := os.ReadFile(fmt.Sprintf("%s/%s", dir, file.Name())) - require.NoError(t, err) - - // Get the state root from the batch proof - fileStateRoot, err := prover.GetSanityCheckHashFromProof(log.GetDefaultLogger(), string(data), prover.StateRootStartIndex, prover.StateRootFinalIndex) - require.NoError(t, err) - - // Get the expected state root - expectedStateRoot, ok := expectedStateRoots[file.Name()] - 
require.True(t, ok, "Expected state root not found") - - // Check Acc Input Hash - accInputHash, err := prover.GetSanityCheckHashFromProof(log.GetDefaultLogger(), string(data), prover.AccInputHashStartIndex, prover.AccInputHashFinalIndex) - require.NotEqual(t, common.Hash{}, accInputHash, "Acc Input Hash is empty") - require.NoError(t, err) - - // Compare the state roots - require.Equal(t, expectedStateRoot, fileStateRoot.String(), "State roots do not match") - } -} diff --git a/aggsender/mocks/agg_sender_storage.go b/aggsender/mocks/agg_sender_storage.go index 93ca8563..c43629d2 100644 --- a/aggsender/mocks/agg_sender_storage.go +++ b/aggsender/mocks/agg_sender_storage.go @@ -189,7 +189,7 @@ func (_c *AggSenderStorage_GetCertificatesByStatus_Call) RunAndReturn(run func([ return _c } -// GetLastSentCertificate provides a mock function with no fields +// GetLastSentCertificate provides a mock function with given fields: func (_m *AggSenderStorage) GetLastSentCertificate() (*types.CertificateInfo, error) { ret := _m.Called() diff --git a/aggsender/mocks/block_notifier.go b/aggsender/mocks/block_notifier.go index 9b8db6fa..ea1af641 100644 --- a/aggsender/mocks/block_notifier.go +++ b/aggsender/mocks/block_notifier.go @@ -20,7 +20,7 @@ func (_m *BlockNotifier) EXPECT() *BlockNotifier_Expecter { return &BlockNotifier_Expecter{mock: &_m.Mock} } -// String provides a mock function with no fields +// String provides a mock function with given fields: func (_m *BlockNotifier) String() string { ret := _m.Called() diff --git a/aggsender/mocks/epoch_notifier.go b/aggsender/mocks/epoch_notifier.go index 793722d1..2753dccc 100644 --- a/aggsender/mocks/epoch_notifier.go +++ b/aggsender/mocks/epoch_notifier.go @@ -51,11 +51,11 @@ func (_c *EpochNotifier_Start_Call) Return() *EpochNotifier_Start_Call { } func (_c *EpochNotifier_Start_Call) RunAndReturn(run func(context.Context)) *EpochNotifier_Start_Call { - _c.Run(run) + _c.Call.Return(run) return _c } -// String provides a mock 
function with no fields +// String provides a mock function with given fields: func (_m *EpochNotifier) String() string { ret := _m.Called() diff --git a/aggsender/mocks/generic_subscriber.go b/aggsender/mocks/generic_subscriber.go index 59a27642..b4bee4b4 100644 --- a/aggsender/mocks/generic_subscriber.go +++ b/aggsender/mocks/generic_subscriber.go @@ -46,7 +46,7 @@ func (_c *GenericSubscriber_Publish_Call[T]) Return() *GenericSubscriber_Publish } func (_c *GenericSubscriber_Publish_Call[T]) RunAndReturn(run func(T)) *GenericSubscriber_Publish_Call[T] { - _c.Run(run) + _c.Call.Return(run) return _c } diff --git a/aggsender/mocks/l2_bridge_syncer.go b/aggsender/mocks/l2_bridge_syncer.go index 3f562a5b..6a51be30 100644 --- a/aggsender/mocks/l2_bridge_syncer.go +++ b/aggsender/mocks/l2_bridge_syncer.go @@ -28,7 +28,7 @@ func (_m *L2BridgeSyncer) EXPECT() *L2BridgeSyncer_Expecter { return &L2BridgeSyncer_Expecter{mock: &_m.Mock} } -// BlockFinality provides a mock function with no fields +// BlockFinality provides a mock function with given fields: func (_m *L2BridgeSyncer) BlockFinality() etherman.BlockNumberFinality { ret := _m.Called() @@ -363,7 +363,7 @@ func (_c *L2BridgeSyncer_GetLastProcessedBlock_Call) RunAndReturn(run func(conte return _c } -// OriginNetwork provides a mock function with no fields +// OriginNetwork provides a mock function with given fields: func (_m *L2BridgeSyncer) OriginNetwork() uint32 { ret := _m.Called() diff --git a/aggsender/mocks/logger.go b/aggsender/mocks/logger.go index b2a845ca..54be6942 100644 --- a/aggsender/mocks/logger.go +++ b/aggsender/mocks/logger.go @@ -55,7 +55,7 @@ func (_c *Logger_Debug_Call) Return() *Logger_Debug_Call { } func (_c *Logger_Debug_Call) RunAndReturn(run func(...interface{})) *Logger_Debug_Call { - _c.Run(run) + _c.Call.Return(run) return _c } @@ -99,7 +99,7 @@ func (_c *Logger_Debugf_Call) Return() *Logger_Debugf_Call { } func (_c *Logger_Debugf_Call) RunAndReturn(run func(string, ...interface{})) 
*Logger_Debugf_Call { - _c.Run(run) + _c.Call.Return(run) return _c } @@ -141,7 +141,7 @@ func (_c *Logger_Error_Call) Return() *Logger_Error_Call { } func (_c *Logger_Error_Call) RunAndReturn(run func(...interface{})) *Logger_Error_Call { - _c.Run(run) + _c.Call.Return(run) return _c } @@ -185,7 +185,7 @@ func (_c *Logger_Errorf_Call) Return() *Logger_Errorf_Call { } func (_c *Logger_Errorf_Call) RunAndReturn(run func(string, ...interface{})) *Logger_Errorf_Call { - _c.Run(run) + _c.Call.Return(run) return _c } @@ -229,7 +229,7 @@ func (_c *Logger_Fatalf_Call) Return() *Logger_Fatalf_Call { } func (_c *Logger_Fatalf_Call) RunAndReturn(run func(string, ...interface{})) *Logger_Fatalf_Call { - _c.Run(run) + _c.Call.Return(run) return _c } @@ -271,7 +271,7 @@ func (_c *Logger_Info_Call) Return() *Logger_Info_Call { } func (_c *Logger_Info_Call) RunAndReturn(run func(...interface{})) *Logger_Info_Call { - _c.Run(run) + _c.Call.Return(run) return _c } @@ -315,7 +315,7 @@ func (_c *Logger_Infof_Call) Return() *Logger_Infof_Call { } func (_c *Logger_Infof_Call) RunAndReturn(run func(string, ...interface{})) *Logger_Infof_Call { - _c.Run(run) + _c.Call.Return(run) return _c } @@ -357,7 +357,7 @@ func (_c *Logger_Warn_Call) Return() *Logger_Warn_Call { } func (_c *Logger_Warn_Call) RunAndReturn(run func(...interface{})) *Logger_Warn_Call { - _c.Run(run) + _c.Call.Return(run) return _c } @@ -401,7 +401,7 @@ func (_c *Logger_Warnf_Call) Return() *Logger_Warnf_Call { } func (_c *Logger_Warnf_Call) RunAndReturn(run func(string, ...interface{})) *Logger_Warnf_Call { - _c.Run(run) + _c.Call.Return(run) return _c } diff --git a/bridgesync/bridgesync_test.go b/bridgesync/bridgesync_test.go index 9973d664..50187482 100644 --- a/bridgesync/bridgesync_test.go +++ b/bridgesync/bridgesync_test.go @@ -29,20 +29,22 @@ func TestNewLx(t *testing.T) { ctx := context.Background() dbPath := path.Join(t.TempDir(), "TestNewLx.sqlite") bridge := 
common.HexToAddress("0x1234567890abcdef1234567890abcdef12345678") - syncBlockChunkSize := uint64(100) - blockFinalityType := etherman.SafeBlock - initialBlock := uint64(0) - waitForNewBlocksPeriod := time.Second * 10 - retryAfterErrorPeriod := time.Second * 5 - maxRetryAttemptsAfterError := 3 - originNetwork := uint32(1) + const ( + syncBlockChunkSize = uint64(100) + blockFinalityType = etherman.SafeBlock + initialBlock = uint64(0) + waitForNewBlocksPeriod = time.Second * 10 + retryAfterErrorPeriod = time.Second * 5 + maxRetryAttemptsAfterError = 3 + originNetwork = uint32(1) + ) mockEthClient := mocksbridgesync.NewEthClienter(t) mockReorgDetector := mocksbridgesync.NewReorgDetector(t) mockReorgDetector.EXPECT().Subscribe(mock.Anything).Return(nil, nil) - bridgeSync, err := NewL1( + l1BridgeSync, err := NewL1( ctx, dbPath, bridge, @@ -59,11 +61,11 @@ func TestNewLx(t *testing.T) { ) assert.NoError(t, err) - assert.NotNil(t, bridgeSync) - assert.Equal(t, originNetwork, bridgeSync.OriginNetwork()) - assert.Equal(t, blockFinalityType, bridgeSync.BlockFinality()) + assert.NotNil(t, l1BridgeSync) + assert.Equal(t, originNetwork, l1BridgeSync.OriginNetwork()) + assert.Equal(t, blockFinalityType, l1BridgeSync.BlockFinality()) - bridgeSyncL2, err := NewL2( + l2BridgdeSync, err := NewL2( ctx, dbPath, bridge, @@ -80,9 +82,9 @@ func TestNewLx(t *testing.T) { ) assert.NoError(t, err) - assert.NotNil(t, bridgeSync) - assert.Equal(t, originNetwork, bridgeSyncL2.OriginNetwork()) - assert.Equal(t, blockFinalityType, bridgeSyncL2.BlockFinality()) + assert.NotNil(t, l1BridgeSync) + assert.Equal(t, originNetwork, l2BridgdeSync.OriginNetwork()) + assert.Equal(t, blockFinalityType, l2BridgdeSync.BlockFinality()) } func TestGetLastProcessedBlock(t *testing.T) { diff --git a/bridgesync/mocks/eth_clienter.go b/bridgesync/mocks/eth_clienter.go index 12a99a98..3d208e45 100644 --- a/bridgesync/mocks/eth_clienter.go +++ b/bridgesync/mocks/eth_clienter.go @@ -265,7 +265,7 @@ func (_c 
*EthClienter_CallContract_Call) RunAndReturn(run func(context.Context, return _c } -// Client provides a mock function with no fields +// Client provides a mock function with given fields: func (_m *EthClienter) Client() *rpc.Client { ret := _m.Called() diff --git a/cmd/main.go b/cmd/main.go index 5127df85..41b36409 100644 --- a/cmd/main.go +++ b/cmd/main.go @@ -41,8 +41,7 @@ var ( Aliases: []string{"co"}, Usage: "List of components to run", Required: false, - Value: cli.NewStringSlice(common.SEQUENCE_SENDER, common.AGGREGATOR, - common.AGGORACLE, common.BRIDGE, common.AGGSENDER), + Value: cli.NewStringSlice(common.AGGORACLE, common.AGGSENDER, common.BRIDGE), } saveConfigFlag = cli.StringFlag{ Name: config.FlagSaveConfigPath, diff --git a/cmd/run.go b/cmd/run.go index 607d159c..a8de796c 100644 --- a/cmd/run.go +++ b/cmd/run.go @@ -2,43 +2,30 @@ package main import ( "context" - "crypto/ecdsa" "fmt" - "math/big" "os" "os/signal" "runtime" - dataCommitteeClient "github.com/0xPolygon/cdk-data-availability/client" jRPC "github.com/0xPolygon/cdk-rpc/rpc" - ethtxman "github.com/0xPolygon/zkevm-ethtx-manager/etherman" - "github.com/0xPolygon/zkevm-ethtx-manager/etherman/etherscan" "github.com/0xPolygon/zkevm-ethtx-manager/ethtxmanager" ethtxlog "github.com/0xPolygon/zkevm-ethtx-manager/log" "github.com/agglayer/aggkit" "github.com/agglayer/aggkit/agglayer" "github.com/agglayer/aggkit/aggoracle" "github.com/agglayer/aggkit/aggoracle/chaingersender" - "github.com/agglayer/aggkit/aggregator" - "github.com/agglayer/aggkit/aggregator/db" "github.com/agglayer/aggkit/aggsender" "github.com/agglayer/aggkit/bridgesync" "github.com/agglayer/aggkit/claimsponsor" aggkitcommon "github.com/agglayer/aggkit/common" "github.com/agglayer/aggkit/config" - "github.com/agglayer/aggkit/dataavailability" - "github.com/agglayer/aggkit/dataavailability/datacommittee" "github.com/agglayer/aggkit/etherman" ethermanconfig "github.com/agglayer/aggkit/etherman/config" - 
"github.com/agglayer/aggkit/etherman/contracts" "github.com/agglayer/aggkit/l1infotreesync" "github.com/agglayer/aggkit/lastgersync" "github.com/agglayer/aggkit/log" "github.com/agglayer/aggkit/reorgdetector" "github.com/agglayer/aggkit/rpc" - "github.com/agglayer/aggkit/sequencesender" - "github.com/agglayer/aggkit/sequencesender/txbuilder" - "github.com/agglayer/aggkit/translator" "github.com/ethereum/go-ethereum/ethclient" "github.com/urfave/cli/v2" ) @@ -88,21 +75,6 @@ func start(cliCtx *cli.Context) error { var rpcServices []jRPC.Service for _, component := range components { switch component { - case aggkitcommon.SEQUENCE_SENDER: - cfg.SequenceSender.Log = cfg.Log - seqSender := createSequenceSender(*cfg, l1Client, l1InfoTreeSync) - // start sequence sender in a goroutine, checking for errors - go seqSender.Start(cliCtx.Context) - - case aggkitcommon.AGGREGATOR: - aggregator := createAggregator(cliCtx.Context, *cfg, !cliCtx.Bool(config.FlagMigrations)) - // start aggregator in a goroutine, checking for errors - go func() { - if err := aggregator.Start(); err != nil { - aggregator.Stop() - log.Fatal(err) - } - }() case aggkitcommon.AGGORACLE: aggOracle := createAggoracle(*cfg, l1Client, l2Client, l1InfoTreeSync) go aggOracle.Start(cliCtx.Context) @@ -181,150 +153,6 @@ func createAggSender( return aggsender.New(ctx, logger, cfg, agglayerClient, l1InfoTreeSync, l2Syncer, epochNotifier) } -func createAggregator(ctx context.Context, c config.Config, runMigrations bool) *aggregator.Aggregator { - logger := log.WithFields("module", aggkitcommon.AGGREGATOR) - // Migrations - if runMigrations { - logger.Infof("Running DB migrations. 
File %s", c.Aggregator.DBPath) - runAggregatorMigrations(c.Aggregator.DBPath) - } - - etherman, err := newEtherman(c) - if err != nil { - logger.Fatal(err) - } - - // READ CHAIN ID FROM POE SC - - if c.Aggregator.ChainID == 0 { - l2ChainID, err := etherman.GetL2ChainID() - if err != nil { - logger.Fatal(err) - } - log.Infof("Autodiscover L2ChainID: %d", l2ChainID) - c.Aggregator.ChainID = l2ChainID - } - - aggregator, err := aggregator.New(ctx, c.Aggregator, logger, etherman) - if err != nil { - logger.Fatal(err) - } - - return aggregator -} - -func createSequenceSender( - cfg config.Config, - l1Client *ethclient.Client, - l1InfoTreeSync *l1infotreesync.L1InfoTreeSync, -) *sequencesender.SequenceSender { - logger := log.WithFields("module", aggkitcommon.SEQUENCE_SENDER) - - // Check config - if cfg.SequenceSender.RPCURL == "" { - logger.Fatal("Required field RPCURL is empty in sequence sender config") - } - - ethman, err := etherman.NewClient(ethermanconfig.Config{ - EthermanConfig: ethtxman.Config{ - URL: cfg.SequenceSender.EthTxManager.Etherman.URL, - MultiGasProvider: cfg.SequenceSender.EthTxManager.Etherman.MultiGasProvider, - L1ChainID: cfg.SequenceSender.EthTxManager.Etherman.L1ChainID, - Etherscan: etherscan.Config{ - ApiKey: cfg.SequenceSender.EthTxManager.Etherman.Etherscan.ApiKey, - Url: cfg.SequenceSender.EthTxManager.Etherman.Etherscan.Url, - }, - HTTPHeaders: cfg.SequenceSender.EthTxManager.Etherman.HTTPHeaders, - }, - }, cfg.NetworkConfig.L1Config, cfg.Common) - if err != nil { - logger.Fatalf("Failed to create etherman. 
Err: %w, ", err) - } - - auth, _, err := ethman.LoadAuthFromKeyStore(cfg.SequenceSender.PrivateKey.Path, cfg.SequenceSender.PrivateKey.Password) - if err != nil { - logger.Fatal(err) - } - cfg.SequenceSender.SenderAddress = auth.From - blockFinalityType := etherman.BlockNumberFinality(cfg.SequenceSender.BlockFinality) - - blockFinality, err := blockFinalityType.ToBlockNum() - if err != nil { - logger.Fatalf("Failed to create block finality. Err: %w, ", err) - } - txBuilder, err := newTxBuilder(cfg, logger, ethman, l1Client, l1InfoTreeSync, blockFinality) - if err != nil { - logger.Fatal(err) - } - seqSender, err := sequencesender.New(cfg.SequenceSender, logger, ethman, txBuilder) - if err != nil { - logger.Fatal(err) - } - - return seqSender -} - -func newTxBuilder( - cfg config.Config, - logger *log.Logger, - ethman *etherman.Client, - l1Client *ethclient.Client, - l1InfoTreeSync *l1infotreesync.L1InfoTreeSync, - blockFinality *big.Int, -) (txbuilder.TxBuilder, error) { - auth, _, err := ethman.LoadAuthFromKeyStore(cfg.SequenceSender.PrivateKey.Path, cfg.SequenceSender.PrivateKey.Password) - if err != nil { - log.Fatal(err) - } - da, err := newDataAvailability(cfg, ethman) - if err != nil { - log.Fatal(err) - } - var txBuilder txbuilder.TxBuilder - - switch contracts.VersionType(cfg.Common.ContractVersions) { - case contracts.VersionBanana: - if cfg.Common.IsValidiumMode { - txBuilder = txbuilder.NewTxBuilderBananaValidium( - logger, - ethman.Contracts.Banana.Rollup, - ethman.Contracts.Banana.GlobalExitRoot, - da, - *auth, - cfg.SequenceSender.MaxBatchesForL1, - l1InfoTreeSync, - l1Client, - blockFinality, - ) - } else { - txBuilder = txbuilder.NewTxBuilderBananaZKEVM( - logger, - ethman.Contracts.Banana.Rollup, - ethman.Contracts.Banana.GlobalExitRoot, - *auth, - cfg.SequenceSender.MaxTxSizeForL1, - l1InfoTreeSync, - l1Client, - blockFinality, - ) - } - case contracts.VersionElderberry: - if cfg.Common.IsValidiumMode { - txBuilder = 
txbuilder.NewTxBuilderElderberryValidium( - logger, ethman.Contracts.Elderberry.Rollup, da, *auth, cfg.SequenceSender.MaxBatchesForL1, - ) - } else { - txBuilder = txbuilder.NewTxBuilderElderberryZKEVM( - logger, ethman.Contracts.Elderberry.Rollup, *auth, cfg.SequenceSender.MaxTxSizeForL1, - ) - } - default: - err = fmt.Errorf("unknown contract version: %s", cfg.Common.ContractVersions) - } - - return txBuilder, err -} - func createAggoracle( cfg config.Config, l1Client, @@ -377,77 +205,6 @@ func createAggoracle( return aggOracle } -func newDataAvailability(c config.Config, etherman *etherman.Client) (*dataavailability.DataAvailability, error) { - if !c.Common.IsValidiumMode { - return nil, nil - } - logger := log.WithFields("module", "da-committee") - translator := translator.NewTranslatorImpl(logger) - logger.Infof("Translator rules: %v", c.Common.Translator) - translator.AddConfigRules(c.Common.Translator) - - // Backend specific config - daProtocolName, err := etherman.GetDAProtocolName() - if err != nil { - return nil, fmt.Errorf("error getting data availability protocol name: %w", err) - } - var daBackend dataavailability.DABackender - switch daProtocolName { - case string(dataavailability.DataAvailabilityCommittee): - var ( - pk *ecdsa.PrivateKey - err error - ) - _, pk, err = etherman.LoadAuthFromKeyStore(c.SequenceSender.PrivateKey.Path, c.SequenceSender.PrivateKey.Password) - if err != nil { - return nil, err - } - dacAddr, err := etherman.GetDAProtocolAddr() - if err != nil { - return nil, fmt.Errorf("error getting trusted sequencer URI. 
Error: %w", err) - } - - daBackend, err = datacommittee.New( - logger, - c.SequenceSender.EthTxManager.Etherman.URL, - dacAddr, - pk, - dataCommitteeClient.NewFactory(), - translator, - ) - if err != nil { - return nil, err - } - default: - return nil, fmt.Errorf("unexpected / unsupported DA protocol: %s", daProtocolName) - } - - return dataavailability.New(daBackend) -} - -func runAggregatorMigrations(dbPath string) { - runMigrations(dbPath, db.AggregatorMigrationName) -} - -func runMigrations(dbPath string, name string) { - log.Infof("running migrations for %v", name) - err := db.RunMigrationsUp(dbPath, name) - if err != nil { - log.Fatal(err) - } -} - -func newEtherman(c config.Config) (*etherman.Client, error) { - return etherman.NewClient(ethermanconfig.Config{ - EthermanConfig: ethtxman.Config{ - URL: c.Aggregator.EthTxManager.Etherman.URL, - MultiGasProvider: c.Aggregator.EthTxManager.Etherman.MultiGasProvider, - L1ChainID: c.Aggregator.EthTxManager.Etherman.L1ChainID, - HTTPHeaders: c.Aggregator.EthTxManager.Etherman.HTTPHeaders, - }, - }, c.NetworkConfig.L1Config, c.Common) -} - func logVersion() { log.Infow("Starting application", // version is already logged by default @@ -508,8 +265,9 @@ func runL1InfoTreeSyncerIfNeeded( l1Client *ethclient.Client, reorgDetector *reorgdetector.ReorgDetector, ) *l1infotreesync.L1InfoTreeSync { - if !isNeeded([]string{aggkitcommon.AGGORACLE, aggkitcommon.BRIDGE, - aggkitcommon.SEQUENCE_SENDER, aggkitcommon.AGGSENDER, aggkitcommon.L1INFOTREESYNC}, components) { + if !isNeeded([]string{ + aggkitcommon.AGGORACLE, aggkitcommon.AGGSENDER, + aggkitcommon.BRIDGE, aggkitcommon.L1INFOTREESYNC}, components) { return nil } l1InfoTreeSync, err := l1infotreesync.New( @@ -537,9 +295,9 @@ func runL1InfoTreeSyncerIfNeeded( func runL1ClientIfNeeded(components []string, urlRPCL1 string) *ethclient.Client { if !isNeeded([]string{ - aggkitcommon.SEQUENCE_SENDER, aggkitcommon.AGGREGATOR, - aggkitcommon.AGGORACLE, aggkitcommon.BRIDGE, + 
aggkitcommon.AGGORACLE, aggkitcommon.AGGSENDER, + aggkitcommon.BRIDGE, aggkitcommon.L1INFOTREESYNC, }, components) { return nil @@ -588,9 +346,8 @@ func runReorgDetectorL1IfNeeded( cfg *reorgdetector.Config, ) (*reorgdetector.ReorgDetector, chan error) { if !isNeeded([]string{ - aggkitcommon.SEQUENCE_SENDER, aggkitcommon.AGGREGATOR, - aggkitcommon.AGGORACLE, aggkitcommon.BRIDGE, aggkitcommon.AGGSENDER, - aggkitcommon.L1INFOTREESYNC}, + aggkitcommon.AGGORACLE, aggkitcommon.AGGSENDER, + aggkitcommon.BRIDGE, aggkitcommon.L1INFOTREESYNC}, components) { return nil, nil } diff --git a/common/components.go b/common/components.go index 2b562cff..83c5a81f 100644 --- a/common/components.go +++ b/common/components.go @@ -1,10 +1,6 @@ package common const ( - // SEQUENCE_SENDER name to identify the sequence-sender component - SEQUENCE_SENDER = "sequence-sender" //nolint:stylecheck - // AGGREGATOR name to identify the aggregator component - AGGREGATOR = "aggregator" // AGGORACLE name to identify the aggoracle component AGGORACLE = "aggoracle" // BRIDGE name to identify the bridge component (have RPC) diff --git a/config/config.go b/config/config.go index 9481a264..643a8f3e 100644 --- a/config/config.go +++ b/config/config.go @@ -9,7 +9,6 @@ import ( jRPC "github.com/0xPolygon/cdk-rpc/rpc" "github.com/agglayer/aggkit/aggoracle" - "github.com/agglayer/aggkit/aggregator" "github.com/agglayer/aggkit/aggsender" "github.com/agglayer/aggkit/bridgesync" "github.com/agglayer/aggkit/claimsponsor" @@ -19,7 +18,6 @@ import ( "github.com/agglayer/aggkit/lastgersync" "github.com/agglayer/aggkit/log" "github.com/agglayer/aggkit/reorgdetector" - "github.com/agglayer/aggkit/sequencesender" "github.com/mitchellh/mapstructure" "github.com/pelletier/go-toml/v2" "github.com/spf13/viper" @@ -59,11 +57,6 @@ const ( // FlagAllowDeprecatedFields is the flag to allow deprecated fields FlagAllowDeprecatedFields = "allow-deprecated-fields" - deprecatedFieldSyncDB = "Aggregator.Synchronizer.DB is 
deprecated. Use Aggregator.Synchronizer.SQLDB instead." - - deprecatedFieldPersistenceFilename = "EthTxManager.PersistenceFilename is deprecated." + - " Use EthTxManager.StoragePath instead." - EnvVarPrefix = "CDK" ConfigType = "toml" SaveConfigFileName = "aggkit_config.toml" @@ -102,21 +95,7 @@ type DeprecatedField struct { } var ( - deprecatedFieldsOnConfig = []DeprecatedField{ - { - FieldNamePattern: "sequencesender.ethtxmanager.persistencefilename", - Reason: deprecatedFieldPersistenceFilename, - }, - { - FieldNamePattern: "aggregator.synchronizer.db.", - Reason: deprecatedFieldSyncDB, - }, - - { - FieldNamePattern: "aggregator.ethtxmanager.persistencefilename", - Reason: deprecatedFieldPersistenceFilename, - }, - } + deprecatedFieldsOnConfig = []DeprecatedField{} ) /* @@ -128,14 +107,10 @@ The file is [TOML format] type Config struct { // Configuration of the etherman (client for access L1) Etherman ethermanconfig.Config - // Configuration of the aggregator - Aggregator aggregator.Config // Configure Log level for all the services, allow also to store the logs in a file Log log.Config // Configuration of the genesis of the network. 
This is used to known the initial state of the network NetworkConfig NetworkConfig - // Configuration of the sequence sender service - SequenceSender sequencesender.Config // Common Config that affects all the services Common common.Config // Configuration of the reorg detector service to be used for the L1 diff --git a/config/config_test.go b/config/config_test.go index a7da6481..e1944f7d 100644 --- a/config/config_test.go +++ b/config/config_test.go @@ -29,24 +29,6 @@ func TestLoadDefaultConfig(t *testing.T) { require.NotNil(t, cfg) } -const configWithDeprecatedFields = ` -[SequenceSender.EthTxManager] -nodepretatedfield = "value2" -persistencefilename = "value" -` - -func TestLoadConfigWithDeprecatedFields(t *testing.T) { - tmpFile, err := os.CreateTemp("", "ut_config") - require.NoError(t, err) - defer os.Remove(tmpFile.Name()) - _, err = tmpFile.Write([]byte(DefaultVars + "\n" + configWithDeprecatedFields)) - require.NoError(t, err) - ctx := newCliContextConfigFlag(t, tmpFile.Name()) - cfg, err := Load(ctx) - require.Error(t, err) - require.Nil(t, cfg) -} - func TestLoadConfigWithSaveConfigFile(t *testing.T) { tmpFile, err := os.CreateTemp("", "ut_config") require.NoError(t, err) @@ -68,48 +50,6 @@ func TestLoadConfigWithSaveConfigFile(t *testing.T) { require.NoError(t, err) } -func TestTLoadFileFromStringDeprecatedField(t *testing.T) { - configFileData := configWithDeprecatedFields - _, err := LoadFileFromString(configFileData, "toml") - require.Error(t, err) -} -func TestTLoadDeprecatedField(t *testing.T) { - tmpFile, err := os.CreateTemp("", "ut_config") - require.NoError(t, err) - defer os.Remove(tmpFile.Name()) - _, err = tmpFile.Write([]byte(DefaultVars + "\n" + configWithDeprecatedFields)) - require.NoError(t, err) - ctx := newCliContextConfigFlag(t, tmpFile.Name()) - _, err = Load(ctx) - require.Error(t, err) -} - -func TestTLoadDeprecatedFieldWithAllowFlag(t *testing.T) { - tmpFile, err := os.CreateTemp("", "ut_config") - require.NoError(t, err) - 
defer os.Remove(tmpFile.Name()) - _, err = tmpFile.Write([]byte(DefaultVars + "\n" + configWithDeprecatedFields)) - require.NoError(t, err) - ctx := newCliContextConfigFlag(t, tmpFile.Name()) - err = ctx.Set(FlagAllowDeprecatedFields, "true") - require.NoError(t, err) - _, err = Load(ctx) - require.NoError(t, err) -} - -func TestCheckDeprecatedFields(t *testing.T) { - err := checkDeprecatedFields([]string{deprecatedFieldsOnConfig[0].FieldNamePattern}) - require.Error(t, err) - require.Contains(t, err.Error(), deprecatedFieldsOnConfig[0].FieldNamePattern) - require.Contains(t, err.Error(), deprecatedFieldsOnConfig[0].Reason) -} - -func TestCheckDeprecatedFieldsPattern(t *testing.T) { - err := checkDeprecatedFields([]string{"aggregator.synchronizer.db.name"}) - require.Error(t, err) - require.Contains(t, err.Error(), deprecatedFieldSyncDB) -} - func TestLoadConfigWithInvalidFilename(t *testing.T) { ctx := newCliContextConfigFlag(t, "invalid_file") cfg, err := Load(ctx) @@ -117,38 +57,6 @@ func TestLoadConfigWithInvalidFilename(t *testing.T) { require.Nil(t, cfg) } -func TestLoadConfigWithForbiddenFields(t *testing.T) { - cases := []struct { - name string - input string - }{ - { - name: "[Aggregator.Synchronizer] DB", - input: `[Aggregator.Synchronizer.DB] - name = "value"`, - }, - { - name: "[SequenceSender.EthTxManager] PersistenceFilename", - input: `[SequenceSender.EthTxManager] - PersistenceFilename = "foo.json"`, - }, - } - - for _, c := range cases { - t.Run(c.name, func(t *testing.T) { - tmpFile, err := os.CreateTemp("", "ut_config") - require.NoError(t, err) - defer os.Remove(tmpFile.Name()) - _, err = tmpFile.Write([]byte(c.input)) - require.NoError(t, err) - ctx := newCliContextConfigFlag(t, tmpFile.Name()) - cfg, err := Load(ctx) - require.Error(t, err) - require.Nil(t, cfg) - }) - } -} - func newCliContextConfigFlag(t *testing.T, values ...string) *cli.Context { t.Helper() flagSet := flag.NewFlagSet("test", flag.ContinueOnError) diff --git 
a/config/default.go b/config/default.go index f38d4c40..a7aa9747 100644 --- a/config/default.go +++ b/config/default.go @@ -17,8 +17,6 @@ SequencerPrivateKeyPath = "/app/sequencer.keystore" SequencerPrivateKeyPassword = "test" WitnessURL = "http://localhost:8123" -AggregatorPrivateKeyPath = "/app/keystore/aggregator.keystore" -AggregatorPrivateKeyPassword = "testonly" # Who send Proof to L1? AggLayer addr, or aggregator addr? SenderProofToL1Addr = "0x0000000000000000000000000000000000000000" polygonBridgeAddr = "0x0000000000000000000000000000000000000000" @@ -77,128 +75,6 @@ NetworkID = 1 IsValidiumMode = {{IsValidiumMode}} ContractVersions = "{{ContractVersions}}" -[SequenceSender] -WaitPeriodSendSequence = "15s" -LastBatchVirtualizationTimeMaxWaitPeriod = "10s" -L1BlockTimestampMargin = "30s" -MaxTxSizeForL1 = 131072 -L2Coinbase = "{{L2Coinbase}}" -PrivateKey = { Path = "{{SequencerPrivateKeyPath}}", Password = "{{SequencerPrivateKeyPassword}}"} -SequencesTxFileName = "sequencesender.json" -GasOffset = 80000 -WaitPeriodPurgeTxFile = "15m" -MaxPendingTx = 1 -MaxBatchesForL1 = 300 -BlockFinality = "FinalizedBlock" -RPCURL = "{{L2URL}}" -GetBatchWaitInterval = "10s" - [SequenceSender.EthTxManager] - FrequencyToMonitorTxs = "1s" - WaitTxToBeMined = "2m" - GetReceiptMaxTime = "250ms" - GetReceiptWaitInterval = "1s" - PrivateKeys = [ - {Path = "{{SequencerPrivateKeyPath}}", Password = "{{SequencerPrivateKeyPassword}}"}, - ] - ForcedGas = 0 - GasPriceMarginFactor = 1 - MaxGasPriceLimit = 0 - StoragePath = "ethtxmanager.sqlite" - ReadPendingL1Txs = false - SafeStatusL1NumberOfBlocks = 0 - FinalizedStatusL1NumberOfBlocks = 0 - [SequenceSender.EthTxManager.Etherman] - URL = "{{L1URL}}" - MultiGasProvider = false - L1ChainID = {{NetworkConfig.L1.L1ChainID}} -[Aggregator] -# GRPC server host -Host = "0.0.0.0" -# GRPC server port -Port = 50081 -RetryTime = "5s" -VerifyProofInterval = "10s" -ProofStatePollingInterval = "5s" -TxProfitabilityCheckerType = "acceptall" 
-TxProfitabilityMinReward = "1.1" -IntervalAfterWhichBatchConsolidateAnyway="0s" -BatchProofSanityCheckEnabled = true -# ChainID is L2ChainID. Is populated on runtimme -ChainID = 0 -ForkId = {{ForkId}} -SenderAddress = "{{SenderProofToL1Addr}}" -CleanupLockedProofsInterval = "2m" -GeneratingProofCleanupThreshold = "10m" -GasOffset = 0 -RPCURL = "{{L2URL}}" -WitnessURL = "{{WitnessURL}}" -UseFullWitness = false -SettlementBackend = "l1" -AggLayerTxTimeout = "5m" -AggLayerURL = "{{AggLayerURL}}" -SyncModeOnlyEnabled = false -DBPath = "{{PathRWData}}/aggregator_db.sqlite" - [Aggregator.SequencerPrivateKey] - Path = "{{SequencerPrivateKeyPath}}" - Password = "{{SequencerPrivateKeyPassword}}" - [Aggregator.Log] - Environment ="{{Log.Environment}}" # "production" or "development" - Level = "{{Log.Level}}" - Outputs = ["stderr"] - [Aggregator.EthTxManager] - FrequencyToMonitorTxs = "1s" - WaitTxToBeMined = "2m" - GetReceiptMaxTime = "250ms" - GetReceiptWaitInterval = "1s" - PrivateKeys = [ - {Path = "{{AggregatorPrivateKeyPath}}", Password = "{{AggregatorPrivateKeyPassword}}"}, - ] - ForcedGas = 0 - GasPriceMarginFactor = 1 - MaxGasPriceLimit = 0 - StoragePath = "" - ReadPendingL1Txs = false - SafeStatusL1NumberOfBlocks = 0 - FinalizedStatusL1NumberOfBlocks = 0 - [Aggregator.EthTxManager.Etherman] - URL = "{{L1URL}}" - L1ChainID = {{NetworkConfig.L1.L1ChainID}} - HTTPHeaders = [] - [Aggregator.Synchronizer] - [Aggregator.Synchronizer.Log] - Environment = "{{Log.Environment}}" # "production" or "development" - Level = "{{Log.Level}}" - Outputs = ["stderr"] - [Aggregator.Synchronizer.SQLDB] - DriverName = "sqlite3" - DataSource = "file:{{PathRWData}}/aggregator_sync_db.sqlite" - [Aggregator.Synchronizer.Synchronizer] - SyncInterval = "10s" - SyncChunkSize = 1000 - GenesisBlockNumber = {{genesisBlockNumber}} - SyncUpToBlock = "finalized" - BlockFinality = "finalized" - OverrideStorageCheck = false - [Aggregator.Synchronizer.Etherman] - L1URL = "{{L1URL}}" - ForkIDChunkSize = 
100 - L1ChainID = {{NetworkConfig.L1.L1ChainID}} - PararellBlockRequest = false - [Aggregator.Synchronizer.Etherman.Contracts] - GlobalExitRootManagerAddr = "{{NetworkConfig.L1.GlobalExitRootManagerAddr}}" - RollupManagerAddr = "{{NetworkConfig.L1.RollupManagerAddr}}" - ZkEVMAddr = "{{NetworkConfig.L1.ZkEVMAddr}}" - [Aggregator.Synchronizer.Etherman.Validium] - Enabled = {{IsValidiumMode}} - # L2URL, empty ask to contract - TrustedSequencerURL = "" - RetryOnDACErrorInterval = "1m" - DataSourcePriority = ["trusted", "external"] - [Aggregator.Synchronizer.Etherman.Validium.Translator] - FullMatchRules = [] - [Aggregator.Synchronizer.Etherman.Validium.RateLimit] - NumRequests = 1000 - Interval = "1s" [ReorgDetectorL1] DBPath = "{{PathRWData}}/reorgdetectorl1.sqlite" @@ -239,7 +115,7 @@ WaitPeriodNextGER="100ms" ForcedGas = 0 GasPriceMarginFactor = 1 MaxGasPriceLimit = 0 - StoragePath = "{{PathRWData}}/ethtxmanager-sequencesender.sqlite" + StoragePath = "{{PathRWData}}/ethtxmanager-aggoracle.sqlite" ReadPendingL1Txs = false SafeStatusL1NumberOfBlocks = 5 FinalizedStatusL1NumberOfBlocks = 10 diff --git a/crates/aggkit-config/src/aggregator.rs b/crates/aggkit-config/src/aggregator.rs deleted file mode 100644 index 8f37a9af..00000000 --- a/crates/aggkit-config/src/aggregator.rs +++ /dev/null @@ -1,125 +0,0 @@ -use ethers::types::Address; -use serde::Deserialize; -use url::Url; - -#[derive(Deserialize, Debug, Clone)] -pub struct EthTxManager { - #[serde(rename = "Etherman")] - pub etherman: Etherman, -} - -impl Default for EthTxManager { - fn default() -> Self { - Self { - etherman: Etherman::default(), - } - } -} - -#[derive(Deserialize, Debug, Clone)] -pub struct Etherman { - #[serde(rename = "URL", default)] - pub url: String, -} - -impl Default for Etherman { - fn default() -> Self { - Self { - url: "http://localhost:8545".to_string(), - } - } -} - -/// The Aggregator configuration. 
-#[derive(Deserialize, Debug, Clone)] -pub struct Aggregator { - #[serde(rename = "ChainID", default)] - pub chain_id: String, - #[serde(rename = "Host", default)] - pub host: String, - #[serde(rename = "Port", default)] - pub port: String, - #[serde(rename = "RetryTime", default)] - pub retry_time: String, - #[serde(rename = "VerifyProofInterval", default)] - pub verify_proof_interval: String, - #[serde(rename = "ProofStatePollingInterval", default)] - pub proof_state_polling_interval: String, - #[serde(rename = "TxProfitabilityCheckerType", default)] - pub tx_profitability_checker_type: String, - #[serde(rename = "TxProfitabilityMinReward", default)] - pub tx_profitability_min_reward: String, - #[serde(rename = "IntervalAfterWhichBatchConsolidateAnyway", default)] - pub interval_after_which_batch_consolidate_anyway: String, - #[serde(rename = "ForkId", default)] - pub fork_id: u64, - #[serde(rename = "CleanupLockedProofsInterval", default)] - pub cleanup_locked_proofs_interval: String, - #[serde(rename = "GeneratingProofCleanupThreshold", default)] - pub generating_proof_cleanup_threshold: String, - #[serde(rename = "GasOffset", default)] - pub gas_offset: u64, - #[serde(rename = "RPCURL", default = "default_url")] - pub rpc_url: Url, - #[serde(rename = "WitnessURL", default = "default_url")] - pub witness_url: Url, - #[serde(rename = "SenderAddress", default = "default_address")] - pub sender_address: Address, - #[serde(rename = "SettlementBackend", default)] - pub settlement_backend: String, - #[serde(rename = "AggLayerTxTimeout", default)] - pub agg_layer_tx_timeout: String, - #[serde(rename = "AggLayerURL", default = "default_url")] - pub agg_layer_url: Url, - #[serde(rename = "UseFullWitness", default)] - pub use_full_witness: bool, - #[serde(rename = "SyncModeOnlyEnabled", default)] - pub sync_mode_only_enabled: bool, - - #[serde(rename = "EthTxManager", default)] - pub eth_tx_manager: EthTxManager, -} - -fn default_url() -> Url { - 
Url::parse("http://localhost:8546").unwrap() -} - -fn default_address() -> Address { - "0x0000000000000000000000000000000000000000" - .parse() - .unwrap() -} - -impl Default for Aggregator { - fn default() -> Self { - // Values are coming from https://github.com/0xPolygon/agglayer/blob/main/config/default.go#L11 - Self { - chain_id: "1".to_string(), - host: "localhost".to_string(), - port: "8545".to_string(), - retry_time: "10s".to_string(), - verify_proof_interval: "1m".to_string(), - proof_state_polling_interval: "10s".to_string(), - tx_profitability_checker_type: "default".to_string(), - tx_profitability_min_reward: "0.1".to_string(), - interval_after_which_batch_consolidate_anyway: "5m".to_string(), - fork_id: 0, - cleanup_locked_proofs_interval: "1h".to_string(), - generating_proof_cleanup_threshold: "10m".to_string(), - gas_offset: 0, - rpc_url: default_url(), - witness_url: default_url(), - sender_address: default_address(), - settlement_backend: "default".to_string(), - agg_layer_tx_timeout: "30s".to_string(), - agg_layer_url: Url::parse("http://localhost:8547").unwrap(), - use_full_witness: false, - sync_mode_only_enabled: false, - eth_tx_manager: EthTxManager { - etherman: Etherman { - url: "http://localhost:9093".to_string(), - }, - }, - } - } -} diff --git a/crates/aggkit-config/src/lib.rs b/crates/aggkit-config/src/lib.rs index c86e9932..97901ed0 100644 --- a/crates/aggkit-config/src/lib.rs +++ b/crates/aggkit-config/src/lib.rs @@ -6,15 +6,12 @@ use serde::Deserialize; pub(crate) const DEFAULT_IP: std::net::Ipv4Addr = std::net::Ipv4Addr::new(0, 0, 0, 0); -pub(crate) mod aggregator; pub(crate) mod l1; pub mod log; pub(crate) mod network_config; -pub(crate) mod sequence_sender; pub(crate) mod telemetry; pub use log::Log; -use sequence_sender::SequenceSender; /// The Agglayer configuration. 
#[derive(Deserialize, Debug)] @@ -29,10 +26,4 @@ pub struct Config { #[serde(rename = "NetworkConfig", default)] pub network_config: network_config::NetworkConfig, - - #[serde(rename = "Aggregator", default)] - pub aggregator: aggregator::Aggregator, - - #[serde(rename = "SequenceSender", default)] - pub sequence_sender: SequenceSender, } diff --git a/crates/aggkit-config/src/sequence_sender.rs b/crates/aggkit-config/src/sequence_sender.rs deleted file mode 100644 index c4e83cc5..00000000 --- a/crates/aggkit-config/src/sequence_sender.rs +++ /dev/null @@ -1,50 +0,0 @@ -use serde::Deserialize; - -/// The SequenceSender configuration. -#[derive(Deserialize, Debug, Clone)] -pub struct SequenceSender { - #[serde(rename = "WaitPeriodSendSequence", default)] - pub wait_period_send_sequence: String, - #[serde(rename = "LastBatchVirtualizationTimeMaxWaitPeriod", default)] - pub last_batch_virtualization_time_max_wait_period: String, - #[serde(rename = "MaxTxSizeForL1", default)] - pub max_tx_size_for_l1: u32, - #[serde(rename = "L2Coinbase", default)] - pub l2_coinbase: String, - #[serde(rename = "SequencesTxFileName", default)] - pub sequences_tx_file_name: String, - #[serde(rename = "GasOffset", default)] - pub gas_offset: u64, - #[serde(rename = "WaitPeriodPurgeTxFile", default)] - pub wait_period_purge_tx_file: String, - #[serde(rename = "MaxPendingTx", default)] - pub max_pending_tx: u32, - #[serde(rename = "MaxBatchesForL1", default)] - pub max_batches_for_l1: u32, - #[serde(rename = "BlockFinality", default)] - pub block_finality: String, - #[serde(rename = "RPCURL", default)] - pub rpc_url: String, - #[serde(rename = "GetBatchWaitInterval", default)] - pub get_batch_wait_interval: String, -} - -// Default trait implementation -impl Default for SequenceSender { - fn default() -> Self { - Self { - wait_period_send_sequence: "1s".to_string(), - last_batch_virtualization_time_max_wait_period: "1s".to_string(), - max_tx_size_for_l1: 1000, - l2_coinbase: 
"0x".to_string(), - sequences_tx_file_name: "sequences_tx.json".to_string(), - gas_offset: 0, - wait_period_purge_tx_file: "1s".to_string(), - max_pending_tx: 1000, - max_batches_for_l1: 100, - block_finality: "1s".to_string(), - rpc_url: "http://localhost:8545".to_string(), - get_batch_wait_interval: "1s".to_string(), - } - } -} diff --git a/crates/aggkit/Cargo.toml b/crates/aggkit/Cargo.toml index c2cbf809..f57cab2f 100644 --- a/crates/aggkit/Cargo.toml +++ b/crates/aggkit/Cargo.toml @@ -1,5 +1,5 @@ [package] -name = "aggkit" +name = "cdk" version.workspace = true edition.workspace = true diff --git a/crates/aggkit/build.rs b/crates/aggkit/build.rs index 3bb15ece..3eabedb2 100644 --- a/crates/aggkit/build.rs +++ b/crates/aggkit/build.rs @@ -70,7 +70,7 @@ fn build_versions() -> std::io::Result<()> { // Get the corresponding lines from the contents of the starlark file let versions = content .lines() - .skip(32) + .skip(34) .take(15) .collect::>() .join("\n"); diff --git a/crates/aggkit/src/cli.rs b/crates/aggkit/src/cli.rs index 5e5fd7ec..f94ee063 100644 --- a/crates/aggkit/src/cli.rs +++ b/crates/aggkit/src/cli.rs @@ -33,26 +33,6 @@ pub(crate) enum Commands { )] components: Option, }, - /// Run cdk-erigon node with the provided default configuration - Erigon { - /// The path to the configuration file - #[arg( - long, - short = 'C', - value_hint = ValueHint::FilePath, - env = "AGGKIT_CONFIG_PATH" - )] - config: PathBuf, - - /// The path to a chain specification file. 
- #[arg( - long, - short = 'g', - value_hint = ValueHint::FilePath, - env = "AGGKIT_GENESIS_PATH" - )] - chain: PathBuf, - }, /// Output the corresponding versions of the components Versions, } diff --git a/crates/aggkit/src/config_render.rs b/crates/aggkit/src/config_render.rs index e96971e3..0abfffec 100644 --- a/crates/aggkit/src/config_render.rs +++ b/crates/aggkit/src/config_render.rs @@ -8,7 +8,7 @@ use tempfile::{tempdir, TempDir}; pub fn render(config: &Config, genesis_file: PathBuf, timestamp: u64) -> Result { // Create a temporary directory let tmp_dir = tempdir()?; - let chain_id = config.aggregator.chain_id.clone(); + let chain_id = "0"; // TODO - this needs to be chainged to the chain_id from the config let res = crate::allocs_render::render_allocs(genesis_file.to_str().unwrap())?; // Write the three files to disk fs::write( @@ -114,9 +114,6 @@ http.vhosts: any http.corsdomain: any ws: true "#, - chain_id = config.aggregator.chain_id.clone(), - l2_sequencer_rpc_url = config.aggregator.witness_url.to_string(), - l1_rpc_url = config.aggregator.eth_tx_manager.etherman.url, l1_chain_id = config.network_config.l1.l1_chain_id, sequencer_address = config.sequence_sender.l2_coinbase, zkevm_address = res.wrapper.l1_config.zkevm_address, diff --git a/crates/aggkit/src/main.rs b/crates/aggkit/src/main.rs index 8261d0cf..181c381a 100644 --- a/crates/aggkit/src/main.rs +++ b/crates/aggkit/src/main.rs @@ -1,6 +1,4 @@ //! Command line interface. 
-use alloy_rpc_client::ClientBuilder; -use alloy_rpc_client::ReqwestClient; use aggkit_config::Config; use clap::Parser; use cli::Cli; @@ -8,17 +6,13 @@ use colored::*; use execute::Execute; use std::path::PathBuf; use std::process::Command; -use url::Url; pub mod allocs_render; mod cli; -mod config_render; mod helpers; mod logging; mod versions; -const CDK_ERIGON_BIN: &str = "cdk-erigon"; - #[tokio::main] async fn main() -> anyhow::Result<()> { dotenvy::dotenv().ok(); @@ -42,7 +36,6 @@ async fn main() -> anyhow::Result<()> { match cli.cmd { cli::Commands::Node { config, components } => node(config, components)?, - cli::Commands::Erigon { config, chain } => erigon(config, chain).await?, cli::Commands::Versions {} => versions::versions(), } @@ -115,70 +108,3 @@ pub fn node(config_path: PathBuf, components: Option) -> anyhow::Result< Ok(()) } - -/// This is the main erigon entrypoint. -/// This function starts everything needed to run an Erigon node. -pub async fn erigon(config_path: PathBuf, genesis_file: PathBuf) -> anyhow::Result<()> { - // Read the config - let config = read_config(config_path.clone())?; - - // Initialize the logger - logging::tracing(&config.log); - - // Render configuration files - let chain_id = config.aggregator.chain_id.clone(); - let rpc_url = Url::parse(&config.sequence_sender.rpc_url).unwrap(); - let timestamp = get_timestamp(rpc_url).await.unwrap(); - let erigon_config_path = config_render::render(&config, genesis_file, timestamp)?; - - println!("Starting erigon with config: {:?}", erigon_config_path); - - // Run cdk-erigon in system path - let output = Command::new(CDK_ERIGON_BIN) - .args(&[ - "--config", - erigon_config_path - .path() - .join(format!("dynamic-{}.yaml", chain_id)) - .to_str() - .unwrap(), - ]) - .execute_output() - .unwrap(); - - if let Some(exit_code) = output.status.code() { - if exit_code != 0 { - eprintln!( - "Failed. 
Leaving configuration files in: {:?}", - erigon_config_path - ); - std::process::exit(1); - } - } else { - eprintln!("Interrupted!"); - } - - Ok(()) -} - -/// Call the rpc server to retrieve the first batch timestamp -async fn get_timestamp(url: Url) -> Result { - // Instantiate a new client over a transport. - let client: ReqwestClient = ClientBuilder::default().http(url); - - // Prepare a request to the server. - let request = client.request("zkevm_getBatchByNumber", vec!["0"]); - - // Poll the request to completion. - let batch_json: Batch = request.await.unwrap(); - - // Parse the timestamp hex string into u64. - let ts = u64::from_str_radix(batch_json.timestamp.trim_start_matches("0x"), 16)?; - - Ok(ts) -} - -#[derive(serde::Deserialize, Debug, Clone)] -struct Batch { - timestamp: String, -} diff --git a/dataavailability/config.go b/dataavailability/config.go deleted file mode 100644 index 8163e7bc..00000000 --- a/dataavailability/config.go +++ /dev/null @@ -1,9 +0,0 @@ -package dataavailability - -// DABackendType is the data availability protocol for the CDK -type DABackendType string - -const ( - // DataAvailabilityCommittee is the DAC protocol backend - DataAvailabilityCommittee DABackendType = "DataAvailabilityCommittee" -) diff --git a/dataavailability/dataavailability.go b/dataavailability/dataavailability.go deleted file mode 100644 index 39a27468..00000000 --- a/dataavailability/dataavailability.go +++ /dev/null @@ -1,33 +0,0 @@ -package dataavailability - -import ( - "context" - - "github.com/agglayer/aggkit/etherman" -) - -// DataAvailability implements an abstract data availability integration -type DataAvailability struct { - backend DABackender -} - -// New creates a DataAvailability instance -func New(backend DABackender) (*DataAvailability, error) { - da := &DataAvailability{ - backend: backend, - } - - return da, da.backend.Init() -} - -// PostSequenceBanana sends sequence data to the backend and returns a response. 
-func (d *DataAvailability) PostSequenceBanana( - ctx context.Context, sequenceBanana etherman.SequenceBanana, -) ([]byte, error) { - return d.backend.PostSequenceBanana(ctx, sequenceBanana) -} - -// PostSequenceElderberry sends batch data to the backend and returns a response. -func (d *DataAvailability) PostSequenceElderberry(ctx context.Context, batchesData [][]byte) ([]byte, error) { - return d.backend.PostSequenceElderberry(ctx, batchesData) -} diff --git a/dataavailability/datacommittee/datacommittee.go b/dataavailability/datacommittee/datacommittee.go deleted file mode 100644 index be811a13..00000000 --- a/dataavailability/datacommittee/datacommittee.go +++ /dev/null @@ -1,401 +0,0 @@ -package datacommittee - -import ( - "crypto/ecdsa" - "crypto/rand" - "errors" - "fmt" - "math/big" - "sort" - "strings" - - "github.com/0xPolygon/cdk-contracts-tooling/contracts/banana/polygondatacommittee" - "github.com/0xPolygon/cdk-data-availability/client" - daTypes "github.com/0xPolygon/cdk-data-availability/types" - "github.com/0xPolygonHermez/zkevm-synchronizer-l1/translator" - "github.com/agglayer/aggkit/etherman" - "github.com/agglayer/aggkit/log" - "github.com/ethereum/go-ethereum/accounts/abi/bind" - "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/crypto" - "github.com/ethereum/go-ethereum/ethclient" - "golang.org/x/net/context" -) - -const ( - unexpectedHashTemplate = "missmatch on transaction data. 
Expected hash %s, actual hash: %s" - translateContextName = "dataCommittee" -) - -// DataCommitteeMember represents a member of the Data Committee -type DataCommitteeMember struct { - Addr common.Address - URL string -} - -// DataCommittee represents a specific committee -type DataCommittee struct { - AddressesHash common.Hash - Members []DataCommitteeMember - RequiredSignatures uint64 -} - -// Backend implements the DAC integration -type Backend struct { - logger *log.Logger - dataCommitteeContract *polygondatacommittee.Polygondatacommittee - privKey *ecdsa.PrivateKey - dataCommitteeClientFactory client.Factory - - committeeMembers []DataCommitteeMember - selectedCommitteeMember int - ctx context.Context - Translator translator.Translator -} - -// New creates an instance of Backend -func New( - logger *log.Logger, - l1RPCURL string, - dataCommitteeAddr common.Address, - privKey *ecdsa.PrivateKey, - dataCommitteeClientFactory client.Factory, - translator translator.Translator, -) (*Backend, error) { - ethClient, err := ethclient.Dial(l1RPCURL) - if err != nil { - logger.Errorf("error connecting to %s: %+v", l1RPCURL, err) - return nil, err - } - - dataCommittee, err := polygondatacommittee.NewPolygondatacommittee(dataCommitteeAddr, ethClient) - if err != nil { - return nil, err - } - - return &Backend{ - logger: logger, - dataCommitteeContract: dataCommittee, - privKey: privKey, - dataCommitteeClientFactory: dataCommitteeClientFactory, - ctx: context.Background(), - Translator: translator, - }, nil -} - -// Init loads the DAC to be cached when needed -func (d *Backend) Init() error { - committee, err := d.getCurrentDataCommittee() - if err != nil { - return err - } - selectedCommitteeMember := -1 - if committee != nil { - d.committeeMembers = committee.Members - if len(committee.Members) > 0 { - nBig, err := rand.Int(rand.Reader, big.NewInt(int64(len(committee.Members)))) - if err != nil { - return err - } - selectedCommitteeMember = int(nBig.Int64()) - } - } - 
d.selectedCommitteeMember = selectedCommitteeMember - - return nil -} - -// GetSequence retrieves backend data by querying committee members for each hash concurrently. -func (d *Backend) GetSequence(_ context.Context, hashes []common.Hash, _ []byte) ([][]byte, error) { - initialMember := d.selectedCommitteeMember - - var batchData [][]byte - for retries := 0; retries < len(d.committeeMembers); retries++ { - member := d.committeeMembers[d.selectedCommitteeMember] - d.logger.Infof("trying to get data from %s at %s", member.Addr.Hex(), member.URL) - - c := d.dataCommitteeClientFactory.New(member.URL) - dataMap, err := c.ListOffChainData(d.ctx, hashes) - if err != nil { - d.logger.Warnf("error getting data from DAC node %s at %s: %s", member.Addr.Hex(), member.URL, err) - d.selectedCommitteeMember = (d.selectedCommitteeMember + 1) % len(d.committeeMembers) - if d.selectedCommitteeMember == initialMember { - break - } - continue - } - - batchData = make([][]byte, 0, len(hashes)) - for _, hash := range hashes { - actualTransactionsHash := crypto.Keccak256Hash(dataMap[hash]) - if actualTransactionsHash != hash { - unexpectedHash := fmt.Errorf(unexpectedHashTemplate, hash, actualTransactionsHash) - d.logger.Warnf("error getting data from DAC node %s at %s: %s", member.Addr.Hex(), member.URL, unexpectedHash) - d.selectedCommitteeMember = (d.selectedCommitteeMember + 1) % len(d.committeeMembers) - if d.selectedCommitteeMember == initialMember { - break - } - continue - } - batchData = append(batchData, dataMap[hash]) - } - return batchData, nil - } - - if err := d.Init(); err != nil { - return nil, fmt.Errorf("error loading data committee: %w", err) - } - - return nil, fmt.Errorf("couldn't get the data from any committee member") -} - -type signatureMsg struct { - addr common.Address - signature []byte - err error -} - -// PostSequenceElderberry submits batches and collects signatures from committee members. 
-func (d *Backend) PostSequenceElderberry(ctx context.Context, batchesData [][]byte) ([]byte, error) { - // Get current committee - committee, err := d.getCurrentDataCommittee() - if err != nil { - return nil, err - } - - // Authenticate as trusted sequencer by signing the sequence - sequence := make(daTypes.Sequence, 0, len(batchesData)) - for _, batchData := range batchesData { - sequence = append(sequence, batchData) - } - signedSequence, err := sequence.Sign(d.privKey) - if err != nil { - return nil, err - } - - // Request signatures to all members in parallel - ch := make(chan signatureMsg, len(committee.Members)) - signatureCtx, cancelSignatureCollection := context.WithCancel(ctx) - for _, member := range committee.Members { - signedSequenceElderberry := daTypes.SignedSequence{ - Sequence: sequence, - Signature: signedSequence, - } - go d.requestSignatureFromMember(signatureCtx, &signedSequenceElderberry, - func(c client.Client) ([]byte, error) { return c.SignSequence(ctx, signedSequenceElderberry) }, member, ch) - } - return d.collectSignatures(committee, ch, cancelSignatureCollection) -} - -// PostSequenceBanana submits a sequence to the data committee and collects the signed response from them. 
-func (d *Backend) PostSequenceBanana(ctx context.Context, sequence etherman.SequenceBanana) ([]byte, error) { - // Get current committee - committee, err := d.getCurrentDataCommittee() - if err != nil { - return nil, err - } - - sequenceBatches := make([]daTypes.Batch, 0, len(sequence.Batches)) - for _, batch := range sequence.Batches { - sequenceBatches = append(sequenceBatches, daTypes.Batch{ - L2Data: batch.L2Data, - Coinbase: batch.LastCoinbase, - ForcedBlockHashL1: batch.ForcedBlockHashL1, - ForcedGER: batch.ForcedGlobalExitRoot, - ForcedTimestamp: daTypes.ArgUint64(batch.ForcedBatchTimestamp), - }) - } - - sequenceBanana := daTypes.SequenceBanana{ - Batches: sequenceBatches, - OldAccInputHash: sequence.OldAccInputHash, - L1InfoRoot: sequence.L1InfoRoot, - MaxSequenceTimestamp: daTypes.ArgUint64(sequence.MaxSequenceTimestamp), - } - hashToSign := common.BytesToHash(sequenceBanana.HashToSign()) - if hashToSign != sequence.AccInputHash { - return nil, fmt.Errorf( - "calculated accInputHash diverges: DA = %s vs Seq = %s", - hashToSign, sequence.AccInputHash, - ) - } - - signature, err := sequenceBanana.Sign(d.privKey) - if err != nil { - return nil, err - } - - // Request signatures to all members in parallel - ch := make(chan signatureMsg, len(committee.Members)) - signatureCtx, cancelSignatureCollection := context.WithCancel(ctx) - for _, member := range committee.Members { - signedSequenceBanana := daTypes.SignedSequenceBanana{ - Sequence: sequenceBanana, - Signature: signature, - } - go d.requestSignatureFromMember(signatureCtx, - &signedSequenceBanana, - func(c client.Client) ([]byte, error) { return c.SignSequenceBanana(ctx, signedSequenceBanana) }, - member, ch) - } - - return d.collectSignatures(committee, ch, cancelSignatureCollection) -} - -func (d *Backend) collectSignatures( - committee *DataCommittee, ch chan signatureMsg, cancelSignatureCollection context.CancelFunc) ([]byte, error) { - // Collect signatures - // Stop requesting as soon as we have 
N valid signatures - var ( - msgs = make(signatureMsgs, 0, len(committee.Members)) - collectedSignatures uint64 - failedToCollect uint64 - ) - for collectedSignatures < committee.RequiredSignatures { - msg := <-ch - if msg.err != nil { - d.logger.Errorf("error when trying to get signature from %s: %s", msg.addr, msg.err) - failedToCollect++ - if len(committee.Members)-int(failedToCollect) < int(committee.RequiredSignatures) { - cancelSignatureCollection() - - return nil, errors.New("too many members failed to send their signature") - } - } else { - d.logger.Infof("received signature from %s", msg.addr) - collectedSignatures++ - } - msgs = append(msgs, msg) - } - - cancelSignatureCollection() - - return d.buildSignaturesAndAddrs(msgs, committee.Members), nil -} - -type funcSignType func(c client.Client) ([]byte, error) - -// funcSetSignatureType: is not possible to define a SetSignature function because -// the type daTypes.SequenceBanana and daTypes.Sequence belong to different packages -// So a future refactor is define a common interface for both -func (d *Backend) requestSignatureFromMember(ctx context.Context, signedSequence daTypes.SignedSequenceInterface, - funcSign funcSignType, - member DataCommitteeMember, ch chan signatureMsg) { - select { - case <-ctx.Done(): - return - default: - } - - // request - c := client.New(member.URL) - d.logger.Infof("sending request to sign the sequence to %s at %s", member.Addr.Hex(), member.URL) - // funcSign must call something like that c.SignSequenceBanana(ctx, signedSequence) - signature, err := funcSign(c) - - if err != nil { - ch <- signatureMsg{ - addr: member.Addr, - err: err, - } - - return - } - // verify returned signature - signedSequence.SetSignature(signature) - signer, err := signedSequence.Signer() - if err != nil { - ch <- signatureMsg{ - addr: member.Addr, - err: err, - } - - return - } - if signer != member.Addr { - ch <- signatureMsg{ - addr: member.Addr, - err: fmt.Errorf("invalid signer. 
Expected %s, actual %s", member.Addr.Hex(), signer.Hex()), - } - - return - } - ch <- signatureMsg{ - addr: member.Addr, - signature: signature, - } -} - -func (d *Backend) buildSignaturesAndAddrs(sigs signatureMsgs, members []DataCommitteeMember) []byte { - const ( - sigLen = 65 - ) - res := make([]byte, 0, len(sigs)*sigLen+len(members)*common.AddressLength) - sort.Sort(sigs) - for _, msg := range sigs { - d.logger.Debugf("adding signature %s from %s", common.Bytes2Hex(msg.signature), msg.addr.Hex()) - res = append(res, msg.signature...) - } - for _, member := range members { - d.logger.Debugf("adding addr %s", common.Bytes2Hex(member.Addr.Bytes())) - res = append(res, member.Addr.Bytes()...) - } - d.logger.Debugf("full res %s", common.Bytes2Hex(res)) - return res -} - -type signatureMsgs []signatureMsg - -func (s signatureMsgs) Len() int { return len(s) } -func (s signatureMsgs) Less(i, j int) bool { - return strings.ToUpper(s[i].addr.Hex()) < strings.ToUpper(s[j].addr.Hex()) -} -func (s signatureMsgs) Swap(i, j int) { s[i], s[j] = s[j], s[i] } - -// getCurrentDataCommittee return the currently registered data committee -func (d *Backend) getCurrentDataCommittee() (*DataCommittee, error) { - addrsHash, err := d.dataCommitteeContract.CommitteeHash(&bind.CallOpts{Pending: false}) - if err != nil { - return nil, fmt.Errorf("error getting CommitteeHash from L1 SC: %w", err) - } - - reqSign, err := d.dataCommitteeContract.RequiredAmountOfSignatures(&bind.CallOpts{Pending: false}) - if err != nil { - return nil, fmt.Errorf("error getting RequiredAmountOfSignatures from L1 SC: %w", err) - } - - members, err := d.getCurrentDataCommitteeMembers() - if err != nil { - return nil, err - } - - return &DataCommittee{ - AddressesHash: addrsHash, - RequiredSignatures: reqSign.Uint64(), - Members: members, - }, nil -} - -// getCurrentDataCommitteeMembers return the currently registered data committee members -func (d *Backend) getCurrentDataCommitteeMembers() 
([]DataCommitteeMember, error) { - nMembers, err := d.dataCommitteeContract.GetAmountOfMembers(&bind.CallOpts{Pending: false}) - if err != nil { - return nil, fmt.Errorf("error getting GetAmountOfMembers from L1 SC: %w", err) - } - members := make([]DataCommitteeMember, 0, nMembers.Int64()) - for i := int64(0); i < nMembers.Int64(); i++ { - member, err := d.dataCommitteeContract.Members(&bind.CallOpts{Pending: false}, big.NewInt(i)) - if err != nil { - return nil, fmt.Errorf("error getting Members %d from L1 SC: %w", i, err) - } - if d.Translator != nil { - member.Url = d.Translator.Translate(translateContextName, member.Url) - } - members = append(members, DataCommitteeMember{ - Addr: member.Addr, - URL: member.Url, - }) - } - - return members, nil -} diff --git a/dataavailability/datacommittee/datacommittee_test.go b/dataavailability/datacommittee/datacommittee_test.go deleted file mode 100644 index 8931e6e8..00000000 --- a/dataavailability/datacommittee/datacommittee_test.go +++ /dev/null @@ -1,142 +0,0 @@ -package datacommittee - -import ( - "errors" - "math/big" - "testing" - - "github.com/0xPolygon/cdk-contracts-tooling/contracts/banana/polygondatacommittee" - "github.com/agglayer/aggkit/log" - "github.com/agglayer/aggkit/test/contracts/erc1967proxy" - "github.com/agglayer/aggkit/test/helpers" - "github.com/ethereum/go-ethereum/accounts/abi/bind" - "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/crypto" - "github.com/ethereum/go-ethereum/ethclient/simulated" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func TestUpdateDataCommitteeEvent(t *testing.T) { - // Set up testing environment - dac, ethBackend, da, auth := newSimulatedDACBackend(t) - - // Update the committee - requiredAmountOfSignatures := big.NewInt(2) - URLs := []string{"1", "2", "3"} - addrs := []common.Address{ - common.HexToAddress("0x1"), - common.HexToAddress("0x2"), - common.HexToAddress("0x3"), - } - addrsBytes := []byte{} - 
for _, addr := range addrs { - addrsBytes = append(addrsBytes, addr.Bytes()...) - } - _, err := da.SetupCommittee(auth, requiredAmountOfSignatures, URLs, addrsBytes) - require.NoError(t, err) - ethBackend.Commit() - - // Assert the committee update - actualSetup, err := dac.getCurrentDataCommittee() - require.NoError(t, err) - expectedMembers := []DataCommitteeMember{} - expectedSetup := DataCommittee{ - RequiredSignatures: uint64(len(URLs) - 1), - AddressesHash: crypto.Keccak256Hash(addrsBytes), - } - for i, url := range URLs { - expectedMembers = append(expectedMembers, DataCommitteeMember{ - URL: url, - Addr: addrs[i], - }) - } - expectedSetup.Members = expectedMembers - assert.Equal(t, expectedSetup, *actualSetup) -} - -func init() { - log.Init(log.Config{ - Level: "debug", - Outputs: []string{"stderr"}, - }) -} - -// NewSimulatedEtherman creates an etherman that uses a simulated blockchain. It's important to notice that the ChainID of the auth -// must be 1337. The address that holds the auth will have an initial balance of 10 ETH -func newSimulatedDACBackend(t *testing.T) ( - *Backend, - *simulated.Backend, - *polygondatacommittee.Polygondatacommittee, - *bind.TransactOpts, -) { - t.Helper() - - deployerAuth, err := helpers.CreateAccount(big.NewInt(1337)) - require.NoError(t, err) - - ethBackend, setup := helpers.NewSimulatedBackend(t, nil, deployerAuth) - - // DAC Setup - addr, _, _, err := polygondatacommittee.DeployPolygondatacommittee(setup.UserAuth, ethBackend.Client()) - require.NoError(t, err) - ethBackend.Commit() - - proxyAddr, err := deployDACProxy(setup.UserAuth, ethBackend.Client(), addr) - require.NoError(t, err) - ethBackend.Commit() - - da, err := polygondatacommittee.NewPolygondatacommittee(proxyAddr, ethBackend.Client()) - require.NoError(t, err) - - _, err = da.SetupCommittee(setup.UserAuth, big.NewInt(0), []string{}, []byte{}) - require.NoError(t, err) - ethBackend.Commit() - - c := &Backend{ - dataCommitteeContract: da, - } - - return c, 
ethBackend, da, setup.UserAuth -} - -func deployDACProxy(auth *bind.TransactOpts, client bind.ContractBackend, dacImpl common.Address) (common.Address, error) { - // Deploy proxy - dacABI, err := polygondatacommittee.PolygondatacommitteeMetaData.GetAbi() - if err != nil { - return common.Address{}, err - } - if dacABI == nil { - return common.Address{}, errors.New("GetABI returned nil") - } - initializeCallData, err := dacABI.Pack("initialize") - if err != nil { - return common.Address{}, err - } - proxyAddr, err := deployProxy( - auth, - client, - dacImpl, - initializeCallData, - ) - if err != nil { - return common.Address{}, err - } - log.Debugf("DAC proxy deployed at", proxyAddr) - - return proxyAddr, nil -} - -func deployProxy(auth *bind.TransactOpts, - client bind.ContractBackend, - implementationAddr common.Address, - initializeParams []byte) (common.Address, error) { - addr, _, _, err := erc1967proxy.DeployErc1967proxy( - auth, - client, - implementationAddr, - initializeParams, - ) - - return addr, err -} diff --git a/dataavailability/interfaces.go b/dataavailability/interfaces.go deleted file mode 100644 index 6ca62bab..00000000 --- a/dataavailability/interfaces.go +++ /dev/null @@ -1,54 +0,0 @@ -package dataavailability - -import ( - "context" - - "github.com/agglayer/aggkit/etherman" - "github.com/ethereum/go-ethereum/common" -) - -// DABackender is an interface for components that store and retrieve batch data -type DABackender interface { - SequenceRetriever - SequenceSender - // Init initializes the DABackend - Init() error -} - -// SequenceSender is used to send provided sequence of batches -type SequenceSender interface { - SequenceSenderElderberry - SequenceSenderBanana -} - -// SequenceSenderElderberry defines methods for sending sequence data to the data availability backend. 
-type SequenceSenderElderberry interface { - // PostSequence sends the sequence data to the data availability backend, and returns the dataAvailabilityMessage - // as expected by the contract - PostSequenceElderberry(ctx context.Context, batchesData [][]byte) ([]byte, error) -} - -// SequenceSenderBanana defines methods for sending sequence data to the data availability backend. -type SequenceSenderBanana interface { - // PostSequence sends the sequence data to the data availability backend, and returns the dataAvailabilityMessage - // as expected by the contract - PostSequenceBanana(ctx context.Context, sequence etherman.SequenceBanana) ([]byte, error) -} - -// SequenceRetriever is used to retrieve batch data -type SequenceRetriever interface { - // GetSequence retrieves the sequence data from the data availability backend - GetSequence(ctx context.Context, batchHashes []common.Hash, dataAvailabilityMessage []byte) ([][]byte, error) -} - -// BatchDataProvider is used to retrieve batch data -type BatchDataProvider interface { - // GetBatchL2Data retrieve the data of a batch from the DA backend. The returned data must be the pre-image of the hash - GetBatchL2Data(batchNum []uint64, batchHashes []common.Hash, dataAvailabilityMessage []byte) ([][]byte, error) -} - -// DataManager is an interface for components that send and retrieve batch data -type DataManager interface { - BatchDataProvider - SequenceSender -} diff --git a/dataavailability/mocks_da/batch_data_provider.go b/dataavailability/mocks_da/batch_data_provider.go deleted file mode 100644 index 36e782ac..00000000 --- a/dataavailability/mocks_da/batch_data_provider.go +++ /dev/null @@ -1,96 +0,0 @@ -// Code generated by mockery. DO NOT EDIT. 
- -package mocks_da - -import ( - common "github.com/ethereum/go-ethereum/common" - - mock "github.com/stretchr/testify/mock" -) - -// BatchDataProvider is an autogenerated mock type for the BatchDataProvider type -type BatchDataProvider struct { - mock.Mock -} - -type BatchDataProvider_Expecter struct { - mock *mock.Mock -} - -func (_m *BatchDataProvider) EXPECT() *BatchDataProvider_Expecter { - return &BatchDataProvider_Expecter{mock: &_m.Mock} -} - -// GetBatchL2Data provides a mock function with given fields: batchNum, batchHashes, dataAvailabilityMessage -func (_m *BatchDataProvider) GetBatchL2Data(batchNum []uint64, batchHashes []common.Hash, dataAvailabilityMessage []byte) ([][]byte, error) { - ret := _m.Called(batchNum, batchHashes, dataAvailabilityMessage) - - if len(ret) == 0 { - panic("no return value specified for GetBatchL2Data") - } - - var r0 [][]byte - var r1 error - if rf, ok := ret.Get(0).(func([]uint64, []common.Hash, []byte) ([][]byte, error)); ok { - return rf(batchNum, batchHashes, dataAvailabilityMessage) - } - if rf, ok := ret.Get(0).(func([]uint64, []common.Hash, []byte) [][]byte); ok { - r0 = rf(batchNum, batchHashes, dataAvailabilityMessage) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).([][]byte) - } - } - - if rf, ok := ret.Get(1).(func([]uint64, []common.Hash, []byte) error); ok { - r1 = rf(batchNum, batchHashes, dataAvailabilityMessage) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// BatchDataProvider_GetBatchL2Data_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetBatchL2Data' -type BatchDataProvider_GetBatchL2Data_Call struct { - *mock.Call -} - -// GetBatchL2Data is a helper method to define mock.On call -// - batchNum []uint64 -// - batchHashes []common.Hash -// - dataAvailabilityMessage []byte -func (_e *BatchDataProvider_Expecter) GetBatchL2Data(batchNum interface{}, batchHashes interface{}, dataAvailabilityMessage interface{}) 
*BatchDataProvider_GetBatchL2Data_Call { - return &BatchDataProvider_GetBatchL2Data_Call{Call: _e.mock.On("GetBatchL2Data", batchNum, batchHashes, dataAvailabilityMessage)} -} - -func (_c *BatchDataProvider_GetBatchL2Data_Call) Run(run func(batchNum []uint64, batchHashes []common.Hash, dataAvailabilityMessage []byte)) *BatchDataProvider_GetBatchL2Data_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].([]uint64), args[1].([]common.Hash), args[2].([]byte)) - }) - return _c -} - -func (_c *BatchDataProvider_GetBatchL2Data_Call) Return(_a0 [][]byte, _a1 error) *BatchDataProvider_GetBatchL2Data_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *BatchDataProvider_GetBatchL2Data_Call) RunAndReturn(run func([]uint64, []common.Hash, []byte) ([][]byte, error)) *BatchDataProvider_GetBatchL2Data_Call { - _c.Call.Return(run) - return _c -} - -// NewBatchDataProvider creates a new instance of BatchDataProvider. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. -// The first argument is typically a *testing.T value. 
-func NewBatchDataProvider(t interface { - mock.TestingT - Cleanup(func()) -}) *BatchDataProvider { - mock := &BatchDataProvider{} - mock.Mock.Test(t) - - t.Cleanup(func() { mock.AssertExpectations(t) }) - - return mock -} diff --git a/dataavailability/mocks_da/da_backender.go b/dataavailability/mocks_da/da_backender.go index d12e6b35..73c732c4 100644 --- a/dataavailability/mocks_da/da_backender.go +++ b/dataavailability/mocks_da/da_backender.go @@ -85,7 +85,7 @@ func (_c *DABackender_GetSequence_Call) RunAndReturn(run func(context.Context, [ return _c } -// Init provides a mock function with no fields +// Init provides a mock function with given fields: func (_m *DABackender) Init() error { ret := _m.Called() diff --git a/dataavailability/mocks_da/data_manager.go b/dataavailability/mocks_da/data_manager.go deleted file mode 100644 index f2ac6886..00000000 --- a/dataavailability/mocks_da/data_manager.go +++ /dev/null @@ -1,218 +0,0 @@ -// Code generated by mockery. DO NOT EDIT. - -package mocks_da - -import ( - context "context" - - common "github.com/ethereum/go-ethereum/common" - - etherman "github.com/agglayer/aggkit/etherman" - - mock "github.com/stretchr/testify/mock" -) - -// DataManager is an autogenerated mock type for the DataManager type -type DataManager struct { - mock.Mock -} - -type DataManager_Expecter struct { - mock *mock.Mock -} - -func (_m *DataManager) EXPECT() *DataManager_Expecter { - return &DataManager_Expecter{mock: &_m.Mock} -} - -// GetBatchL2Data provides a mock function with given fields: batchNum, batchHashes, dataAvailabilityMessage -func (_m *DataManager) GetBatchL2Data(batchNum []uint64, batchHashes []common.Hash, dataAvailabilityMessage []byte) ([][]byte, error) { - ret := _m.Called(batchNum, batchHashes, dataAvailabilityMessage) - - if len(ret) == 0 { - panic("no return value specified for GetBatchL2Data") - } - - var r0 [][]byte - var r1 error - if rf, ok := ret.Get(0).(func([]uint64, []common.Hash, []byte) ([][]byte, error)); ok 
{ - return rf(batchNum, batchHashes, dataAvailabilityMessage) - } - if rf, ok := ret.Get(0).(func([]uint64, []common.Hash, []byte) [][]byte); ok { - r0 = rf(batchNum, batchHashes, dataAvailabilityMessage) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).([][]byte) - } - } - - if rf, ok := ret.Get(1).(func([]uint64, []common.Hash, []byte) error); ok { - r1 = rf(batchNum, batchHashes, dataAvailabilityMessage) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// DataManager_GetBatchL2Data_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetBatchL2Data' -type DataManager_GetBatchL2Data_Call struct { - *mock.Call -} - -// GetBatchL2Data is a helper method to define mock.On call -// - batchNum []uint64 -// - batchHashes []common.Hash -// - dataAvailabilityMessage []byte -func (_e *DataManager_Expecter) GetBatchL2Data(batchNum interface{}, batchHashes interface{}, dataAvailabilityMessage interface{}) *DataManager_GetBatchL2Data_Call { - return &DataManager_GetBatchL2Data_Call{Call: _e.mock.On("GetBatchL2Data", batchNum, batchHashes, dataAvailabilityMessage)} -} - -func (_c *DataManager_GetBatchL2Data_Call) Run(run func(batchNum []uint64, batchHashes []common.Hash, dataAvailabilityMessage []byte)) *DataManager_GetBatchL2Data_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].([]uint64), args[1].([]common.Hash), args[2].([]byte)) - }) - return _c -} - -func (_c *DataManager_GetBatchL2Data_Call) Return(_a0 [][]byte, _a1 error) *DataManager_GetBatchL2Data_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *DataManager_GetBatchL2Data_Call) RunAndReturn(run func([]uint64, []common.Hash, []byte) ([][]byte, error)) *DataManager_GetBatchL2Data_Call { - _c.Call.Return(run) - return _c -} - -// PostSequenceBanana provides a mock function with given fields: ctx, sequence -func (_m *DataManager) PostSequenceBanana(ctx context.Context, sequence etherman.SequenceBanana) ([]byte, error) { - ret := 
_m.Called(ctx, sequence) - - if len(ret) == 0 { - panic("no return value specified for PostSequenceBanana") - } - - var r0 []byte - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, etherman.SequenceBanana) ([]byte, error)); ok { - return rf(ctx, sequence) - } - if rf, ok := ret.Get(0).(func(context.Context, etherman.SequenceBanana) []byte); ok { - r0 = rf(ctx, sequence) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).([]byte) - } - } - - if rf, ok := ret.Get(1).(func(context.Context, etherman.SequenceBanana) error); ok { - r1 = rf(ctx, sequence) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// DataManager_PostSequenceBanana_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'PostSequenceBanana' -type DataManager_PostSequenceBanana_Call struct { - *mock.Call -} - -// PostSequenceBanana is a helper method to define mock.On call -// - ctx context.Context -// - sequence etherman.SequenceBanana -func (_e *DataManager_Expecter) PostSequenceBanana(ctx interface{}, sequence interface{}) *DataManager_PostSequenceBanana_Call { - return &DataManager_PostSequenceBanana_Call{Call: _e.mock.On("PostSequenceBanana", ctx, sequence)} -} - -func (_c *DataManager_PostSequenceBanana_Call) Run(run func(ctx context.Context, sequence etherman.SequenceBanana)) *DataManager_PostSequenceBanana_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(etherman.SequenceBanana)) - }) - return _c -} - -func (_c *DataManager_PostSequenceBanana_Call) Return(_a0 []byte, _a1 error) *DataManager_PostSequenceBanana_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *DataManager_PostSequenceBanana_Call) RunAndReturn(run func(context.Context, etherman.SequenceBanana) ([]byte, error)) *DataManager_PostSequenceBanana_Call { - _c.Call.Return(run) - return _c -} - -// PostSequenceElderberry provides a mock function with given fields: ctx, batchesData -func (_m *DataManager) 
PostSequenceElderberry(ctx context.Context, batchesData [][]byte) ([]byte, error) { - ret := _m.Called(ctx, batchesData) - - if len(ret) == 0 { - panic("no return value specified for PostSequenceElderberry") - } - - var r0 []byte - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, [][]byte) ([]byte, error)); ok { - return rf(ctx, batchesData) - } - if rf, ok := ret.Get(0).(func(context.Context, [][]byte) []byte); ok { - r0 = rf(ctx, batchesData) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).([]byte) - } - } - - if rf, ok := ret.Get(1).(func(context.Context, [][]byte) error); ok { - r1 = rf(ctx, batchesData) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// DataManager_PostSequenceElderberry_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'PostSequenceElderberry' -type DataManager_PostSequenceElderberry_Call struct { - *mock.Call -} - -// PostSequenceElderberry is a helper method to define mock.On call -// - ctx context.Context -// - batchesData [][]byte -func (_e *DataManager_Expecter) PostSequenceElderberry(ctx interface{}, batchesData interface{}) *DataManager_PostSequenceElderberry_Call { - return &DataManager_PostSequenceElderberry_Call{Call: _e.mock.On("PostSequenceElderberry", ctx, batchesData)} -} - -func (_c *DataManager_PostSequenceElderberry_Call) Run(run func(ctx context.Context, batchesData [][]byte)) *DataManager_PostSequenceElderberry_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].([][]byte)) - }) - return _c -} - -func (_c *DataManager_PostSequenceElderberry_Call) Return(_a0 []byte, _a1 error) *DataManager_PostSequenceElderberry_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *DataManager_PostSequenceElderberry_Call) RunAndReturn(run func(context.Context, [][]byte) ([]byte, error)) *DataManager_PostSequenceElderberry_Call { - _c.Call.Return(run) - return _c -} - -// NewDataManager creates a new instance of DataManager. 
It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. -// The first argument is typically a *testing.T value. -func NewDataManager(t interface { - mock.TestingT - Cleanup(func()) -}) *DataManager { - mock := &DataManager{} - mock.Mock.Test(t) - - t.Cleanup(func() { mock.AssertExpectations(t) }) - - return mock -} diff --git a/dataavailability/mocks_da/func_sign_type.go b/dataavailability/mocks_da/func_sign_type.go deleted file mode 100644 index 6a343269..00000000 --- a/dataavailability/mocks_da/func_sign_type.go +++ /dev/null @@ -1,94 +0,0 @@ -// Code generated by mockery. DO NOT EDIT. - -package mocks_da - -import ( - client "github.com/0xPolygon/cdk-data-availability/client" - - mock "github.com/stretchr/testify/mock" -) - -// FuncSignType is an autogenerated mock type for the funcSignType type -type FuncSignType struct { - mock.Mock -} - -type FuncSignType_Expecter struct { - mock *mock.Mock -} - -func (_m *FuncSignType) EXPECT() *FuncSignType_Expecter { - return &FuncSignType_Expecter{mock: &_m.Mock} -} - -// Execute provides a mock function with given fields: c -func (_m *FuncSignType) Execute(c client.Client) ([]byte, error) { - ret := _m.Called(c) - - if len(ret) == 0 { - panic("no return value specified for Execute") - } - - var r0 []byte - var r1 error - if rf, ok := ret.Get(0).(func(client.Client) ([]byte, error)); ok { - return rf(c) - } - if rf, ok := ret.Get(0).(func(client.Client) []byte); ok { - r0 = rf(c) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).([]byte) - } - } - - if rf, ok := ret.Get(1).(func(client.Client) error); ok { - r1 = rf(c) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// FuncSignType_Execute_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Execute' -type FuncSignType_Execute_Call struct { - *mock.Call -} - -// Execute is a helper method to define mock.On call -// - c client.Client -func (_e *FuncSignType_Expecter) 
Execute(c interface{}) *FuncSignType_Execute_Call { - return &FuncSignType_Execute_Call{Call: _e.mock.On("Execute", c)} -} - -func (_c *FuncSignType_Execute_Call) Run(run func(c client.Client)) *FuncSignType_Execute_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(client.Client)) - }) - return _c -} - -func (_c *FuncSignType_Execute_Call) Return(_a0 []byte, _a1 error) *FuncSignType_Execute_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *FuncSignType_Execute_Call) RunAndReturn(run func(client.Client) ([]byte, error)) *FuncSignType_Execute_Call { - _c.Call.Return(run) - return _c -} - -// NewFuncSignType creates a new instance of FuncSignType. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. -// The first argument is typically a *testing.T value. -func NewFuncSignType(t interface { - mock.TestingT - Cleanup(func()) -}) *FuncSignType { - mock := &FuncSignType{} - mock.Mock.Test(t) - - t.Cleanup(func() { mock.AssertExpectations(t) }) - - return mock -} diff --git a/dataavailability/mocks_da/sequence_retriever.go b/dataavailability/mocks_da/sequence_retriever.go deleted file mode 100644 index f82d9a70..00000000 --- a/dataavailability/mocks_da/sequence_retriever.go +++ /dev/null @@ -1,98 +0,0 @@ -// Code generated by mockery. DO NOT EDIT. 
- -package mocks_da - -import ( - context "context" - - common "github.com/ethereum/go-ethereum/common" - - mock "github.com/stretchr/testify/mock" -) - -// SequenceRetriever is an autogenerated mock type for the SequenceRetriever type -type SequenceRetriever struct { - mock.Mock -} - -type SequenceRetriever_Expecter struct { - mock *mock.Mock -} - -func (_m *SequenceRetriever) EXPECT() *SequenceRetriever_Expecter { - return &SequenceRetriever_Expecter{mock: &_m.Mock} -} - -// GetSequence provides a mock function with given fields: ctx, batchHashes, dataAvailabilityMessage -func (_m *SequenceRetriever) GetSequence(ctx context.Context, batchHashes []common.Hash, dataAvailabilityMessage []byte) ([][]byte, error) { - ret := _m.Called(ctx, batchHashes, dataAvailabilityMessage) - - if len(ret) == 0 { - panic("no return value specified for GetSequence") - } - - var r0 [][]byte - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, []common.Hash, []byte) ([][]byte, error)); ok { - return rf(ctx, batchHashes, dataAvailabilityMessage) - } - if rf, ok := ret.Get(0).(func(context.Context, []common.Hash, []byte) [][]byte); ok { - r0 = rf(ctx, batchHashes, dataAvailabilityMessage) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).([][]byte) - } - } - - if rf, ok := ret.Get(1).(func(context.Context, []common.Hash, []byte) error); ok { - r1 = rf(ctx, batchHashes, dataAvailabilityMessage) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// SequenceRetriever_GetSequence_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetSequence' -type SequenceRetriever_GetSequence_Call struct { - *mock.Call -} - -// GetSequence is a helper method to define mock.On call -// - ctx context.Context -// - batchHashes []common.Hash -// - dataAvailabilityMessage []byte -func (_e *SequenceRetriever_Expecter) GetSequence(ctx interface{}, batchHashes interface{}, dataAvailabilityMessage interface{}) *SequenceRetriever_GetSequence_Call { - 
return &SequenceRetriever_GetSequence_Call{Call: _e.mock.On("GetSequence", ctx, batchHashes, dataAvailabilityMessage)} -} - -func (_c *SequenceRetriever_GetSequence_Call) Run(run func(ctx context.Context, batchHashes []common.Hash, dataAvailabilityMessage []byte)) *SequenceRetriever_GetSequence_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].([]common.Hash), args[2].([]byte)) - }) - return _c -} - -func (_c *SequenceRetriever_GetSequence_Call) Return(_a0 [][]byte, _a1 error) *SequenceRetriever_GetSequence_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *SequenceRetriever_GetSequence_Call) RunAndReturn(run func(context.Context, []common.Hash, []byte) ([][]byte, error)) *SequenceRetriever_GetSequence_Call { - _c.Call.Return(run) - return _c -} - -// NewSequenceRetriever creates a new instance of SequenceRetriever. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. -// The first argument is typically a *testing.T value. -func NewSequenceRetriever(t interface { - mock.TestingT - Cleanup(func()) -}) *SequenceRetriever { - mock := &SequenceRetriever{} - mock.Mock.Test(t) - - t.Cleanup(func() { mock.AssertExpectations(t) }) - - return mock -} diff --git a/dataavailability/mocks_da/sequence_sender.go b/dataavailability/mocks_da/sequence_sender.go deleted file mode 100644 index 86a3ff52..00000000 --- a/dataavailability/mocks_da/sequence_sender.go +++ /dev/null @@ -1,156 +0,0 @@ -// Code generated by mockery. DO NOT EDIT. 
- -package mocks_da - -import ( - context "context" - - etherman "github.com/agglayer/aggkit/etherman" - - mock "github.com/stretchr/testify/mock" -) - -// SequenceSender is an autogenerated mock type for the SequenceSender type -type SequenceSender struct { - mock.Mock -} - -type SequenceSender_Expecter struct { - mock *mock.Mock -} - -func (_m *SequenceSender) EXPECT() *SequenceSender_Expecter { - return &SequenceSender_Expecter{mock: &_m.Mock} -} - -// PostSequenceBanana provides a mock function with given fields: ctx, sequence -func (_m *SequenceSender) PostSequenceBanana(ctx context.Context, sequence etherman.SequenceBanana) ([]byte, error) { - ret := _m.Called(ctx, sequence) - - if len(ret) == 0 { - panic("no return value specified for PostSequenceBanana") - } - - var r0 []byte - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, etherman.SequenceBanana) ([]byte, error)); ok { - return rf(ctx, sequence) - } - if rf, ok := ret.Get(0).(func(context.Context, etherman.SequenceBanana) []byte); ok { - r0 = rf(ctx, sequence) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).([]byte) - } - } - - if rf, ok := ret.Get(1).(func(context.Context, etherman.SequenceBanana) error); ok { - r1 = rf(ctx, sequence) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// SequenceSender_PostSequenceBanana_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'PostSequenceBanana' -type SequenceSender_PostSequenceBanana_Call struct { - *mock.Call -} - -// PostSequenceBanana is a helper method to define mock.On call -// - ctx context.Context -// - sequence etherman.SequenceBanana -func (_e *SequenceSender_Expecter) PostSequenceBanana(ctx interface{}, sequence interface{}) *SequenceSender_PostSequenceBanana_Call { - return &SequenceSender_PostSequenceBanana_Call{Call: _e.mock.On("PostSequenceBanana", ctx, sequence)} -} - -func (_c *SequenceSender_PostSequenceBanana_Call) Run(run func(ctx context.Context, sequence 
etherman.SequenceBanana)) *SequenceSender_PostSequenceBanana_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(etherman.SequenceBanana)) - }) - return _c -} - -func (_c *SequenceSender_PostSequenceBanana_Call) Return(_a0 []byte, _a1 error) *SequenceSender_PostSequenceBanana_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *SequenceSender_PostSequenceBanana_Call) RunAndReturn(run func(context.Context, etherman.SequenceBanana) ([]byte, error)) *SequenceSender_PostSequenceBanana_Call { - _c.Call.Return(run) - return _c -} - -// PostSequenceElderberry provides a mock function with given fields: ctx, batchesData -func (_m *SequenceSender) PostSequenceElderberry(ctx context.Context, batchesData [][]byte) ([]byte, error) { - ret := _m.Called(ctx, batchesData) - - if len(ret) == 0 { - panic("no return value specified for PostSequenceElderberry") - } - - var r0 []byte - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, [][]byte) ([]byte, error)); ok { - return rf(ctx, batchesData) - } - if rf, ok := ret.Get(0).(func(context.Context, [][]byte) []byte); ok { - r0 = rf(ctx, batchesData) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).([]byte) - } - } - - if rf, ok := ret.Get(1).(func(context.Context, [][]byte) error); ok { - r1 = rf(ctx, batchesData) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// SequenceSender_PostSequenceElderberry_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'PostSequenceElderberry' -type SequenceSender_PostSequenceElderberry_Call struct { - *mock.Call -} - -// PostSequenceElderberry is a helper method to define mock.On call -// - ctx context.Context -// - batchesData [][]byte -func (_e *SequenceSender_Expecter) PostSequenceElderberry(ctx interface{}, batchesData interface{}) *SequenceSender_PostSequenceElderberry_Call { - return &SequenceSender_PostSequenceElderberry_Call{Call: _e.mock.On("PostSequenceElderberry", ctx, 
batchesData)} -} - -func (_c *SequenceSender_PostSequenceElderberry_Call) Run(run func(ctx context.Context, batchesData [][]byte)) *SequenceSender_PostSequenceElderberry_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].([][]byte)) - }) - return _c -} - -func (_c *SequenceSender_PostSequenceElderberry_Call) Return(_a0 []byte, _a1 error) *SequenceSender_PostSequenceElderberry_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *SequenceSender_PostSequenceElderberry_Call) RunAndReturn(run func(context.Context, [][]byte) ([]byte, error)) *SequenceSender_PostSequenceElderberry_Call { - _c.Call.Return(run) - return _c -} - -// NewSequenceSender creates a new instance of SequenceSender. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. -// The first argument is typically a *testing.T value. -func NewSequenceSender(t interface { - mock.TestingT - Cleanup(func()) -}) *SequenceSender { - mock := &SequenceSender{} - mock.Mock.Test(t) - - t.Cleanup(func() { mock.AssertExpectations(t) }) - - return mock -} diff --git a/dataavailability/mocks_da/sequence_sender_banana.go b/dataavailability/mocks_da/sequence_sender_banana.go deleted file mode 100644 index e6da586e..00000000 --- a/dataavailability/mocks_da/sequence_sender_banana.go +++ /dev/null @@ -1,97 +0,0 @@ -// Code generated by mockery. DO NOT EDIT. 
- -package mocks_da - -import ( - context "context" - - etherman "github.com/agglayer/aggkit/etherman" - - mock "github.com/stretchr/testify/mock" -) - -// SequenceSenderBanana is an autogenerated mock type for the SequenceSenderBanana type -type SequenceSenderBanana struct { - mock.Mock -} - -type SequenceSenderBanana_Expecter struct { - mock *mock.Mock -} - -func (_m *SequenceSenderBanana) EXPECT() *SequenceSenderBanana_Expecter { - return &SequenceSenderBanana_Expecter{mock: &_m.Mock} -} - -// PostSequenceBanana provides a mock function with given fields: ctx, sequence -func (_m *SequenceSenderBanana) PostSequenceBanana(ctx context.Context, sequence etherman.SequenceBanana) ([]byte, error) { - ret := _m.Called(ctx, sequence) - - if len(ret) == 0 { - panic("no return value specified for PostSequenceBanana") - } - - var r0 []byte - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, etherman.SequenceBanana) ([]byte, error)); ok { - return rf(ctx, sequence) - } - if rf, ok := ret.Get(0).(func(context.Context, etherman.SequenceBanana) []byte); ok { - r0 = rf(ctx, sequence) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).([]byte) - } - } - - if rf, ok := ret.Get(1).(func(context.Context, etherman.SequenceBanana) error); ok { - r1 = rf(ctx, sequence) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// SequenceSenderBanana_PostSequenceBanana_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'PostSequenceBanana' -type SequenceSenderBanana_PostSequenceBanana_Call struct { - *mock.Call -} - -// PostSequenceBanana is a helper method to define mock.On call -// - ctx context.Context -// - sequence etherman.SequenceBanana -func (_e *SequenceSenderBanana_Expecter) PostSequenceBanana(ctx interface{}, sequence interface{}) *SequenceSenderBanana_PostSequenceBanana_Call { - return &SequenceSenderBanana_PostSequenceBanana_Call{Call: _e.mock.On("PostSequenceBanana", ctx, sequence)} -} - -func (_c 
*SequenceSenderBanana_PostSequenceBanana_Call) Run(run func(ctx context.Context, sequence etherman.SequenceBanana)) *SequenceSenderBanana_PostSequenceBanana_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(etherman.SequenceBanana)) - }) - return _c -} - -func (_c *SequenceSenderBanana_PostSequenceBanana_Call) Return(_a0 []byte, _a1 error) *SequenceSenderBanana_PostSequenceBanana_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *SequenceSenderBanana_PostSequenceBanana_Call) RunAndReturn(run func(context.Context, etherman.SequenceBanana) ([]byte, error)) *SequenceSenderBanana_PostSequenceBanana_Call { - _c.Call.Return(run) - return _c -} - -// NewSequenceSenderBanana creates a new instance of SequenceSenderBanana. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. -// The first argument is typically a *testing.T value. -func NewSequenceSenderBanana(t interface { - mock.TestingT - Cleanup(func()) -}) *SequenceSenderBanana { - mock := &SequenceSenderBanana{} - mock.Mock.Test(t) - - t.Cleanup(func() { mock.AssertExpectations(t) }) - - return mock -} diff --git a/dataavailability/mocks_da/sequence_sender_elderberry.go b/dataavailability/mocks_da/sequence_sender_elderberry.go deleted file mode 100644 index 3816fa1b..00000000 --- a/dataavailability/mocks_da/sequence_sender_elderberry.go +++ /dev/null @@ -1,95 +0,0 @@ -// Code generated by mockery. DO NOT EDIT. 
- -package mocks_da - -import ( - context "context" - - mock "github.com/stretchr/testify/mock" -) - -// SequenceSenderElderberry is an autogenerated mock type for the SequenceSenderElderberry type -type SequenceSenderElderberry struct { - mock.Mock -} - -type SequenceSenderElderberry_Expecter struct { - mock *mock.Mock -} - -func (_m *SequenceSenderElderberry) EXPECT() *SequenceSenderElderberry_Expecter { - return &SequenceSenderElderberry_Expecter{mock: &_m.Mock} -} - -// PostSequenceElderberry provides a mock function with given fields: ctx, batchesData -func (_m *SequenceSenderElderberry) PostSequenceElderberry(ctx context.Context, batchesData [][]byte) ([]byte, error) { - ret := _m.Called(ctx, batchesData) - - if len(ret) == 0 { - panic("no return value specified for PostSequenceElderberry") - } - - var r0 []byte - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, [][]byte) ([]byte, error)); ok { - return rf(ctx, batchesData) - } - if rf, ok := ret.Get(0).(func(context.Context, [][]byte) []byte); ok { - r0 = rf(ctx, batchesData) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).([]byte) - } - } - - if rf, ok := ret.Get(1).(func(context.Context, [][]byte) error); ok { - r1 = rf(ctx, batchesData) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// SequenceSenderElderberry_PostSequenceElderberry_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'PostSequenceElderberry' -type SequenceSenderElderberry_PostSequenceElderberry_Call struct { - *mock.Call -} - -// PostSequenceElderberry is a helper method to define mock.On call -// - ctx context.Context -// - batchesData [][]byte -func (_e *SequenceSenderElderberry_Expecter) PostSequenceElderberry(ctx interface{}, batchesData interface{}) *SequenceSenderElderberry_PostSequenceElderberry_Call { - return &SequenceSenderElderberry_PostSequenceElderberry_Call{Call: _e.mock.On("PostSequenceElderberry", ctx, batchesData)} -} - -func (_c 
*SequenceSenderElderberry_PostSequenceElderberry_Call) Run(run func(ctx context.Context, batchesData [][]byte)) *SequenceSenderElderberry_PostSequenceElderberry_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].([][]byte)) - }) - return _c -} - -func (_c *SequenceSenderElderberry_PostSequenceElderberry_Call) Return(_a0 []byte, _a1 error) *SequenceSenderElderberry_PostSequenceElderberry_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *SequenceSenderElderberry_PostSequenceElderberry_Call) RunAndReturn(run func(context.Context, [][]byte) ([]byte, error)) *SequenceSenderElderberry_PostSequenceElderberry_Call { - _c.Call.Return(run) - return _c -} - -// NewSequenceSenderElderberry creates a new instance of SequenceSenderElderberry. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. -// The first argument is typically a *testing.T value. -func NewSequenceSenderElderberry(t interface { - mock.TestingT - Cleanup(func()) -}) *SequenceSenderElderberry { - mock := &SequenceSenderElderberry{} - mock.Mock.Test(t) - - t.Cleanup(func() { mock.AssertExpectations(t) }) - - return mock -} diff --git a/etherman/aggregator.go b/etherman/aggregator.go deleted file mode 100644 index 5e1192ec..00000000 --- a/etherman/aggregator.go +++ /dev/null @@ -1,130 +0,0 @@ -package etherman - -import ( - "context" - "encoding/hex" - "errors" - "fmt" - "math/big" - "strings" - - ethmanTypes "github.com/agglayer/aggkit/aggregator/ethmantypes" - "github.com/agglayer/aggkit/log" - "github.com/ethereum/go-ethereum/accounts/abi/bind" - "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/crypto" -) - -// BuildTrustedVerifyBatchesTxData builds a []bytes to be sent to the PoE SC method TrustedVerifyBatches. 
-func (etherMan *Client) BuildTrustedVerifyBatchesTxData( - lastVerifiedBatch, newVerifiedBatch uint64, inputs *ethmanTypes.FinalProofInputs, beneficiary common.Address, -) (to *common.Address, data []byte, err error) { - opts, err := etherMan.generateRandomAuth() - if err != nil { - return nil, nil, fmt.Errorf("failed to build trusted verify batches, err: %w", err) - } - opts.NoSend = true - // force nonce, gas limit and gas price to avoid querying it from the chain - opts.Nonce = big.NewInt(1) - opts.GasLimit = uint64(1) - opts.GasPrice = big.NewInt(1) - - var newLocalExitRoot [32]byte - copy(newLocalExitRoot[:], inputs.NewLocalExitRoot) - - var newStateRoot [32]byte - copy(newStateRoot[:], inputs.NewStateRoot) - - proof, err := convertProof(inputs.FinalProof.Proof) - if err != nil { - log.Errorf("error converting proof. Error: %v, Proof: %s", err, inputs.FinalProof.Proof) - - return nil, nil, err - } - - const pendStateNum = 0 // TODO hardcoded for now until we implement the pending state feature - - tx, err := etherMan.Contracts.Banana.RollupManager.VerifyBatchesTrustedAggregator( - &opts, - etherMan.RollupID, - pendStateNum, - lastVerifiedBatch, - newVerifiedBatch, - newLocalExitRoot, - newStateRoot, - beneficiary, - proof, - ) - if err != nil { - if parsedErr, ok := TryParseError(err); ok { - err = parsedErr - } - - return nil, nil, err - } - - return tx.To(), tx.Data(), nil -} - -// GetBatchAccInputHash gets the batch accumulated input hash from the ethereum -func (etherMan *Client) GetBatchAccInputHash(ctx context.Context, batchNumber uint64) (common.Hash, error) { - rollupData, err := etherMan.Contracts.Banana.RollupManager.GetRollupSequencedBatches( - &bind.CallOpts{Pending: false}, etherMan.RollupID, batchNumber, - ) - if err != nil { - return common.Hash{}, err - } - - return rollupData.AccInputHash, nil -} - -// GetRollupId returns the rollup id -func (etherMan *Client) GetRollupId() uint32 { //nolint:stylecheck - return etherMan.RollupID -} - -// 
generateRandomAuth generates an authorization instance from a -// randomly generated private key to be used to estimate gas for PoE -// operations NOT restricted to the Trusted Sequencer -func (etherMan *Client) generateRandomAuth() (bind.TransactOpts, error) { - privateKey, err := crypto.GenerateKey() - if err != nil { - return bind.TransactOpts{}, errors.New("failed to generate a private key to estimate L1 txs") - } - chainID := big.NewInt(0).SetUint64(etherMan.l1Cfg.L1ChainID) - auth, err := bind.NewKeyedTransactorWithChainID(privateKey, chainID) - if err != nil { - return bind.TransactOpts{}, errors.New("failed to generate a fake authorization to estimate L1 txs") - } - - return *auth, nil -} - -func convertProof(p string) ([24][32]byte, error) { - if len(p) != 24*32*2+2 { - return [24][32]byte{}, fmt.Errorf("invalid proof length. Length: %d", len(p)) - } - p = strings.TrimPrefix(p, "0x") - proof := [24][32]byte{} - for i := 0; i < 24; i++ { - data := p[i*64 : (i+1)*64] - p, err := DecodeBytes(&data) - if err != nil { - return [24][32]byte{}, fmt.Errorf("failed to decode proof, err: %w", err) - } - var aux [32]byte - copy(aux[:], p) - proof[i] = aux - } - - return proof, nil -} - -// DecodeBytes decodes a hex string into a []byte -func DecodeBytes(val *string) ([]byte, error) { - if val == nil { - return []byte{}, nil - } - - return hex.DecodeString(strings.TrimPrefix(*val, "0x")) -} diff --git a/go.mod b/go.mod index 785acaa5..8ffa0c0e 100644 --- a/go.mod +++ b/go.mod @@ -3,17 +3,14 @@ module github.com/agglayer/aggkit go 1.22.4 require ( - github.com/0xPolygon/cdk-contracts-tooling v0.0.1 - github.com/0xPolygon/cdk-data-availability v0.0.11 + github.com/0xPolygon/cdk-contracts-tooling v0.0.2-0.20241225094934-1d381f5703ef github.com/0xPolygon/cdk-rpc v0.0.0-20241004114257-6c3cb6eebfb6 github.com/0xPolygon/zkevm-ethtx-manager v0.2.1 - github.com/0xPolygonHermez/zkevm-synchronizer-l1 v1.0.6 github.com/ethereum/go-ethereum v1.14.8 
github.com/golang-collections/collections v0.0.0-20130729185459-604e922904d3 github.com/hermeznetwork/tracerr v0.3.2 github.com/iden3/go-iden3-crypto v0.0.17 github.com/invopop/jsonschema v0.12.0 - github.com/jackc/pgx/v4 v4.18.3 github.com/knadh/koanf/parsers/json v0.1.0 github.com/knadh/koanf/parsers/toml v0.1.0 github.com/knadh/koanf/providers/rawbytes v0.1.0 @@ -33,13 +30,12 @@ require ( golang.org/x/crypto v0.27.0 golang.org/x/net v0.29.0 golang.org/x/sync v0.9.0 - google.golang.org/grpc v1.64.0 google.golang.org/protobuf v1.34.2 modernc.org/sqlite v1.32.0 ) require ( - github.com/0xPolygon/cdk v0.1.0 // indirect + github.com/0xPolygonHermez/zkevm-synchronizer-l1 v1.0.6 // indirect github.com/DataDog/zstd v1.5.6 // indirect github.com/Microsoft/go-winio v0.6.2 // indirect github.com/StackExchange/wmi v1.2.1 // indirect @@ -90,14 +86,6 @@ require ( github.com/holiman/bloomfilter/v2 v2.0.3 // indirect github.com/holiman/uint256 v1.3.1 // indirect github.com/huin/goupnp v1.3.0 // indirect - github.com/jackc/chunkreader/v2 v2.0.1 // indirect - github.com/jackc/pgconn v1.14.3 // indirect - github.com/jackc/pgio v1.0.0 // indirect - github.com/jackc/pgpassfile v1.0.0 // indirect - github.com/jackc/pgproto3/v2 v2.3.3 // indirect - github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a // indirect - github.com/jackc/pgtype v1.14.0 // indirect - github.com/jackc/puddle v1.3.0 // indirect github.com/jackpal/go-nat-pmp v1.0.2 // indirect github.com/jmoiron/sqlx v1.2.0 // indirect github.com/klauspost/compress v1.17.9 // indirect @@ -110,7 +98,6 @@ require ( github.com/mattn/go-colorable v0.1.13 // indirect github.com/mattn/go-isatty v0.0.20 // indirect github.com/mattn/go-runewidth v0.0.16 // indirect - github.com/miguelmota/go-solidity-sha3 v0.1.1 // indirect github.com/mitchellh/copystructure v1.2.0 // indirect github.com/mitchellh/pointerstructure v1.2.0 // indirect github.com/mitchellh/reflectwalk v1.0.2 // indirect @@ -118,6 +105,7 @@ require ( 
github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect github.com/ncruces/go-strftime v0.1.9 // indirect github.com/olekukonko/tablewriter v0.0.5 // indirect + github.com/onsi/gomega v1.27.10 // indirect github.com/pelletier/go-toml v1.9.5 // indirect github.com/pkg/errors v0.9.1 // indirect github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect @@ -154,7 +142,6 @@ require ( golang.org/x/sys v0.25.0 // indirect golang.org/x/text v0.18.0 // indirect golang.org/x/time v0.5.0 // indirect - google.golang.org/genproto/googleapis/rpc v0.0.0-20240318140521-94a12d6c2237 // indirect gopkg.in/ini.v1 v1.67.0 // indirect gopkg.in/natefinch/lumberjack.v2 v2.2.1 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect diff --git a/go.sum b/go.sum index e9d89463..aec6d2d7 100644 --- a/go.sum +++ b/go.sum @@ -1,21 +1,13 @@ -github.com/0xPolygon/cdk v0.1.0 h1:gS5nLprJV1E48R6+fsTYWL2LrzFHK2ym+GNDnCXTuJk= -github.com/0xPolygon/cdk v0.1.0/go.mod h1:p4gFppiLHwouh77OQU3Z7iHrkkWp77FHqzQXrddUp8s= -github.com/0xPolygon/cdk-contracts-tooling v0.0.1 h1:2HH8KpO1CZRl1zHfn0IYwJhPA7l91DOWrjdExmaB9Kk= -github.com/0xPolygon/cdk-contracts-tooling v0.0.1/go.mod h1:mFlcEjsm2YBBsu8atHJ3zyVnwM+Z/fMXpVmIJge+WVU= -github.com/0xPolygon/cdk-data-availability v0.0.11 h1:enmlyFYCvmDmcX/2fnDjWnn3svqqm9o2Fe+Kupoykdo= -github.com/0xPolygon/cdk-data-availability v0.0.11/go.mod h1:20WaXcSp7ggoxWePL9ReKSuqksHUx5h8LNQ+b56OHJE= +github.com/0xPolygon/cdk-contracts-tooling v0.0.2-0.20241225094934-1d381f5703ef h1:DRBrbysjMTyeFRbyo+zoltOTET+vR20CnXc4wupj+qo= +github.com/0xPolygon/cdk-contracts-tooling v0.0.2-0.20241225094934-1d381f5703ef/go.mod h1:mFlcEjsm2YBBsu8atHJ3zyVnwM+Z/fMXpVmIJge+WVU= github.com/0xPolygon/cdk-rpc v0.0.0-20241004114257-6c3cb6eebfb6 h1:FXL/rcO7/GtZ3kRFw+C7J6vmGnl8gcazg+Gh/NVmnas= github.com/0xPolygon/cdk-rpc v0.0.0-20241004114257-6c3cb6eebfb6/go.mod h1:2scWqMMufrQXu7TikDgQ3BsyaKoX8qP26D6E262vSOg= github.com/0xPolygon/zkevm-ethtx-manager v0.2.1 
h1:2Yb+KdJFMpVrS9LIkd658XiWuN+MCTs7SgeWaopXScg= github.com/0xPolygon/zkevm-ethtx-manager v0.2.1/go.mod h1:lqQmzSo2OXEZItD0R4Cd+lqKFxphXEWgqHefVcGDZZc= github.com/0xPolygonHermez/zkevm-synchronizer-l1 v1.0.6 h1:+XsCHXvQezRdMnkI37Wa/nV4sOZshJavxNzRpH/R6dw= github.com/0xPolygonHermez/zkevm-synchronizer-l1 v1.0.6/go.mod h1:X4Su/M/+hSISqdl9yomKlRsbTyuZHsRohporyHsP8gg= -github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= github.com/DataDog/zstd v1.5.6 h1:LbEglqepa/ipmmQJUDnSsfvA8e8IStVcGaFWDuxvGOY= github.com/DataDog/zstd v1.5.6/go.mod h1:g4AWEaM3yOg3HYfnJ3YIawPnVdXJh9QME85blwSAmyw= -github.com/Masterminds/semver/v3 v3.1.1/go.mod h1:VPu/7SZ7ePZ3QOrcuXROw5FAcLl4a0cBrbBpGY/8hQs= -github.com/Masterminds/semver/v3 v3.2.0 h1:3MEsd0SM6jqZojhjLWWeBY+Kcjy9i6MQAeY7YgDP83g= -github.com/Masterminds/semver/v3 v3.2.0/go.mod h1:qvl/7zhW3nngYb5+80sSMF+FG2BjYrf8m9wsX0PNOMQ= github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY= github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU= github.com/StackExchange/wmi v1.2.1 h1:VIkavFPXSjcnS+O8yTq7NI32k0R5Aj+v39y29VYDOSA= @@ -45,8 +37,6 @@ github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XL github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= -github.com/cockroachdb/apd v1.1.0 h1:3LFP3629v+1aKXU5Q37mxmRxX/pIu1nijXydLShEq5I= -github.com/cockroachdb/apd v1.1.0/go.mod h1:8Sl8LxpKi29FqWXR16WEFZRNSz3SoPzUzeMeY4+DwBQ= github.com/cockroachdb/datadriven v1.0.3-0.20230413201302-be42291fc80f h1:otljaYPt5hWxV3MUfO5dFPFiOXg9CyG5/kCfayTqsJ4= github.com/cockroachdb/datadriven v1.0.3-0.20230413201302-be42291fc80f/go.mod h1:a9RdTaap04u637JoCzcUoIcDmvwSUtcUFtT/C3kJlTU= 
github.com/cockroachdb/errors v1.11.3 h1:5bA+k2Y6r+oz/6Z/RFlNeVCesGARKuC6YymtcDrbC/I= @@ -65,15 +55,12 @@ github.com/consensys/bavard v0.1.13 h1:oLhMLOFGTLdlda/kma4VOJazblc7IM5y5QPd2A/Yj github.com/consensys/bavard v0.1.13/go.mod h1:9ItSMtA/dXMAiL7BG6bqW2m3NdSEObYWoH223nGHukI= github.com/consensys/gnark-crypto v0.13.0 h1:VPULb/v6bbYELAPTDFINEVaMTTybV5GLxDdcjnS+4oc= github.com/consensys/gnark-crypto v0.13.0/go.mod h1:wKqwsieaKPThcFkHe0d0zMsbHEUWFmZcG7KBCse210o= -github.com/coreos/go-systemd v0.0.0-20190321100706-95778dfbb74e/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= -github.com/coreos/go-systemd v0.0.0-20190719114852-fd7a80b32e1f/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= github.com/cpuguy83/go-md2man/v2 v2.0.4 h1:wfIWP927BUkWJb2NmU/kNDYIBTh/ziUX91+lVfRxZq4= github.com/cpuguy83/go-md2man/v2 v2.0.4/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= github.com/crate-crypto/go-ipa v0.0.0-20240724233137-53bbb0ceb27a h1:W8mUrRp6NOVl3J+MYp5kPMoUZPp7aOYHtaua31lwRHg= github.com/crate-crypto/go-ipa v0.0.0-20240724233137-53bbb0ceb27a/go.mod h1:sTwzHBvIzm2RfVCGNEBZgRyjwK40bVoun3ZnGOCafNM= github.com/crate-crypto/go-kzg-4844 v1.1.0 h1:EN/u9k2TF6OWSHrCCDBBU6GLNMq88OspHHlMnHfoyU4= github.com/crate-crypto/go-kzg-4844 v1.1.0/go.mod h1:JolLjpSff1tCCJKaJx4psrlEdlXuJEC996PL3tTAFks= -github.com/creack/pty v1.1.7/go.mod h1:lj5s0c3V2DBrqTV7llrYr5NG6My20zk30Fl46Y7DoTY= github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= @@ -110,8 +97,6 @@ github.com/go-errors/errors v1.4.2 h1:J6MZopCL4uSllY1OfXM374weqZFFItUbrImctkmUxI github.com/go-errors/errors v1.4.2/go.mod h1:sIVyrIiJhuEF+Pj9Ebtd6P/rEYROXFi3BopGUQ5a5Og= github.com/go-gorp/gorp/v3 v3.1.0 h1:ItKF/Vbuj31dmV4jxA1qblpSwkl9g1typ24xoe70IGs= github.com/go-gorp/gorp/v3 v3.1.0/go.mod 
h1:dLEjIyyRNiXvNZ8PSmzpt1GsWAUK8kjVhEpjH8TixEw= -github.com/go-kit/log v0.1.0/go.mod h1:zbhenjAZHb184qTLMA9ZjW7ThYL0H2mk7Q6pNt4vbaY= -github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG1KdI/P7A= github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= github.com/go-logr/logr v1.4.1 h1:pKouT5E8xu9zeFC39JXRDukb6JFQPXM5p5I91188VAQ= github.com/go-logr/logr v1.4.1/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= @@ -125,14 +110,11 @@ github.com/go-pkgz/expirable-cache v0.0.3/go.mod h1:+IauqN00R2FqNRLCLA+X5YljQJrw github.com/go-sql-driver/mysql v1.4.0/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w= github.com/go-sql-driver/mysql v1.6.0 h1:BCTh4TKNUYmOmMUcQ3IipzF5prigylS7XXjEkfCHuOE= github.com/go-sql-driver/mysql v1.6.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg= -github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= github.com/go-task/slim-sprig v0.0.0-20210107165309-348f09dbbbc0/go.mod h1:fyg7847qk6SyHyPtNmDHnmrv/HOrqktSC+C9fM+CJOE= github.com/go-viper/mapstructure/v2 v2.0.0-alpha.1 h1:TQcrn6Wq+sKGkpyPvppOz99zsMBaUOKXq6HSv655U1c= github.com/go-viper/mapstructure/v2 v2.0.0-alpha.1/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM= github.com/gofrs/flock v0.12.1 h1:MTLVXXHf8ekldpJk3AKicLij9MdwOWkZ+a/jHHZby9E= github.com/gofrs/flock v0.12.1/go.mod h1:9zxTsyu5xtJ9DK+1tFZyibEV7y3uwDxPPfbxeeHCoD0= -github.com/gofrs/uuid v4.0.0+incompatible h1:1SD/1F5pU8p29ybwgQSwpQk+mwdRrXCYuPhW6m+TnJw= -github.com/gofrs/uuid v4.0.0+incompatible/go.mod h1:b2aQJv3Z4Fp6yNu3cdSllBxTCLRxnplIgP/c0N/04lM= github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= github.com/golang-collections/collections v0.0.0-20130729185459-604e922904d3 h1:zN2lZNZRflqFyxVaTIU61KNKQ9C0055u9CAfpmqUvo4= @@ -162,7 +144,6 @@ github.com/google/gofuzz v1.2.0/go.mod 
h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/ github.com/google/pprof v0.0.0-20210407192527-94a9f03dee38/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20240409012703-83162a5b38cd h1:gbpYu9NMq8jhDVbvlGkMFWCjLFlqqEZjEmObmhUy6Vo= github.com/google/pprof v0.0.0-20240409012703-83162a5b38cd/go.mod h1:kf6iHlnVGwgKolg33glAes7Yg/8iWP8ukqeldJSO7jw= -github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= github.com/google/subcommands v1.2.0/go.mod h1:ZjhPrFU+Olkh9WazFPsl27BQ4UPiG37m3yTrtFlrHVk= github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= @@ -190,55 +171,6 @@ github.com/iden3/go-iden3-crypto v0.0.17 h1:NdkceRLJo/pI4UpcjVah4lN/a3yzxRUGXqxb github.com/iden3/go-iden3-crypto v0.0.17/go.mod h1:dLpM4vEPJ3nDHzhWFXDjzkn1qHoBeOT/3UEhXsEsP3E= github.com/invopop/jsonschema v0.12.0 h1:6ovsNSuvn9wEQVOyc72aycBMVQFKz7cPdMJn10CvzRI= github.com/invopop/jsonschema v0.12.0/go.mod h1:ffZ5Km5SWWRAIN6wbDXItl95euhFz2uON45H2qjYt+0= -github.com/jackc/chunkreader v1.0.0/go.mod h1:RT6O25fNZIuasFJRyZ4R/Y2BbhasbmZXF9QQ7T3kePo= -github.com/jackc/chunkreader/v2 v2.0.0/go.mod h1:odVSm741yZoC3dpHEUXIqA9tQRhFrgOHwnPIn9lDKlk= -github.com/jackc/chunkreader/v2 v2.0.1 h1:i+RDz65UE+mmpjTfyz0MoVTnzeYxroil2G82ki7MGG8= -github.com/jackc/chunkreader/v2 v2.0.1/go.mod h1:odVSm741yZoC3dpHEUXIqA9tQRhFrgOHwnPIn9lDKlk= -github.com/jackc/pgconn v0.0.0-20190420214824-7e0022ef6ba3/go.mod h1:jkELnwuX+w9qN5YIfX0fl88Ehu4XC3keFuOJJk9pcnA= -github.com/jackc/pgconn v0.0.0-20190824142844-760dd75542eb/go.mod h1:lLjNuW/+OfW9/pnVKPazfWOgNfH2aPem8YQ7ilXGvJE= -github.com/jackc/pgconn v0.0.0-20190831204454-2fabfa3c18b7/go.mod h1:ZJKsE/KZfsUgOEh9hBm+xYTstcNHg7UPMVJqRfQxq4s= -github.com/jackc/pgconn v1.8.0/go.mod h1:1C2Pb36bGIP9QHGBYCjnyhqu7Rv3sGshaQUvmfGIB/o= -github.com/jackc/pgconn v1.9.0/go.mod h1:YctiPyvzfU11JFxoXokUOOKQXQmDMoJL9vJzHH8/2JY= 
-github.com/jackc/pgconn v1.9.1-0.20210724152538-d89c8390a530/go.mod h1:4z2w8XhRbP1hYxkpTuBjTS3ne3J48K83+u0zoyvg2pI= -github.com/jackc/pgconn v1.14.3 h1:bVoTr12EGANZz66nZPkMInAV/KHD2TxH9npjXXgiB3w= -github.com/jackc/pgconn v1.14.3/go.mod h1:RZbme4uasqzybK2RK5c65VsHxoyaml09lx3tXOcO/VM= -github.com/jackc/pgio v1.0.0 h1:g12B9UwVnzGhueNavwioyEEpAmqMe1E/BN9ES+8ovkE= -github.com/jackc/pgio v1.0.0/go.mod h1:oP+2QK2wFfUWgr+gxjoBH9KGBb31Eio69xUb0w5bYf8= -github.com/jackc/pgmock v0.0.0-20190831213851-13a1b77aafa2/go.mod h1:fGZlG77KXmcq05nJLRkk0+p82V8B8Dw8KN2/V9c/OAE= -github.com/jackc/pgmock v0.0.0-20201204152224-4fe30f7445fd/go.mod h1:hrBW0Enj2AZTNpt/7Y5rr2xe/9Mn757Wtb2xeBzPv2c= -github.com/jackc/pgmock v0.0.0-20210724152146-4ad1a8207f65 h1:DadwsjnMwFjfWc9y5Wi/+Zz7xoE5ALHsRQlOctkOiHc= -github.com/jackc/pgmock v0.0.0-20210724152146-4ad1a8207f65/go.mod h1:5R2h2EEX+qri8jOWMbJCtaPWkrrNc7OHwsp2TCqp7ak= -github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM= -github.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5eyVu/W2HEg= -github.com/jackc/pgproto3 v1.1.0/go.mod h1:eR5FA3leWg7p9aeAqi37XOTgTIbkABlvcPB3E5rlc78= -github.com/jackc/pgproto3/v2 v2.0.0-alpha1.0.20190420180111-c116219b62db/go.mod h1:bhq50y+xrl9n5mRYyCBFKkpRVTLYJVWeCc+mEAI3yXA= -github.com/jackc/pgproto3/v2 v2.0.0-alpha1.0.20190609003834-432c2951c711/go.mod h1:uH0AWtUmuShn0bcesswc4aBTWGvw0cAxIJp+6OB//Wg= -github.com/jackc/pgproto3/v2 v2.0.0-rc3/go.mod h1:ryONWYqW6dqSg1Lw6vXNMXoBJhpzvWKnT95C46ckYeM= -github.com/jackc/pgproto3/v2 v2.0.0-rc3.0.20190831210041-4c03ce451f29/go.mod h1:ryONWYqW6dqSg1Lw6vXNMXoBJhpzvWKnT95C46ckYeM= -github.com/jackc/pgproto3/v2 v2.0.6/go.mod h1:WfJCnwN3HIg9Ish/j3sgWXnAfK8A9Y0bwXYU5xKaEdA= -github.com/jackc/pgproto3/v2 v2.1.1/go.mod h1:WfJCnwN3HIg9Ish/j3sgWXnAfK8A9Y0bwXYU5xKaEdA= -github.com/jackc/pgproto3/v2 v2.3.3 h1:1HLSx5H+tXR9pW3in3zaztoEwQYRC9SQaYUHjTSUOag= -github.com/jackc/pgproto3/v2 v2.3.3/go.mod 
h1:WfJCnwN3HIg9Ish/j3sgWXnAfK8A9Y0bwXYU5xKaEdA= -github.com/jackc/pgservicefile v0.0.0-20200714003250-2b9c44734f2b/go.mod h1:vsD4gTJCa9TptPL8sPkXrLZ+hDuNrZCnj29CQpr4X1E= -github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a h1:bbPeKD0xmW/Y25WS6cokEszi5g+S0QxI/d45PkRi7Nk= -github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a/go.mod h1:5TJZWKEWniPve33vlWYSoGYefn3gLQRzjfDlhSJ9ZKM= -github.com/jackc/pgtype v0.0.0-20190421001408-4ed0de4755e0/go.mod h1:hdSHsc1V01CGwFsrv11mJRHWJ6aifDLfdV3aVjFF0zg= -github.com/jackc/pgtype v0.0.0-20190824184912-ab885b375b90/go.mod h1:KcahbBH1nCMSo2DXpzsoWOAfFkdEtEJpPbVLq8eE+mc= -github.com/jackc/pgtype v0.0.0-20190828014616-a8802b16cc59/go.mod h1:MWlu30kVJrUS8lot6TQqcg7mtthZ9T0EoIBFiJcmcyw= -github.com/jackc/pgtype v1.8.1-0.20210724151600-32e20a603178/go.mod h1:C516IlIV9NKqfsMCXTdChteoXmwgUceqaLfjg2e3NlM= -github.com/jackc/pgtype v1.14.0 h1:y+xUdabmyMkJLyApYuPj38mW+aAIqCe5uuBB51rH3Vw= -github.com/jackc/pgtype v1.14.0/go.mod h1:LUMuVrfsFfdKGLw+AFFVv6KtHOFMwRgDDzBt76IqCA4= -github.com/jackc/pgx/v4 v4.0.0-20190420224344-cc3461e65d96/go.mod h1:mdxmSJJuR08CZQyj1PVQBHy9XOp5p8/SHH6a0psbY9Y= -github.com/jackc/pgx/v4 v4.0.0-20190421002000-1b8f0016e912/go.mod h1:no/Y67Jkk/9WuGR0JG/JseM9irFbnEPbuWV2EELPNuM= -github.com/jackc/pgx/v4 v4.0.0-pre1.0.20190824185557-6972a5742186/go.mod h1:X+GQnOEnf1dqHGpw7JmHqHc1NxDoalibchSk9/RWuDc= -github.com/jackc/pgx/v4 v4.12.1-0.20210724153913-640aa07df17c/go.mod h1:1QD0+tgSXP7iUjYm9C1NxKhny7lq6ee99u/z+IHFcgs= -github.com/jackc/pgx/v4 v4.18.3 h1:dE2/TrEsGX3RBprb3qryqSV9Y60iZN1C6i8IrmW9/BA= -github.com/jackc/pgx/v4 v4.18.3/go.mod h1:Ey4Oru5tH5sB6tV7hDmfWFahwF15Eb7DNXlRKx2CkVw= -github.com/jackc/puddle v0.0.0-20190413234325-e4ced69a3a2b/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk= -github.com/jackc/puddle v0.0.0-20190608224051-11cab39313c9/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk= -github.com/jackc/puddle v1.1.3/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk= 
-github.com/jackc/puddle v1.3.0 h1:eHK/5clGOatcjX3oWGBO/MpxpbHzSwud5EWTSCI+MX0= -github.com/jackc/puddle v1.3.0/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk= github.com/jackpal/go-nat-pmp v1.0.2 h1:KzKSgb7qkJvOUTqYl9/Hg/me3pWgBmERKrTGD7BdWus= github.com/jackpal/go-nat-pmp v1.0.2/go.mod h1:QPH045xvCAeXUZOxsnwmrtiCoxIr9eob+4orBN1SBKc= github.com/jmoiron/sqlx v1.2.0 h1:41Ip0zITnmWNR/vHV+S4m+VoUivnWY5E4OJfLZjCJMA= @@ -258,13 +190,10 @@ github.com/knadh/koanf/providers/rawbytes v0.1.0 h1:dpzgu2KO6uf6oCb4aP05KDmKmAmI github.com/knadh/koanf/providers/rawbytes v0.1.0/go.mod h1:mMTB1/IcJ/yE++A2iEZbY1MLygX7vttU+C+S/YmPu9c= github.com/knadh/koanf/v2 v2.1.1 h1:/R8eXqasSTsmDCsAyYj+81Wteg8AqrV9CP6gvsTsOmM= github.com/knadh/koanf/v2 v2.1.1/go.mod h1:4mnTRbZCK+ALuBXHZMjDfG9y714L7TykVnZkXbMU3Es= -github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= -github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= -github.com/kr/pty v1.1.8/go.mod h1:O1sed60cT9XZ5uDucP5qwvh+TE3NnUj51EiZO/lmSfw= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= @@ -273,9 +202,6 @@ github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+ github.com/leanovate/gopter v0.2.9 h1:fQjYxZaynp97ozCzfOyOuAGOU4aU/z37zf/tOujFk7c= github.com/leanovate/gopter v0.2.9/go.mod h1:U2L/78B+KVFIx2VmW6onHJQzXtFb+p5y3y2Sh+Jxxv8= github.com/lib/pq v1.0.0/go.mod 
h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= -github.com/lib/pq v1.1.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= -github.com/lib/pq v1.2.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= -github.com/lib/pq v1.10.2/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= github.com/lib/pq v1.10.9 h1:YXG7RB+JIjhP29X+OtkiDnYaXQwpS4JEWq7dtCCRUEw= github.com/lib/pq v1.10.9/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= github.com/logrusorgru/aurora v0.0.0-20181002194514-a7b3b318ed4e/go.mod h1:7rIyQOR62GCctdiQpZ/zOJlFyk6y+94wXzv6RNZgaR4= @@ -285,13 +211,8 @@ github.com/magiconair/properties v1.8.7 h1:IeQXZAiQcpL9mgcAe1Nu6cX9LLw6ExEHKjN0V github.com/magiconair/properties v1.8.7/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3vdS329zhj2hYo0= github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0= github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= -github.com/mattn/go-colorable v0.1.1/go.mod h1:FuOcm+DKB9mbwrcAfNl7/TZVBZ6rcnceauSikq3lYCQ= -github.com/mattn/go-colorable v0.1.6/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= -github.com/mattn/go-isatty v0.0.5/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= -github.com/mattn/go-isatty v0.0.7/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= -github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= @@ -302,8 +223,6 @@ github.com/mattn/go-sqlite3 v1.9.0/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOq github.com/mattn/go-sqlite3 v1.14.7/go.mod 
h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU= github.com/mattn/go-sqlite3 v1.14.23 h1:gbShiuAP1W5j9UOksQ06aiiqPMxYecovVGwmTxWtuw0= github.com/mattn/go-sqlite3 v1.14.23/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y= -github.com/miguelmota/go-solidity-sha3 v0.1.1 h1:3Y08sKZDtudtE5kbTBPC9RYJznoSYyWI9VD6mghU0CA= -github.com/miguelmota/go-solidity-sha3 v0.1.1/go.mod h1:sax1FvQF+f71j8W1uUHMZn8NxKyl5rYLks2nqj8RFEw= github.com/mitchellh/copystructure v1.2.0 h1:vpKXTN4ewci03Vljg/q9QvCGUDttBOGBIa15WveJJGw= github.com/mitchellh/copystructure v1.2.0/go.mod h1:qLl+cE2AmVv+CoeAwDPye/v+N2HKCj9FbZEVFJRxO9s= github.com/mitchellh/mapstructure v1.4.1/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= @@ -345,7 +264,6 @@ github.com/pelletier/go-toml/v2 v2.2.2/go.mod h1:1t835xjRzz80PqgE6HHgN2JOsmgYu/h github.com/pingcap/errors v0.11.4 h1:lFuQV/oaUMGcD2tqt+01ROSmJs75VG1ToEOkZIZ4nE4= github.com/pingcap/errors v0.11.4/go.mod h1:Oi8TUi2kEtXXLMJk9l1cGmz20kV3TaQ0usTwv5KuLY8= github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA= -github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= @@ -365,15 +283,11 @@ github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec h1:W09IVJc94 github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo= github.com/rivo/uniseg v0.2.0 h1:S1pD9weZBuJdFmowNwbpi7BJ8TNftyUImj/0WQi72jY= github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc= -github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= github.com/rogpeppe/go-internal v1.9.0/go.mod 
h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs= github.com/rogpeppe/go-internal v1.12.0 h1:exVL4IDcn6na9z1rAb56Vxr+CgyK3nn3O+epU5NdKM8= github.com/rogpeppe/go-internal v1.12.0/go.mod h1:E+RYuTGaKKdloAfM02xzb0FW3Paa99yedzYV+kq4uf4= github.com/rs/cors v1.7.0 h1:+88SsELBHx5r+hZ8TCkggzSstaWNbDvThkVK8H6f9ik= github.com/rs/cors v1.7.0/go.mod h1:gFx+x8UowdsKA9AchylcLynDq+nNFfI8FkUZdN/jGCU= -github.com/rs/xid v1.2.1/go.mod h1:+uKXf+4Djp6Md1KODXJxgGQPKngRmWyn10oCKFzNHOQ= -github.com/rs/zerolog v1.13.0/go.mod h1:YbFCdg8HfsridGWAh22vktObvhZbQsZXe4/zB0OKkWU= -github.com/rs/zerolog v1.15.0/go.mod h1:xYTKnLHcpfU2225ny5qZjxnj9NvkumZYjJHlAThCjNc= github.com/rubenv/sql-migrate v1.7.0 h1:HtQq1xyTN2ISmQDggnh0c9U3JlP8apWh8YO2jzlXpTI= github.com/rubenv/sql-migrate v1.7.0/go.mod h1:S4wtDEG1CKn+0ShpTtzWhFpHHI5PvCUtiGI+C+Z2THE= github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk= @@ -384,15 +298,8 @@ github.com/sagikazarmark/locafero v0.4.0 h1:HApY1R9zGo4DBgr7dqsTH/JJxLTTsOt7u6ke github.com/sagikazarmark/locafero v0.4.0/go.mod h1:Pe1W6UlPYUk/+wc/6KFhbORCfqzgYEpgQ3O5fPuL3H4= github.com/sagikazarmark/slog-shim v0.1.0 h1:diDBnUNK9N/354PgrxMywXnAwEr1QZcOr6gto+ugjYE= github.com/sagikazarmark/slog-shim v0.1.0/go.mod h1:SrcSrq8aKtyuqEI1uvTDTK1arOWRIczQRv+GVI1AkeQ= -github.com/satori/go.uuid v1.2.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0= github.com/shirou/gopsutil v3.21.4-0.20210419000835-c7a38de76ee5+incompatible h1:Bn1aCHHRnjv4Bl16T8rcaFjYSrGrIZvpiGO6P3Q4GpU= github.com/shirou/gopsutil v3.21.4-0.20210419000835-c7a38de76ee5+incompatible/go.mod h1:5b4v6he4MtMOwMlS0TUMTu2PcXUg8+E1lC7eC3UO/RA= -github.com/shopspring/decimal v0.0.0-20180709203117-cd690d0c9e24/go.mod h1:M+9NzErvs504Cn4c5DxATwIqPbtswREoFCre64PpcG4= -github.com/shopspring/decimal v1.2.0/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o= -github.com/shopspring/decimal v1.3.1 h1:2Usl1nmF/WZucqkFZhnfFYxxxu8LG21F6nPQBE5gKV8= -github.com/shopspring/decimal v1.3.1/go.mod 
h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o= -github.com/sirupsen/logrus v1.4.1/go.mod h1:ni0Sbl8bgC9z8RoU9G6nDWqqs/fq4eDPysMBDgk/93Q= -github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= github.com/sourcegraph/conc v0.3.0 h1:OQTbbt6P72L20UqAkXXuLOj79LfEanQ+YQFNpLA9ySo= github.com/sourcegraph/conc v0.3.0/go.mod h1:Sdozi7LEKbFPqYX2/J+iBAM6HpqSLTASQIKqDmF7Mt0= github.com/spf13/afero v1.11.0 h1:WJQKhtpdm3v2IzqG8VMqrr6Rf3UYpEF239Jy9wNepM8= @@ -406,15 +313,11 @@ github.com/spf13/viper v1.19.0/go.mod h1:GQUN9bilAbhU/jgc1bKs99f/suXKeUMct8Adx5+ github.com/status-im/keycard-go v0.2.0 h1:QDLFswOQu1r5jsycloeQh3bVU8n/NatHHaZobtDnDzA= github.com/status-im/keycard-go v0.2.0/go.mod h1:wlp8ZLbsmrF6g6WjugPAx+IzoLrkdf9+mHxBEeo3Hbg= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= -github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= -github.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE= github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY= github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA= -github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= -github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= @@ -447,59 +350,35 @@ github.com/xrash/smetrics v0.0.0-20240521201337-686a1a2994c1 
h1:gEOO8jv9F4OT7lGC github.com/xrash/smetrics v0.0.0-20240521201337-686a1a2994c1/go.mod h1:Ohn+xnUBiLI6FVj/9LpzZWtj1/D6lUovWYBkxHVV3aM= github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= -github.com/zenazn/goji v0.9.0/go.mod h1:7S9M489iMyHBNxwZnk9/EHS098H4/F6TATF2mIxtB1Q= go.opentelemetry.io/otel v1.24.0 h1:0LAOdjNmQeSTzGBzduGe/rU4tZhMwL5rWgtp9Ku5Jfo= go.opentelemetry.io/otel v1.24.0/go.mod h1:W7b9Ozg4nkF5tWI5zsXkaKKDjdVjpD4oAt9Qi/MArHo= go.opentelemetry.io/otel/metric v1.24.0 h1:6EhoGWWK28x1fbpA4tYTOWBkPefTDQnb8WSGXlc88kI= go.opentelemetry.io/otel/metric v1.24.0/go.mod h1:VYhLe1rFfxuTXLgj4CBiyz+9WYBA8pNGJgDcSFRKBco= go.opentelemetry.io/otel/trace v1.24.0 h1:CsKnnL4dUAr/0llH9FKuc698G04IrpWV0MQA/Y1YELI= go.opentelemetry.io/otel/trace v1.24.0/go.mod h1:HPc3Xr/cOApsBI154IU0OI0HJexz+aw5uPdbs3UCjNU= -go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= -go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= -go.uber.org/atomic v1.5.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ= -go.uber.org/atomic v1.6.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ= go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto= go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE= -go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0= -go.uber.org/multierr v1.3.0/go.mod h1:VgVr7evmIr6uPjLBxg28wmKNXyqE9akIJ5XnfpiKl+4= -go.uber.org/multierr v1.5.0/go.mod h1:FeouvMocqHpRaaGuG9EjoKcStLC43Zu/fmqdUMPcKYU= go.uber.org/multierr v1.10.0 h1:S0h4aNzvfcFsC3dRF1jLoaov7oRaKqRGC/pUEJ2yvPQ= go.uber.org/multierr v1.10.0/go.mod h1:20+QtiLqy0Nd6FdQB9TLXag12DsQkrbs3htMFfDN80Y= -go.uber.org/tools v0.0.0-20190618225709-2cfd321de3ee/go.mod h1:vJERXedbb3MVM5f9Ejo0C68/HhF8uaILCdgjnY+goOA= -go.uber.org/zap v1.9.1/go.mod 
h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q= -go.uber.org/zap v1.10.0/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q= -go.uber.org/zap v1.13.0/go.mod h1:zwrFLgMcdUuIBviXEYEH1YKNaOBnKXsx2IPda5bBwHM= go.uber.org/zap v1.27.0 h1:aJMhYGrd5QSmlpLMr2MftRKl7t8J8PTZPA732ud/XR8= go.uber.org/zap v1.27.0/go.mod h1:GB2qFLM7cTU87MWRP2mPIjqfIDnGu+VIO4V/SdhGo2E= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= -golang.org/x/crypto v0.0.0-20190411191339-88737f569e3a/go.mod h1:WFFai1msRO1wXaEeE5yQxYXgSfI8pQAWXbQop6sCtWE= -golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20190820162420-60c769a6c586/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= -golang.org/x/crypto v0.0.0-20201203163018-be400aefbc4c/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I= -golang.org/x/crypto v0.0.0-20210616213533-5ff15b29337e/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= -golang.org/x/crypto v0.0.0-20210711020723-a769d52b0f97/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.27.0 h1:GXm2NjJrPaiv/h1tb2UH8QfgC/hOf/+z0p6PT8o1w7A= golang.org/x/crypto v0.27.0/go.mod h1:1Xngt8kV6Dvbssa53Ziq6Eqn0HqbZi5Z6R0ZpwQzt70= golang.org/x/exp v0.0.0-20241108190413-2d47ceb2692f h1:XdNn9LlyWAhLVp6P/i8QYBW+hlyhrhei9uErw2B5GJo= golang.org/x/exp v0.0.0-20241108190413-2d47ceb2692f/go.mod h1:D5SMRVC3C2/4+F/DB1wZsLRnSNimn2Sp/NPsCrsv8ak= -golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= -golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= -golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod 
h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.22.0 h1:D4nJWe9zXqHOmWqj4VMOJhvzj7bEZg4wEYa759z1pH4= golang.org/x/mod v0.22.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY= golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20190813141303-74dc4d7220e7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200520004742-59133d7f0dd7/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= -golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210428140749-89ef3d95e781/go.mod h1:OJAsFXCWl8Ukc7SiCT/9KSuxbyM7479/AVlXFRxuMCk= golang.org/x/net v0.0.0-20220225172249-27dd8689420f/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= golang.org/x/net v0.0.0-20220607020251-c690dde0001d/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= @@ -512,22 +391,14 @@ golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJ golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.9.0 h1:fEo0HyrW1GIgZdpbhCRO0PkJajUS5H9IFUztCgEo2jQ= golang.org/x/sync v0.9.0/go.mod 
h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= -golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20190403152447-81d4e9dc473e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190813064441-fde4db37ae7a/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190904154756-749cb33beabd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191120155948-bd437916bb0e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys 
v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -545,13 +416,10 @@ golang.org/x/sys v0.11.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.14.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.25.0 h1:r+8e+loiHxRqhXVl6ML1nO3l1+oFoWbnlu2Ehimmi34= golang.org/x/sys v0.25.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= -golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= golang.org/x/text v0.18.0 h1:XvMDiNzPAl0jr17s6W9lcaIhGUfUORdGCNsuLmPG224= @@ -560,30 +428,17 @@ golang.org/x/time v0.0.0-20200416051211-89c76fbcd5d1/go.mod h1:tRJNPiyCQ0inRvYxb golang.org/x/time v0.5.0 h1:o7cqy6amK/52YcAKIPlM3a+Fpj35zvRj2TP+e1xFSfk= golang.org/x/time v0.5.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190425163242-31fd60d6bfdc/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= -golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod 
h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= -golang.org/x/tools v0.0.0-20190823170909-c4a336ef6a2f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191029041327-9cc4af7d6b2c/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191029190741-b9c20aec41a5/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20200103221440-774c71fcf114/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20201224043029-2b0845dc783e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.27.0 h1:qEKojBykQkQ4EynWy4S8Weg69NumxKdn40Fce3uc/8o= golang.org/x/tools v0.27.0/go.mod h1:sUi0ZgbwW9ZPAq26Ekut+weQPR5eIM6GQLQ1Yjm1H0Q= -golang.org/x/xerrors v0.0.0-20190410155217-1f06c39b4373/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20190513163551-3ee3066db522/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20220517211312-f3a8303e98df/go.mod h1:K8+ghG5WaK9qNqU5K3HdILfMLy1f3aNYFI/wnl100a8= -google.golang.org/genproto/googleapis/rpc v0.0.0-20240318140521-94a12d6c2237 h1:NnYq6UN9ReLM9/Y01KWNOWyI5xQ9kbIms5GGJVwS/Yc= 
-google.golang.org/genproto/googleapis/rpc v0.0.0-20240318140521-94a12d6c2237/go.mod h1:WtryC6hu0hhx87FDGxWCDptyssuo68sk10vYjF+T9fY= -google.golang.org/grpc v1.64.0 h1:KH3VH9y/MgNQg1dE7b3XfVK0GsPSIzJwdF617gUSbvY= -google.golang.org/grpc v1.64.0/go.mod h1:oxjF8E3FBnjp+/gVFYdWacaLDx9na1aqy9oovLpxQYg= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= @@ -598,9 +453,7 @@ gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8 gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= -gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys= -gopkg.in/inconshreveable/log15.v2 v2.0.0-20180818164646-67afb5ed74ec/go.mod h1:aPpfJ7XW+gOuirDoZ8gHhLh3kZ1B08FtV2bbmy7Jv3s= gopkg.in/ini.v1 v1.67.0 h1:Dgnx+6+nfE+IfzjUEISNeydPJh9AXNNsWbGP9KzCsOA= gopkg.in/ini.v1 v1.67.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= gopkg.in/natefinch/lumberjack.v2 v2.2.1 h1:bBRl1b0OH9s/DuPhuXpNl+VtCaJXFZ5/uEFST95x9zc= @@ -615,7 +468,6 @@ gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -honnef.co/go/tools v0.0.1-2019.2.3/go.mod 
h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= modernc.org/cc/v4 v4.21.4 h1:3Be/Rdo1fpr8GrQ7IVw9OHtplU4gWbb+wNgeoBMmGLQ= modernc.org/cc/v4 v4.21.4/go.mod h1:HM7VJTZbUCR3rV8EYBi9wxnJ0ZBRiGE5OeGXNA0IsLQ= modernc.org/ccgo/v4 v4.21.0 h1:kKPI3dF7RIag8YcToh5ZwDcVMIv6VGa0ED5cvh0LMW4= diff --git a/l1infotreesync/downloader.go b/l1infotreesync/downloader.go index 073547c2..6674e1e7 100644 --- a/l1infotreesync/downloader.go +++ b/l1infotreesync/downloader.go @@ -90,9 +90,12 @@ func buildAppender(client EthClienter, globalExitRoot, log.Error(err) return nil, err } - err = sanityCheckContracts(globalExitRoot, rollupManager, ger, rm) - if err != nil && flags&FlagAllowWrongContractsAddrs == 0 { - return nil, fmt.Errorf("buildAppender: fails sanity check contracts. Err:%w", err) + + if flags&FlagAllowWrongContractsAddrs == 0 { + err = sanityCheckContracts(globalExitRoot, rollupManager, ger, rm) + if err != nil { + return nil, fmt.Errorf("buildAppender: fails sanity check contracts. Err:%w", err) + } } appender := make(sync.LogAppenderMap) diff --git a/l1infotreesync/downloader_test.go b/l1infotreesync/downloader_test.go index e2f1ed33..7dd52bfb 100644 --- a/l1infotreesync/downloader_test.go +++ b/l1infotreesync/downloader_test.go @@ -1,7 +1,7 @@ package l1infotreesync import ( - "fmt" + "errors" "math/big" "strings" "testing" @@ -14,24 +14,43 @@ import ( "github.com/stretchr/testify/require" ) -func TestBuildAppenderErrorOnBadContractAddr(t *testing.T) { - l1Client := mocks_l1infotreesync.NewEthClienter(t) - globalExitRoot := common.HexToAddress("0x1") - rollupManager := common.HexToAddress("0x2") - l1Client.EXPECT().CallContract(mock.Anything, mock.Anything, mock.Anything).Return(nil, fmt.Errorf("test-error")) - flags := FlagNone - _, err := buildAppender(l1Client, globalExitRoot, rollupManager, flags) - require.Error(t, err) -} +func TestBuildAppender(t *testing.T) { + tests := []struct { + name string + flags CreationFlags + mockError error + expectError bool + }{ + { + name: 
"ErrorOnBadContractAddr", + flags: FlagNone, + mockError: errors.New("test-error"), + expectError: true, + }, + { + name: "BypassBadContractAddr", + flags: FlagAllowWrongContractsAddrs, + mockError: nil, + expectError: false, + }, + } -func TestBuildAppenderBypassBadContractAddr(t *testing.T) { - l1Client := mocks_l1infotreesync.NewEthClienter(t) - globalExitRoot := common.HexToAddress("0x1") - rollupManager := common.HexToAddress("0x2") - l1Client.EXPECT().CallContract(mock.Anything, mock.Anything, mock.Anything).Return(nil, fmt.Errorf("test-error")) - flags := FlagAllowWrongContractsAddrs - _, err := buildAppender(l1Client, globalExitRoot, rollupManager, flags) - require.NoError(t, err) + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + l1Client := mocks_l1infotreesync.NewEthClienter(t) + globalExitRoot := common.HexToAddress("0x1") + rollupManager := common.HexToAddress("0x2") + if tt.flags == FlagNone { + l1Client.EXPECT().CallContract(mock.Anything, mock.Anything, mock.Anything).Return(nil, tt.mockError).Twice() + } + _, err := buildAppender(l1Client, globalExitRoot, rollupManager, tt.flags) + if tt.expectError { + require.Error(t, err) + } else { + require.NoError(t, err) + } + }) + } } func TestBuildAppenderVerifiedContractAddr(t *testing.T) { diff --git a/l1infotreesync/l1infotreesync.go b/l1infotreesync/l1infotreesync.go index c686cb1a..2d49f222 100644 --- a/l1infotreesync/l1infotreesync.go +++ b/l1infotreesync/l1infotreesync.go @@ -13,14 +13,16 @@ import ( "github.com/ethereum/go-ethereum/common" ) -type CreationFlags uint64 - const ( reorgDetectorID = "l1infotreesync" downloadBufferSize = 1000 - // CreationFlags defitinion - FlagNone CreationFlags = 0 - FlagAllowWrongContractsAddrs CreationFlags = 1 << iota // Allow to set wrong contracts addresses +) + +type CreationFlags uint64 + +const ( + FlagNone CreationFlags = 1 << iota // Check for correct contracts addresses + FlagAllowWrongContractsAddrs // Allow to set wrong contracts 
addresses ) var ( diff --git a/lastgersync/e2e_test.go b/lastgersync/e2e_test.go index e1fe3d25..f72100af 100644 --- a/lastgersync/e2e_test.go +++ b/lastgersync/e2e_test.go @@ -19,7 +19,13 @@ import ( func TestE2E(t *testing.T) { ctx := context.Background() setup := helpers.NewE2EEnvWithEVML2(t) - dbPathSyncer := path.Join(t.TempDir(), "lastgersyncTestE2E.sqlite") + dbPathSyncer := path.Join(t.TempDir(), "lastGERSyncTestE2E.sqlite") + const ( + retryAfterErrorPeriod = time.Millisecond * 30 + maxRetryAttemptsAfterError = 10 + waitForNewBlocksPeriod = time.Millisecond * 30 + syncBlockChunkSize = 10 + ) syncer, err := lastgersync.New( ctx, dbPathSyncer, @@ -27,11 +33,11 @@ func TestE2E(t *testing.T) { setup.L2Environment.SimBackend.Client(), setup.L2Environment.GERAddr, setup.InfoTreeSync, - 0, - 0, + retryAfterErrorPeriod, + maxRetryAttemptsAfterError, etherman.LatestBlock, - time.Millisecond*30, - 10, + waitForNewBlocksPeriod, + syncBlockChunkSize, ) require.NoError(t, err) go syncer.Start(ctx) diff --git a/proto/src/proto/aggregator/v1/aggregator.proto b/proto/src/proto/aggregator/v1/aggregator.proto deleted file mode 100644 index d462e205..00000000 --- a/proto/src/proto/aggregator/v1/aggregator.proto +++ /dev/null @@ -1,330 +0,0 @@ -syntax = "proto3"; - -package aggregator.v1; - -option go_package = ""github.com/agglayer/aggkit/proverclient/prover"; - -message Version { - string v0_0_1 = 1; -} - -// timestamps are represented in unix time in seconds - -/** - * Define all methods implementes by the gRPC - * Channel: prover receives aggregator messages and returns prover messages with the same id - */ -service AggregatorService { - rpc Channel(stream ProverMessage) returns (stream AggregatorMessage) {} -} - -message AggregatorMessage -{ - string id = 1; - oneof request - { - GetStatusRequest get_status_request = 2; - GenBatchProofRequest gen_batch_proof_request = 3; - GenAggregatedProofRequest gen_aggregated_proof_request = 4; - GenFinalProofRequest 
gen_final_proof_request = 5; - CancelRequest cancel_request = 6; - GetProofRequest get_proof_request = 7; - GenStatelessBatchProofRequest gen_stateless_batch_proof_request = 8; - } -} - -message ProverMessage -{ - string id = 1; - oneof response - { - GetStatusResponse get_status_response = 2; - GenBatchProofResponse gen_batch_proof_response = 3; - GenAggregatedProofResponse gen_aggregated_proof_response = 4; - GenFinalProofResponse gen_final_proof_response = 5; - CancelResponse cancel_response = 6; - GetProofResponse get_proof_response = 7; - } -} - -/////////////////// -// Request messages -/////////////////// - -/** - * @dev GetStatusRequest - */ -message GetStatusRequest {} - -/** - * @dev GenBatchProofRequest - * @param {input} - input prover - */ -message GenBatchProofRequest { - InputProver input = 1; -} - -message GenStatelessBatchProofRequest { - StatelessInputProver input = 1; -} - -/** - * @dev GenAggregatedProofRequest - * @param {recursive_proof_1} - proof json of the first batch to aggregate - * @param {recursive_proof_2} - proof json of the second batch to aggregate - */ -message GenAggregatedProofRequest { - string recursive_proof_1 = 1; - string recursive_proof_2 = 2; -} - -/** - * @dev GenFinalProofRequest - * @param {recursive_proof} - proof json of the batch or aggregated proof to finalise - * @param {aggregator_addr} - address of the aggregator - */ -message GenFinalProofRequest { - string recursive_proof = 1; - string aggregator_addr = 2; -} - -/** - * @dev CancelRequest - * @param {id} - identifier of the proof request to cancel - */ - message CancelRequest { - string id = 1; -} - -/** - * @dev Request GetProof - * @param {id} - proof identifier of the proof request - * @param {timeout} - time to wait until the service responds - */ -message GetProofRequest { - string id = 1; - uint64 timeout = 2; -} - -///////////////////// -// Responses messages -///////////////////// - -/** - * @dev Response GetStatus - * @param {status} - server status - 
* - BOOTING: being ready to compute proofs - * - COMPUTING: busy computing a proof - * - IDLE: waiting for a proof to compute - * - HALT: stop - * @param {last_computed_request_id} - last proof identifier that has been computed - * @param {last_computed_end_time} - last proof timestamp when it was finished - * @param {current_computing_request_id} - id of the proof that is being computed - * @param {current_computing_start_time} - timestamp when the proof that is being computed started - * @param {version_proto} - .proto verion - * @param {version_server} - server version - * @param {pending_request_queue_ids} - list of identifierss of proof requests that are in the pending queue - * @param {prover_name} - id of this prover server, normally specified via config.json, or UNSPECIFIED otherwise; it does not change if prover reboots - * @param {prover_id} - id of this prover instance or reboot; it changes if prover reboots; it is a UUID, automatically generated during the initialization - * @param {number_of_cores} - number of cores in the system where the prover is running - * @param {total_memory} - total memory in the system where the prover is running - * @param {free_memory} - free memory in the system where the prover is running - */ -message GetStatusResponse { - enum Status { - STATUS_UNSPECIFIED = 0; - STATUS_BOOTING = 1; - STATUS_COMPUTING = 2; - STATUS_IDLE = 3; - STATUS_HALT = 4; - } - Status status = 1; - string last_computed_request_id = 2; - uint64 last_computed_end_time = 3; - string current_computing_request_id = 4; - uint64 current_computing_start_time = 5; - string version_proto = 6; - string version_server = 7; - repeated string pending_request_queue_ids = 8; - string prover_name = 9; - string prover_id = 10; - uint64 number_of_cores = 11; - uint64 total_memory = 12; - uint64 free_memory = 13; - uint64 fork_id = 14; -} - -/** - * @dev Result - * - OK: succesfully completed - * - ERROR: request is not correct, i.e. 
input data is wrong - * - INTERNAL_ERROR: internal server error when delivering the response - */ -enum Result { - RESULT_UNSPECIFIED = 0; - RESULT_OK = 1; - RESULT_ERROR = 2; - RESULT_INTERNAL_ERROR = 3; -} - -/** - * @dev GenBatchProofResponse - * @param {id} - proof identifier, to be used in GetProofRequest() - * @param {result} - request result - */ -message GenBatchProofResponse { - string id = 1; - Result result = 2; -} - -/** - * @dev GenAggregatedProofResponse - * @param {id} - proof identifier, to be used in GetProofRequest() - * @param {result} - request result - */ -message GenAggregatedProofResponse { - string id = 1; - Result result = 2; -} - -/** - * @dev Response GenFinalProof - * @param {id} - proof identifier, to be used in GetProofRequest() - * @param {result} - request result - */ -message GenFinalProofResponse { - string id = 1; - Result result = 2; -} - -/** - * @dev CancelResponse - * @param {result} - request result - */ -message CancelResponse { - Result result = 1; -} - -/** - * @dev GetProofResponse - * @param {id} - proof identifier - * @param {final_proof} - groth16 proof + public circuit inputs - * @param {recursive_proof} - recursive proof json - * @param {result} - proof result - * - COMPLETED_OK: proof has been computed successfully and it is valid - * - ERROR: request error - * - COMPLETED_ERROR: proof has been computed successfully and it is not valid - * - PENDING: proof is being computed - * - INTERNAL_ERROR: server error during proof computation - * - CANCEL: proof has been cancelled - * @param {result_string} - extends result information - */ -message GetProofResponse { - enum Result { - RESULT_UNSPECIFIED = 0; - RESULT_COMPLETED_OK = 1; - RESULT_ERROR = 2; - RESULT_COMPLETED_ERROR = 3; - RESULT_PENDING = 4; - RESULT_INTERNAL_ERROR = 5; - RESULT_CANCEL = 6; - } - string id = 1; - oneof proof { - FinalProof final_proof = 2; - string recursive_proof =3; - } - Result result = 4; - string result_string = 5; -} - -/* - * @dev 
FinalProof - * @param {proof} - groth16 proof - * @param {public} - public circuit inputs -*/ -message FinalProof { - string proof = 1; - PublicInputsExtended public = 2; -} - -/* - * @dev PublicInputs - * @param {old_state_root} - * @param {old_acc_input_hash} - * @param {old_batch_num} - * @param {chain_id} - * @param {batch_l2_data} - * @param {global_exit_root} - * @param {sequencer_addr} - * @param {aggregator_addr} - */ -message PublicInputs { - bytes old_state_root = 1; - bytes old_acc_input_hash = 2; - uint64 old_batch_num = 3; - uint64 chain_id = 4; - uint64 fork_id = 5; - bytes batch_l2_data = 6; - bytes l1_info_root = 7; - uint64 timestamp_limit = 8; - string sequencer_addr = 9; - bytes forced_blockhash_l1 = 10; - string aggregator_addr = 12; - map l1_info_tree_data = 16; -} - -message StatelessPublicInputs { - bytes witness = 1; - bytes old_acc_input_hash = 2; - uint64 old_batch_num = 3; - uint64 chain_id = 4; - uint64 fork_id = 5; - bytes batch_l2_data = 6; - bytes l1_info_root = 7; - uint64 timestamp_limit = 8; - string sequencer_addr = 9; - bytes forced_blockhash_l1 = 10; - string aggregator_addr = 11; - map l1_info_tree_data = 12; -} - -// l1InfoTree leaf values -message L1Data { - bytes global_exit_root = 1; - bytes blockhash_l1 = 2; - uint32 min_timestamp = 3; - repeated bytes smt_proof = 4; -} - -/** - * @dev InputProver - * @param {public_inputs} - public inputs - * @param {db} - database containing all key-values in smt matching the old state root - * @param {contracts_bytecode} - key is the hash(contractBytecode), value is the bytecode itself - */ -message InputProver { - PublicInputs public_inputs = 1; - map db = 4; // For debug/testing purpposes only. Don't fill this on production - map contracts_bytecode = 5; // For debug/testing purpposes only. 
Don't fill this on production -} - -message StatelessInputProver { - StatelessPublicInputs public_inputs = 1; -} - -/** - * @dev PublicInputsExtended - * @param {public_inputs} - public inputs - * @param {new_state_root} - final state root. Used as a sanity check. - * @param {new_acc_input_hash} - final accumulate input hash. Used as a sanity check. - * @param {new_local_exit_root} - new local exit root. Used as a sanity check. - * @param {new_batch_num} - final num batch. Used as a sanity check. - */ -message PublicInputsExtended { - PublicInputs public_inputs = 1; - bytes new_state_root = 2; - bytes new_acc_input_hash = 3; - bytes new_local_exit_root = 4; - uint64 new_batch_num = 5; -} diff --git a/rpc/batch.go b/rpc/batch.go deleted file mode 100644 index 965266c6..00000000 --- a/rpc/batch.go +++ /dev/null @@ -1,149 +0,0 @@ -package rpc - -import ( - "encoding/json" - "errors" - "fmt" - "math/big" - - "github.com/0xPolygon/cdk-rpc/rpc" - "github.com/agglayer/aggkit/log" - "github.com/agglayer/aggkit/rpc/types" - "github.com/agglayer/aggkit/state" - "github.com/ethereum/go-ethereum/common" -) - -var ( - // ErrBusy is returned when the witness server is busy - ErrBusy = errors.New("witness server is busy") -) - -const busyResponse = "busy" - -type BatchEndpoints struct { - url string -} - -func NewBatchEndpoints(url string) *BatchEndpoints { - return &BatchEndpoints{url: url} -} - -func (b *BatchEndpoints) GetBatch(batchNumber uint64) (*types.RPCBatch, error) { - type zkEVMBatch struct { - AccInputHash string `json:"accInputHash"` - Blocks []string `json:"blocks"` - BatchL2Data string `json:"batchL2Data"` - Coinbase string `json:"coinbase"` - GlobalExitRoot string `json:"globalExitRoot"` - LocalExitRoot string `json:"localExitRoot"` - StateRoot string `json:"stateRoot"` - Closed bool `json:"closed"` - Timestamp string `json:"timestamp"` - } - - zkEVMBatchData := zkEVMBatch{} - - log.Infof("Getting batch %d from RPC", batchNumber) - - response, err := 
rpc.JSONRPCCall(b.url, "zkevm_getBatchByNumber", batchNumber) - if err != nil { - return nil, err - } - - // Check if the response is nil - if response.Result == nil { - return nil, state.ErrNotFound - } - - // Check if the response is an error - if response.Error != nil { - return nil, fmt.Errorf("error in the response calling zkevm_getBatchByNumber: %v", response.Error) - } - - // Get the batch number from the response hex string - err = json.Unmarshal(response.Result, &zkEVMBatchData) - if err != nil { - return nil, fmt.Errorf("error unmarshalling the batch from the response calling zkevm_getBatchByNumber: %w", err) - } - - rpcBatch := types.NewRPCBatch(batchNumber, common.HexToHash(zkEVMBatchData.AccInputHash), zkEVMBatchData.Blocks, - common.FromHex(zkEVMBatchData.BatchL2Data), common.HexToHash(zkEVMBatchData.GlobalExitRoot), - common.HexToHash(zkEVMBatchData.LocalExitRoot), common.HexToHash(zkEVMBatchData.StateRoot), - common.HexToAddress(zkEVMBatchData.Coinbase), zkEVMBatchData.Closed) - - if len(zkEVMBatchData.Blocks) > 0 { - lastL2BlockTimestamp, err := b.GetL2BlockTimestamp(zkEVMBatchData.Blocks[len(zkEVMBatchData.Blocks)-1]) - if err != nil { - return nil, fmt.Errorf("error getting the last l2 block timestamp from the rpc: %w", err) - } - rpcBatch.SetLastL2BLockTimestamp(lastL2BlockTimestamp) - } else { - log.Infof("No blocks in the batch, setting the last l2 block timestamp from the batch data") - rpcBatch.SetLastL2BLockTimestamp(new(big.Int).SetBytes(common.FromHex(zkEVMBatchData.Timestamp)).Uint64()) - } - - return rpcBatch, nil -} - -func (b *BatchEndpoints) GetL2BlockTimestamp(blockHash string) (uint64, error) { - type zkeEVML2Block struct { - Timestamp string `json:"timestamp"` - } - - log.Infof("Getting l2 block timestamp from RPC. 
Block hash: %s", blockHash) - - response, err := rpc.JSONRPCCall(b.url, "eth_getBlockByHash", blockHash, false) - if err != nil { - return 0, err - } - - // Check if the response is an error - if response.Error != nil { - return 0, fmt.Errorf("error in the response calling eth_getBlockByHash: %v", response.Error) - } - - // Get the l2 block from the response - l2Block := zkeEVML2Block{} - err = json.Unmarshal(response.Result, &l2Block) - if err != nil { - return 0, fmt.Errorf("error unmarshalling the l2 block from the response calling eth_getBlockByHash: %w", err) - } - - return new(big.Int).SetBytes(common.FromHex(l2Block.Timestamp)).Uint64(), nil -} - -func (b *BatchEndpoints) GetWitness(batchNumber uint64, fullWitness bool) ([]byte, error) { - var ( - witness string - response rpc.Response - err error - ) - - witnessType := "trimmed" - if fullWitness { - witnessType = "full" - } - - log.Infof("Requesting witness for batch %d of type %s", batchNumber, witnessType) - - response, err = rpc.JSONRPCCall(b.url, "zkevm_getBatchWitness", batchNumber, witnessType) - if err != nil { - return nil, err - } - - // Check if the response is an error - if response.Error != nil { - if response.Error.Message == busyResponse { - return nil, ErrBusy - } - - return nil, fmt.Errorf("error from witness for batch %d: %v", batchNumber, response.Error) - } - - err = json.Unmarshal(response.Result, &witness) - if err != nil { - return nil, err - } - - return common.FromHex(witness), nil -} diff --git a/rpc/batch_test.go b/rpc/batch_test.go deleted file mode 100644 index d6940bf3..00000000 --- a/rpc/batch_test.go +++ /dev/null @@ -1,265 +0,0 @@ -package rpc - -import ( - "encoding/json" - "errors" - "fmt" - "net/http" - "net/http/httptest" - "testing" - - "github.com/0xPolygon/cdk-rpc/rpc" - "github.com/ethereum/go-ethereum/common" - "github.com/stretchr/testify/require" -) - -func Test_getBatchFromRPC(t *testing.T) { - t.Parallel() - - tests := []struct { - name string - batch uint64 - 
getBatchByNumberResp string - getBlockByHasResp string - getBatchByNumberErr error - getBlockByHashErr error - expectBlocks int - expectData []byte - expectTimestamp uint64 - expectErr error - }{ - { - name: "successfully fetched", - getBatchByNumberResp: `{"jsonrpc":"2.0","id":1,"result":{"blocks":["1", "2", "3"],"batchL2Data":"0x1234567"}}`, - getBlockByHasResp: `{"jsonrpc":"2.0","id":1,"result":{"timestamp":"0x123456"}}`, - batch: 0, - expectBlocks: 3, - expectData: common.FromHex("0x1234567"), - expectTimestamp: 1193046, - expectErr: nil, - }, - { - name: "invalid json", - getBatchByNumberResp: `{"jsonrpc":"2.0","id":1,"result":{"blocks":invalid,"batchL2Data":"test"}}`, - batch: 0, - expectBlocks: 3, - expectData: nil, - expectErr: errors.New("invalid character 'i' looking for beginning of value"), - }, - { - name: "wrong json", - getBatchByNumberResp: `{"jsonrpc":"2.0","id":1,"result":{"blocks":"invalid","batchL2Data":"test"}}`, - batch: 0, - expectBlocks: 3, - expectData: nil, - expectErr: errors.New("error unmarshalling the batch from the response calling zkevm_getBatchByNumber: json: cannot unmarshal string into Go struct field zkEVMBatch.blocks of type []string"), - }, - { - name: "error in the response", - getBatchByNumberResp: `{"jsonrpc":"2.0","id":1,"result":null,"error":{"code":-32602,"message":"Invalid params"}}`, - batch: 0, - expectBlocks: 0, - expectData: nil, - expectErr: errors.New("error in the response calling zkevm_getBatchByNumber: &{-32602 Invalid params }"), - }, - { - name: "http failed", - getBatchByNumberErr: errors.New("failed to fetch"), - batch: 0, - expectBlocks: 0, - expectData: nil, - expectErr: errors.New("invalid status code, expected: 200, found: 500"), - }, - } - - for _, tt := range tests { - tt := tt - - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - - srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - var req rpc.Request - err := json.NewDecoder(r.Body).Decode(&req) - 
require.NoError(t, err) - - switch req.Method { - case "zkevm_getBatchByNumber": - if tt.getBatchByNumberErr != nil { - http.Error(w, tt.getBatchByNumberErr.Error(), http.StatusInternalServerError) - return - } - - _, _ = w.Write([]byte(tt.getBatchByNumberResp)) - case "eth_getBlockByHash": - if tt.getBlockByHashErr != nil { - http.Error(w, tt.getBlockByHashErr.Error(), http.StatusInternalServerError) - return - } - _, _ = w.Write([]byte(tt.getBlockByHasResp)) - default: - http.Error(w, "method not found", http.StatusNotFound) - } - })) - defer srv.Close() - - rcpBatchClient := NewBatchEndpoints(srv.URL) - rpcBatch, err := rcpBatchClient.GetBatch(tt.batch) - if rpcBatch != nil { - copiedrpcBatch := rpcBatch.DeepCopy() - require.NotNil(t, copiedrpcBatch) - str := copiedrpcBatch.String() - require.NotEmpty(t, str) - } - if tt.expectErr != nil { - require.Equal(t, tt.expectErr.Error(), err.Error()) - } else { - require.NoError(t, err) - require.Equal(t, tt.expectTimestamp, rpcBatch.LastL2BLockTimestamp()) - require.Equal(t, tt.expectData, rpcBatch.L2Data()) - } - }) - } -} - -func Test_getBatchWitnessRPC(t *testing.T) { - t.Parallel() - - tests := []struct { - name string - batch uint64 - getBatchWitnessResp string - getBatchWitnessErr error - expectData []byte - expectErr error - full bool - }{ - { - name: "get batch trimmed witness success", - batch: 1, - getBatchWitnessResp: `{"jsonrpc":"2.0","id":1,"result":"0x0123456"}`, - getBatchWitnessErr: nil, - expectData: common.FromHex("0x0123456"), - expectErr: nil, - full: false, - }, - { - name: "get batch full witness success", - batch: 1, - getBatchWitnessResp: `{"jsonrpc":"2.0","id":1,"result":"0x0123456"}`, - getBatchWitnessErr: nil, - expectData: common.FromHex("0x0123456"), - expectErr: nil, - full: true, - }, - { - name: "get batch witness busy", - batch: 1, - getBatchWitnessResp: `{"jsonrpc":"2.0","id":1,"result":"", "error":{"code":-32000,"message":"busy"}}`, - getBatchWitnessErr: nil, - expectData: []byte{}, - 
expectErr: ErrBusy, - full: false, - }, - } - - for _, tt := range tests { - tt := tt - - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - - srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - var req rpc.Request - err := json.NewDecoder(r.Body).Decode(&req) - require.NoError(t, err) - - switch req.Method { - case "zkevm_getBatchWitness": - if tt.getBatchWitnessErr != nil { - http.Error(w, tt.getBatchWitnessErr.Error(), http.StatusInternalServerError) - return - } - _, _ = w.Write([]byte(tt.getBatchWitnessResp)) - default: - http.Error(w, "method not found", http.StatusNotFound) - } - })) - defer srv.Close() - - rcpBatchClient := NewBatchEndpoints(srv.URL) - witness, err := rcpBatchClient.GetWitness(tt.batch, false) - if tt.expectErr != nil { - require.Equal(t, tt.expectErr.Error(), err.Error()) - } else { - require.NoError(t, err) - require.Equal(t, tt.expectData, witness) - } - }) - } -} - -func Test_getGetL2BlockTimestamp(t *testing.T) { - t.Parallel() - - tests := []struct { - name string - blockHash []byte - response string - error error - expectData uint64 - expectErr error - }{ - { - name: "success", - blockHash: []byte{1}, - response: `{"jsonrpc":"2.0","id":1,"result":{"timestamp":"0x123456"}}`, - error: nil, - expectData: uint64(0x123456), - expectErr: nil, - }, - { - name: "fail", - blockHash: []byte{2}, - response: `{"jsonrpc":"2.0","id":1,"result":{"timestamp":"0x123456"}}`, - error: fmt.Errorf("error"), - expectData: 0, - expectErr: fmt.Errorf("invalid status code, expected: 200, found: 500"), - }, - } - - for _, tt := range tests { - tt := tt - - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - - srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - var req rpc.Request - err := json.NewDecoder(r.Body).Decode(&req) - require.NoError(t, err) - - switch req.Method { - case "eth_getBlockByHash": - if tt.error != nil { - http.Error(w, tt.error.Error(), 
http.StatusInternalServerError) - return - } - _, _ = w.Write([]byte(tt.response)) - default: - http.Error(w, "method not found", http.StatusNotFound) - } - })) - defer srv.Close() - - rcpBatchClient := NewBatchEndpoints(srv.URL) - timestamp, err := rcpBatchClient.GetL2BlockTimestamp(string(tt.blockHash)) - if tt.expectErr != nil { - require.Equal(t, tt.expectErr.Error(), err.Error()) - } else { - require.NoError(t, err) - require.Equal(t, tt.expectData, timestamp) - } - }) - } -} diff --git a/rpc/types/rpcbatch.go b/rpc/types/rpcbatch.go index 41900b9a..89517f7f 100644 --- a/rpc/types/rpcbatch.go +++ b/rpc/types/rpcbatch.go @@ -3,7 +3,6 @@ package types import ( "fmt" - "github.com/agglayer/aggkit/sequencesender/seqsendertypes" "github.com/ethereum/go-ethereum/common" ) @@ -37,23 +36,6 @@ func NewRPCBatch(batchNumber uint64, accInputHash common.Hash, blockHashes []str } } -// DeepCopy -func (b *RPCBatch) DeepCopy() seqsendertypes.Batch { - return &RPCBatch{ - accInputHash: b.accInputHash, - batchNumber: b.batchNumber, - blockHashes: b.blockHashes, - batchL2Data: b.batchL2Data, - globalExitRoot: b.globalExitRoot, - localExitRoot: b.localExitRoot, - stateRoot: b.stateRoot, - coinbase: b.coinbase, - closed: b.closed, - lastL2BlockTimestamp: b.lastL2BlockTimestamp, - l1InfoTreeIndex: b.l1InfoTreeIndex, - } -} - // LastCoinbase func (b *RPCBatch) LastCoinbase() common.Address { return b.coinbase diff --git a/scripts/local_config b/scripts/local_config index fbc0beb5..aab17d20 100755 --- a/scripts/local_config +++ b/scripts/local_config @@ -164,18 +164,9 @@ function export_values_of_genesis(){ ############################################################################### function export_values_of_cdk_node_config(){ local _CDK_CONFIG_FILE=$1 - export_key_from_toml_file zkevm_l2_sequencer_address $_CDK_CONFIG_FILE SequenceSender L2Coinbase - if [ $? -ne 0 ]; then - export_key_from_toml_file_or_fatal zkevm_l2_sequencer_address $_CDK_CONFIG_FILE "." 
L2Coinbase - fi - export_obj_key_from_toml_file zkevm_l2_sequencer_keystore_password $_CDK_CONFIG_FILE SequenceSender PrivateKey Password - if [ $? -ne 0 ]; then - export_key_from_toml_file_or_fatal zkevm_l2_sequencer_keystore_password $_CDK_CONFIG_FILE "." SequencerPrivateKeyPassword - fi - export_key_from_toml_file l1_chain_id $_CDK_CONFIG_FILE SequenceSender.EthTxManager.Etherman L1ChainID - if [ $? -ne 0 ]; then - export_key_from_toml_file_or_fatal l1_chain_id $_CDK_CONFIG_FILE L1Config chainId - fi + export_key_from_toml_file_or_fatal zkevm_l2_sequencer_address $_CDK_CONFIG_FILE "." L2Coinbase + export_key_from_toml_file_or_fatal zkevm_l2_sequencer_keystore_password $_CDK_CONFIG_FILE "." SequencerPrivateKeyPassword + export_key_from_toml_file_or_fatal l1_chain_id $_CDK_CONFIG_FILE L1Config chainId export_key_from_toml_file zkevm_is_validium $_CDK_CONFIG_FILE Common IsValidiumMode if [ $? -ne 0 ]; then export_key_from_toml_file_or_fatal zkevm_is_validium $_CDK_CONFIG_FILE "." IsValidiumMode @@ -189,19 +180,8 @@ function export_values_of_cdk_node_config(){ log_debug "l2_chain_id not found in Aggregator section, using 0" export l2_chain_id="0" fi - export_key_from_toml_file_or_fatal zkevm_aggregator_port $_CDK_CONFIG_FILE Aggregator Port - export_key_from_toml_file zkevm_l2_agglayer_address $_CDK_CONFIG_FILE Aggregator SenderAddress - if [ $? -ne 0 ]; then - export_key_from_toml_file zkevm_l2_agglayer_address $_CDK_CONFIG_FILE "." SenderProofToL1Addr - fi - export_obj_key_from_toml_file zkevm_l2_aggregator_keystore_password $_CDK_CONFIG_FILE Aggregator.EthTxManager PrivateKeys Password - if [ $? -ne 0 ]; then - export_key_from_toml_file zkevm_l2_aggregator_keystore_password $_CDK_CONFIG_FILE "." AggregatorPrivateKeyPassword - fi - export_key_from_toml_file zkevm_rollup_fork_id $_CDK_CONFIG_FILE Aggregator ForkId - if [ $? -ne 0 ]; then - export_key_from_toml_file_or_fatal zkevm_rollup_fork_id $_CDK_CONFIG_FILE "." 
ForkId - fi + export_key_from_toml_file_or_fatal zkevm_l2_agglayer_address $_CDK_CONFIG_FILE "." SenderProofToL1Addr + export_key_from_toml_file_or_fatal zkevm_rollup_fork_id $_CDK_CONFIG_FILE "." ForkId export_key_from_toml_file zkevm_l2_agglayer_keystore_password $_CDK_CONFIG_FILE AggSender.SequencerPrivateKey Password if [ $? -ne 0 ]; then export_key_from_toml_file_or_fatal zkevm_l2_agglayer_keystore_password $_CDK_CONFIG_FILE "." SequencerPrivateKeyPassword diff --git a/sequencesender/config.go b/sequencesender/config.go deleted file mode 100644 index 20d61142..00000000 --- a/sequencesender/config.go +++ /dev/null @@ -1,73 +0,0 @@ -package sequencesender - -import ( - "github.com/0xPolygon/zkevm-ethtx-manager/ethtxmanager" - "github.com/agglayer/aggkit/config/types" - "github.com/agglayer/aggkit/log" - "github.com/ethereum/go-ethereum/common" -) - -// Config represents the configuration of a sequence sender -type Config struct { - // WaitPeriodSendSequence is the time the sequencer waits until - // trying to send a sequence to L1 - WaitPeriodSendSequence types.Duration `mapstructure:"WaitPeriodSendSequence"` - // LastBatchVirtualizationTimeMaxWaitPeriod is time since sequences should be sent - LastBatchVirtualizationTimeMaxWaitPeriod types.Duration `mapstructure:"LastBatchVirtualizationTimeMaxWaitPeriod"` - // L1BlockTimestampMargin is the time difference (margin) that must exists between last L1 block - // and last L2 block in the sequence before sending the sequence to L1. If the difference is - // lower than this value, then sequencesender will wait until the difference is equal or greater - L1BlockTimestampMargin types.Duration `mapstructure:"L1BlockTimestampMargin"` - // MaxTxSizeForL1 is the maximum size a single transaction can have. 
This field has - // non-trivial consequences: larger transactions than 128KB are significantly harder and - // more expensive to propagate; larger transactions also take more resources - // to validate whether they fit into the pool or not. - MaxTxSizeForL1 uint64 `mapstructure:"MaxTxSizeForL1"` - // SenderAddress defines which private key the eth tx manager needs to use - // to sign the L1 txs - SenderAddress common.Address - // L2Coinbase defines which address is going to receive the fees - L2Coinbase common.Address `mapstructure:"L2Coinbase"` - // PrivateKey defines all the key store files that are going - // to be read in order to provide the private keys to sign the L1 txs - PrivateKey types.KeystoreFileConfig `mapstructure:"PrivateKey"` - // Batch number where there is a forkid change (fork upgrade) - ForkUpgradeBatchNumber uint64 - // GasOffset is the amount of gas to be added to the gas estimation in order - // to provide an amount that is higher than the estimated one. This is used - // to avoid the TX getting reverted in case something has changed in the network - // state after the estimation which can cause the TX to require more gas to be - // executed. 
- // - // ex: - // gas estimation: 1000 - // gas offset: 100 - // final gas: 1100 - GasOffset uint64 `mapstructure:"GasOffset"` - - // SequencesTxFileName is the file name to store sequences sent to L1 - SequencesTxFileName string - - // WaitPeriodPurgeTxFile is the time to wait before purging from file the finished sent L1 tx - WaitPeriodPurgeTxFile types.Duration `mapstructure:"WaitPeriodPurgeTxFile"` - - // MaxPendingTx is the maximum number of pending transactions (those that are not in a final state) - MaxPendingTx uint64 - - // EthTxManager is the config for the ethtxmanager - EthTxManager ethtxmanager.Config `mapstructure:"EthTxManager"` - - // Log is the log configuration - Log log.Config `mapstructure:"Log"` - - // MaxBatchesForL1 is the maximum amount of batches to be sequenced in a single L1 tx - MaxBatchesForL1 uint64 `mapstructure:"MaxBatchesForL1"` - // BlockFinality indicates the status of the blocks that will be queried in order to sync - BlockFinality string `jsonschema:"enum=LatestBlock, enum=SafeBlock, enum=PendingBlock, enum=FinalizedBlock, enum=EarliestBlock" mapstructure:"BlockFinality"` //nolint:lll - - // RPCURL is the URL of the RPC server - RPCURL string `mapstructure:"RPCURL"` - - // GetBatchWaitInterval is the time to wait to query for a new batch when there are no more batches available - GetBatchWaitInterval types.Duration `mapstructure:"GetBatchWaitInterval"` -} diff --git a/sequencesender/ethtx.go b/sequencesender/ethtx.go deleted file mode 100644 index 458ad3d1..00000000 --- a/sequencesender/ethtx.go +++ /dev/null @@ -1,398 +0,0 @@ -package sequencesender - -import ( - "context" - "encoding/json" - "errors" - "math" - "math/big" - "os" - "strings" - "sync/atomic" - "time" - - "github.com/0xPolygon/zkevm-ethtx-manager/ethtxmanager" - "github.com/0xPolygon/zkevm-ethtx-manager/types" - "github.com/agglayer/aggkit/log" - "github.com/ethereum/go-ethereum/common" -) - -type ethTxData struct { - Nonce uint64 `json:"nonce"` - Status string 
`json:"status"` - SentL1Timestamp time.Time `json:"sentL1Timestamp"` - StatusTimestamp time.Time `json:"statusTimestamp"` - FromBatch uint64 `json:"fromBatch"` - ToBatch uint64 `json:"toBatch"` - MinedAtBlock big.Int `json:"minedAtBlock"` - OnMonitor bool `json:"onMonitor"` - To common.Address `json:"to"` - StateHistory []string `json:"stateHistory"` - Txs map[common.Hash]ethTxAdditionalData `json:"txs"` - Gas uint64 `json:"gas"` -} - -type ethTxAdditionalData struct { - GasPrice *big.Int `json:"gasPrice,omitempty"` - RevertMessage string `json:"revertMessage,omitempty"` -} - -// sendTx adds transaction to the ethTxManager to send it to L1 -func (s *SequenceSender) sendTx(ctx context.Context, resend bool, txOldHash *common.Hash, to *common.Address, - fromBatch uint64, toBatch uint64, data []byte, gas uint64) error { - // Params if new tx to send or resend a previous tx - var ( - paramTo *common.Address - paramData []byte - valueFromBatch uint64 - valueToBatch uint64 - valueToAddress common.Address - ) - - if !resend { - paramTo = to - paramData = data - valueFromBatch = fromBatch - valueToBatch = toBatch - } else { - if txOldHash == nil { - log.Errorf("trying to resend a tx with nil hash") - return errors.New("resend tx with nil hash monitor id") - } - oldEthTx := s.ethTransactions[*txOldHash] - paramTo = &oldEthTx.To - paramData = s.ethTxData[*txOldHash] - valueFromBatch = oldEthTx.FromBatch - valueToBatch = oldEthTx.ToBatch - } - if paramTo != nil { - valueToAddress = *paramTo - } - - // Add sequence tx - txHash, err := s.ethTxManager.AddWithGas(ctx, paramTo, big.NewInt(0), paramData, s.cfg.GasOffset, nil, gas) - if err != nil { - log.Errorf("error adding sequence to ethtxmanager: %v", err) - return err - } - - // Add new eth tx - txData := ethTxData{ - SentL1Timestamp: time.Now(), - StatusTimestamp: time.Now(), - Status: "*new", - FromBatch: valueFromBatch, - ToBatch: valueToBatch, - OnMonitor: true, - To: valueToAddress, - Gas: gas, - } - - // Add tx to 
internal structure - s.mutexEthTx.Lock() - s.ethTransactions[txHash] = &txData - txResults := make(map[common.Hash]types.TxResult, 0) - s.copyTxData(txHash, paramData, txResults) - err = s.getResultAndUpdateEthTx(ctx, txHash) - if err != nil { - log.Errorf("error getting result for tx %v: %v", txHash, err) - } - if !resend { - atomic.StoreUint64(&s.latestSentToL1Batch, valueToBatch) - } else { - s.ethTransactions[*txOldHash].Status = "*resent" - } - s.mutexEthTx.Unlock() - - // Save sent sequences - err = s.saveSentSequencesTransactions(ctx) - if err != nil { - log.Errorf("error saving tx sequence sent, error: %v", err) - } - return nil -} - -// purgeEthTx purges transactions from memory structures -func (s *SequenceSender) purgeEthTx(ctx context.Context) { - // If sequence sending is stopped, do not purge - if s.IsStopped() { - return - } - - // Purge old transactions that are finalized - s.mutexEthTx.Lock() - timePurge := time.Now().Add(-s.cfg.WaitPeriodPurgeTxFile.Duration) - toPurge := make([]common.Hash, 0) - for hash, data := range s.ethTransactions { - if !data.StatusTimestamp.Before(timePurge) { - continue - } - - if !data.OnMonitor || data.Status == types.MonitoredTxStatusFinalized.String() { - toPurge = append(toPurge, hash) - - // Remove from tx monitor - if data.OnMonitor { - err := s.ethTxManager.Remove(ctx, hash) - if err != nil { - log.Warnf("error removing monitor tx %v from ethtxmanager: %v", hash, err) - } else { - log.Infof("removed monitor tx %v from ethtxmanager", hash) - } - } - } - } - - if len(toPurge) > 0 { - var firstPurged uint64 = math.MaxUint64 - var lastPurged uint64 - for i := 0; i < len(toPurge); i++ { - if s.ethTransactions[toPurge[i]].Nonce < firstPurged { - firstPurged = s.ethTransactions[toPurge[i]].Nonce - } - if s.ethTransactions[toPurge[i]].Nonce > lastPurged { - lastPurged = s.ethTransactions[toPurge[i]].Nonce - } - delete(s.ethTransactions, toPurge[i]) - delete(s.ethTxData, toPurge[i]) - } - log.Infof("txs purged count: %d, 
fromNonce: %d, toNonce: %d", len(toPurge), firstPurged, lastPurged) - } - s.mutexEthTx.Unlock() -} - -// syncEthTxResults syncs results from L1 for transactions in the memory structure -func (s *SequenceSender) syncEthTxResults(ctx context.Context) (uint64, error) { - s.mutexEthTx.Lock() - var ( - txPending uint64 - txSync uint64 - ) - for hash, tx := range s.ethTransactions { - if tx.Status == types.MonitoredTxStatusFinalized.String() { - continue - } - - err := s.getResultAndUpdateEthTx(ctx, hash) - if err != nil { - log.Errorf("error getting result for tx %v: %v", hash, err) - return 0, err - } - - txSync++ - txStatus := types.MonitoredTxStatus(tx.Status) - // Count if it is not in a final state - if tx.OnMonitor && - txStatus != types.MonitoredTxStatusFailed && - txStatus != types.MonitoredTxStatusSafe && - txStatus != types.MonitoredTxStatusFinalized { - txPending++ - } - } - s.mutexEthTx.Unlock() - - // Save updated sequences transactions - err := s.saveSentSequencesTransactions(ctx) - if err != nil { - log.Errorf("error saving tx sequence, error: %v", err) - return 0, err - } - - log.Infof("%d tx results synchronized (%d in pending state)", txSync, txPending) - return txPending, nil -} - -// syncAllEthTxResults syncs all tx results from L1 -func (s *SequenceSender) syncAllEthTxResults(ctx context.Context) error { - // Get all results - results, err := s.ethTxManager.ResultsByStatus(ctx, nil) - if err != nil { - log.Warnf("error getting results for all tx: %v", err) - return err - } - - // Check and update tx status - numResults := len(results) - s.mutexEthTx.Lock() - for _, result := range results { - txSequence, exists := s.ethTransactions[result.ID] - if !exists { - log.Debugf("transaction %v missing in memory structure. 
Adding it", result.ID) - // No info: from/to batch and the sent timestamp - s.ethTransactions[result.ID] = ðTxData{ - SentL1Timestamp: time.Time{}, - StatusTimestamp: time.Now(), - OnMonitor: true, - Status: "*missing", - } - txSequence = s.ethTransactions[result.ID] - } - - s.updateEthTxResult(txSequence, result) - } - s.mutexEthTx.Unlock() - - // Save updated sequences transactions - err = s.saveSentSequencesTransactions(ctx) - if err != nil { - log.Errorf("error saving tx sequence, error: %v", err) - } - - log.Infof("%d tx results synchronized", numResults) - return nil -} - -// copyTxData copies tx data in the internal structure -func (s *SequenceSender) copyTxData( - txHash common.Hash, txData []byte, txsResults map[common.Hash]types.TxResult, -) { - s.ethTxData[txHash] = make([]byte, len(txData)) - copy(s.ethTxData[txHash], txData) - - s.ethTransactions[txHash].Txs = make(map[common.Hash]ethTxAdditionalData, 0) - for hash, result := range txsResults { - var gasPrice *big.Int - if result.Tx != nil { - gasPrice = result.Tx.GasPrice() - } - - add := ethTxAdditionalData{ - GasPrice: gasPrice, - RevertMessage: result.RevertMessage, - } - s.ethTransactions[txHash].Txs[hash] = add - } -} - -// updateEthTxResult handles updating transaction state -func (s *SequenceSender) updateEthTxResult(txData *ethTxData, txResult types.MonitoredTxResult) { - if txData.Status != txResult.Status.String() { - log.Infof("update transaction %v to state %s", txResult.ID, txResult.Status.String()) - txData.StatusTimestamp = time.Now() - stTrans := txData.StatusTimestamp.Format("2006-01-02T15:04:05.000-07:00") + - ", " + txData.Status + ", " + txResult.Status.String() - txData.Status = txResult.Status.String() - txData.StateHistory = append(txData.StateHistory, stTrans) - - // Manage according to the state - statusConsolidated := txData.Status == types.MonitoredTxStatusSafe.String() || - txData.Status == types.MonitoredTxStatusFinalized.String() - if txData.Status == 
types.MonitoredTxStatusFailed.String() { - s.logFatalf("transaction %v result failed!") - } else if statusConsolidated && txData.ToBatch >= atomic.LoadUint64(&s.latestVirtualBatchNumber) { - s.latestVirtualTime = txData.StatusTimestamp - } - } - - // Update info received from L1 - txData.Nonce = txResult.Nonce - if txResult.To != nil { - txData.To = *txResult.To - } - if txResult.MinedAtBlockNumber != nil { - txData.MinedAtBlock = *txResult.MinedAtBlockNumber - } - s.copyTxData(txResult.ID, txResult.Data, txResult.Txs) -} - -// getResultAndUpdateEthTx updates the tx status from the ethTxManager -func (s *SequenceSender) getResultAndUpdateEthTx(ctx context.Context, txHash common.Hash) error { - txData, exists := s.ethTransactions[txHash] - if !exists { - s.logger.Errorf("transaction %v not found in memory", txHash) - return errors.New("transaction not found in memory structure") - } - - txResult, err := s.ethTxManager.Result(ctx, txHash) - switch { - case errors.Is(err, ethtxmanager.ErrNotFound): - s.logger.Infof("transaction %v does not exist in ethtxmanager. 
Marking it", txHash) - txData.OnMonitor = false - // Resend tx - errSend := s.sendTx(ctx, true, &txHash, nil, 0, 0, nil, txData.Gas) - if errSend == nil { - txData.OnMonitor = false - } - - case err != nil: - s.logger.Errorf("error getting result for tx %v: %v", txHash, err) - return err - - default: - s.updateEthTxResult(txData, txResult) - } - - return nil -} - -// loadSentSequencesTransactions loads the file into the memory structure -func (s *SequenceSender) loadSentSequencesTransactions() error { - // Check if file exists - if _, err := os.Stat(s.cfg.SequencesTxFileName); os.IsNotExist(err) { - log.Infof("file not found %s: %v", s.cfg.SequencesTxFileName, err) - return nil - } else if err != nil { - log.Errorf("error opening file %s: %v", s.cfg.SequencesTxFileName, err) - return err - } - - // Read file - data, err := os.ReadFile(s.cfg.SequencesTxFileName) - if err != nil { - log.Errorf("error reading file %s: %v", s.cfg.SequencesTxFileName, err) - return err - } - - // Restore memory structure - s.mutexEthTx.Lock() - err = json.Unmarshal(data, &s.ethTransactions) - s.mutexEthTx.Unlock() - if err != nil { - log.Errorf("error decoding data from %s: %v", s.cfg.SequencesTxFileName, err) - return err - } - - return nil -} - -// saveSentSequencesTransactions saves memory structure into persistent file -func (s *SequenceSender) saveSentSequencesTransactions(ctx context.Context) error { - var err error - - // Purge tx - s.purgeEthTx(ctx) - - // Create file - fileName := s.cfg.SequencesTxFileName[0:strings.IndexRune(s.cfg.SequencesTxFileName, '.')] + ".tmp" - s.sequencesTxFile, err = os.Create(fileName) - if err != nil { - log.Errorf("error creating file %s: %v", fileName, err) - return err - } - defer s.sequencesTxFile.Close() - - // Write data JSON encoded - encoder := json.NewEncoder(s.sequencesTxFile) - encoder.SetIndent("", " ") - s.mutexEthTx.Lock() - err = encoder.Encode(s.ethTransactions) - s.mutexEthTx.Unlock() - if err != nil { - log.Errorf("error writing 
file %s: %v", fileName, err) - return err - } - - // Rename the new file - err = os.Rename(fileName, s.cfg.SequencesTxFileName) - if err != nil { - log.Errorf("error renaming file %s to %s: %v", fileName, s.cfg.SequencesTxFileName, err) - return err - } - - return nil -} - -// IsStopped returns true in case seqSendingStopped is set to 1, otherwise false -func (s *SequenceSender) IsStopped() bool { - return atomic.LoadUint32(&s.seqSendingStopped) == 1 -} diff --git a/sequencesender/ethtx_test.go b/sequencesender/ethtx_test.go deleted file mode 100644 index e890a4b9..00000000 --- a/sequencesender/ethtx_test.go +++ /dev/null @@ -1,786 +0,0 @@ -package sequencesender - -import ( - "context" - "encoding/json" - "errors" - "math/big" - "os" - "testing" - "time" - - "github.com/0xPolygon/zkevm-ethtx-manager/ethtxmanager" - ethtxtypes "github.com/0xPolygon/zkevm-ethtx-manager/types" - "github.com/agglayer/aggkit/log" - "github.com/agglayer/aggkit/sequencesender/mocks" - "github.com/ethereum/go-ethereum/common" - "github.com/stretchr/testify/mock" - "github.com/stretchr/testify/require" -) - -func Test_sendTx(t *testing.T) { - t.Parallel() - - addr := common.BytesToAddress([]byte{1, 2, 3}) - hash := common.HexToHash("0x1") - oldHash := common.HexToHash("0x2") - - type args struct { - resend bool - txOldHash *common.Hash - to *common.Address - fromBatch uint64 - toBatch uint64 - data []byte - gas uint64 - } - - type state struct { - currentNonce uint64 - ethTxData map[common.Hash][]byte - ethTransactions map[common.Hash]*ethTxData - latestSentToL1Batch uint64 - } - - tests := []struct { - name string - args args - state state - getEthTxManager func(t *testing.T) *mocks.EthTxManagerMock - expectedState state - expectedErr error - }{ - { - name: "successfully sent", - args: args{ - resend: false, - txOldHash: nil, - to: &addr, - fromBatch: 1, - toBatch: 2, - data: []byte("test"), - gas: 100500, - }, - getEthTxManager: func(t *testing.T) *mocks.EthTxManagerMock { - t.Helper() - 
- mngr := mocks.NewEthTxManagerMock(t) - mngr.On("AddWithGas", mock.Anything, &addr, big.NewInt(0), []byte("test"), uint64(0), mock.Anything, uint64(100500)).Return(hash, nil) - mngr.On("Result", mock.Anything, hash).Return(ethtxtypes.MonitoredTxResult{ - ID: hash, - Data: []byte{1, 2, 3}, - }, nil) - return mngr - }, - state: state{ - currentNonce: 10, - ethTxData: map[common.Hash][]byte{ - hash: {}, - }, - ethTransactions: map[common.Hash]*ethTxData{ - hash: {}, - }, - latestSentToL1Batch: 0, - }, - expectedState: state{ - currentNonce: 11, - ethTxData: map[common.Hash][]byte{ - hash: {1, 2, 3}, - }, - ethTransactions: map[common.Hash]*ethTxData{ - hash: { - SentL1Timestamp: now, - StatusTimestamp: now, - FromBatch: 1, - ToBatch: 2, - OnMonitor: true, - To: addr, - Gas: 100500, - StateHistory: []string{now.Format("2006-01-02T15:04:05.000-07:00") + ", *new, "}, - Txs: map[common.Hash]ethTxAdditionalData{}, - }, - }, - latestSentToL1Batch: 2, - }, - expectedErr: nil, - }, - { - name: "successfully sent with resend", - args: args{ - resend: true, - txOldHash: &oldHash, - gas: 100500, - }, - getEthTxManager: func(t *testing.T) *mocks.EthTxManagerMock { - t.Helper() - - mngr := mocks.NewEthTxManagerMock(t) - mngr.On("AddWithGas", mock.Anything, &addr, big.NewInt(0), []byte(nil), uint64(0), mock.Anything, uint64(100500)).Return(hash, nil) - mngr.On("Result", mock.Anything, hash).Return(ethtxtypes.MonitoredTxResult{ - ID: hash, - Data: []byte{1, 2, 3}, - }, nil) - return mngr - }, - state: state{ - ethTxData: map[common.Hash][]byte{ - hash: []byte("test"), - }, - ethTransactions: map[common.Hash]*ethTxData{ - oldHash: { - To: addr, - Nonce: 10, - FromBatch: 1, - ToBatch: 2, - }, - }, - latestSentToL1Batch: 0, - }, - expectedState: state{ - currentNonce: 0, - ethTxData: map[common.Hash][]byte{ - hash: {1, 2, 3}, - }, - ethTransactions: map[common.Hash]*ethTxData{ - hash: { - SentL1Timestamp: now, - StatusTimestamp: now, - FromBatch: 1, - ToBatch: 2, - OnMonitor: true, - 
To: addr, - Gas: 100500, - StateHistory: []string{now.Format("2006-01-02T15:04:05.000-07:00") + ", *new, "}, - Txs: map[common.Hash]ethTxAdditionalData{}, - }, - }, - latestSentToL1Batch: 0, - }, - expectedErr: nil, - }, - { - name: "add with gas returns error", - args: args{ - resend: true, - txOldHash: &oldHash, - gas: 100500, - }, - getEthTxManager: func(t *testing.T) *mocks.EthTxManagerMock { - t.Helper() - - mngr := mocks.NewEthTxManagerMock(t) - mngr.On("AddWithGas", mock.Anything, &addr, big.NewInt(0), []byte(nil), uint64(0), mock.Anything, uint64(100500)).Return(nil, errors.New("failed to add with gas")) - return mngr - }, - state: state{ - ethTxData: map[common.Hash][]byte{ - hash: []byte("test"), - }, - ethTransactions: map[common.Hash]*ethTxData{ - oldHash: { - To: addr, - Nonce: 10, - FromBatch: 1, - ToBatch: 2, - }, - }, - latestSentToL1Batch: 0, - }, - expectedErr: errors.New("failed to add with gas"), - }, - { - name: "empty old hash", - args: args{ - resend: true, - gas: 100500, - }, - getEthTxManager: func(t *testing.T) *mocks.EthTxManagerMock { - t.Helper() - - mngr := mocks.NewEthTxManagerMock(t) - return mngr - }, - state: state{ - ethTxData: map[common.Hash][]byte{ - hash: []byte("test"), - }, - ethTransactions: map[common.Hash]*ethTxData{ - oldHash: { - To: addr, - Nonce: 10, - FromBatch: 1, - ToBatch: 2, - }, - }, - latestSentToL1Batch: 0, - }, - expectedErr: errors.New("resend tx with nil hash monitor id"), - }, - } - - for _, tt := range tests { - tt := tt - - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - - tmpFile, err := os.CreateTemp(os.TempDir(), tt.name+".tmp") - require.NoError(t, err) - defer os.RemoveAll(tmpFile.Name() + ".tmp") - - ss := SequenceSender{ - ethTxData: tt.state.ethTxData, - ethTransactions: tt.state.ethTransactions, - ethTxManager: tt.getEthTxManager(t), - latestSentToL1Batch: tt.state.latestSentToL1Batch, - cfg: Config{ - SequencesTxFileName: tmpFile.Name() + ".tmp", - }, - logger: log.GetDefaultLogger(), - } 
- - err = ss.sendTx(context.Background(), tt.args.resend, tt.args.txOldHash, tt.args.to, tt.args.fromBatch, tt.args.toBatch, tt.args.data, tt.args.gas) - if tt.expectedErr != nil { - require.Equal(t, tt.expectedErr, err) - } else { - require.NoError(t, err) - require.Equal(t, tt.expectedState.ethTxData, ss.ethTxData) - require.Equal(t, len(tt.expectedState.ethTransactions), len(ss.ethTransactions)) - for k, v := range tt.expectedState.ethTransactions { - require.Equal(t, v.Gas, ss.ethTransactions[k].Gas) - require.Equal(t, v.To, ss.ethTransactions[k].To) - require.Equal(t, v.Nonce, ss.ethTransactions[k].Nonce) - require.Equal(t, v.Status, ss.ethTransactions[k].Status) - require.Equal(t, v.FromBatch, ss.ethTransactions[k].FromBatch) - require.Equal(t, v.ToBatch, ss.ethTransactions[k].ToBatch) - require.Equal(t, v.OnMonitor, ss.ethTransactions[k].OnMonitor) - } - require.Equal(t, tt.expectedState.latestSentToL1Batch, ss.latestSentToL1Batch) - } - }) - } -} - -func Test_purgeEthTx(t *testing.T) { - t.Parallel() - - firstTimestamp := time.Now().Add(-time.Hour) - secondTimestamp := time.Now().Add(time.Hour) - - tests := []struct { - name string - seqSendingStopped uint32 - ethTransactions map[common.Hash]*ethTxData - ethTxData map[common.Hash][]byte - getEthTxManager func(t *testing.T) *mocks.EthTxManagerMock - sequenceList []uint64 - expectedEthTransactions map[common.Hash]*ethTxData - expectedEthTxData map[common.Hash][]byte - }{ - { - name: "sequence sender stopped", - seqSendingStopped: 1, - ethTransactions: map[common.Hash]*ethTxData{ - common.HexToHash("0x1"): { - StatusTimestamp: firstTimestamp, - OnMonitor: true, - Status: ethtxtypes.MonitoredTxStatusFinalized.String(), - }, - }, - ethTxData: map[common.Hash][]byte{ - common.HexToHash("0x1"): {1, 2, 3}, - }, - getEthTxManager: func(t *testing.T) *mocks.EthTxManagerMock { - t.Helper() - - return mocks.NewEthTxManagerMock(t) - }, - sequenceList: []uint64{1, 2}, - expectedEthTransactions: 
map[common.Hash]*ethTxData{ - common.HexToHash("0x1"): { - StatusTimestamp: firstTimestamp, - OnMonitor: true, - Status: ethtxtypes.MonitoredTxStatusFinalized.String(), - }, - }, - expectedEthTxData: map[common.Hash][]byte{ - common.HexToHash("0x1"): {1, 2, 3}, - }, - }, - { - name: "transactions purged", - ethTransactions: map[common.Hash]*ethTxData{ - common.HexToHash("0x1"): { - StatusTimestamp: firstTimestamp, - OnMonitor: true, - Status: ethtxtypes.MonitoredTxStatusFinalized.String(), - }, - common.HexToHash("0x2"): { - StatusTimestamp: secondTimestamp, - }, - }, - ethTxData: map[common.Hash][]byte{ - common.HexToHash("0x1"): {1, 2, 3}, - common.HexToHash("0x2"): {4, 5, 6}, - }, - getEthTxManager: func(t *testing.T) *mocks.EthTxManagerMock { - t.Helper() - - mngr := mocks.NewEthTxManagerMock(t) - mngr.On("Remove", mock.Anything, common.HexToHash("0x1")).Return(nil) - return mngr - }, - sequenceList: []uint64{1, 2}, - expectedEthTransactions: map[common.Hash]*ethTxData{ - common.HexToHash("0x2"): { - StatusTimestamp: secondTimestamp, - }, - }, - expectedEthTxData: map[common.Hash][]byte{ - common.HexToHash("0x2"): {4, 5, 6}, - }, - }, - { - name: "removed with error", - ethTransactions: map[common.Hash]*ethTxData{ - common.HexToHash("0x1"): { - StatusTimestamp: firstTimestamp, - OnMonitor: true, - Status: ethtxtypes.MonitoredTxStatusFinalized.String(), - }, - common.HexToHash("0x2"): { - StatusTimestamp: secondTimestamp, - }, - }, - ethTxData: map[common.Hash][]byte{ - common.HexToHash("0x1"): {1, 2, 3}, - common.HexToHash("0x2"): {4, 5, 6}, - }, - getEthTxManager: func(t *testing.T) *mocks.EthTxManagerMock { - t.Helper() - - mngr := mocks.NewEthTxManagerMock(t) - mngr.On("Remove", mock.Anything, common.HexToHash("0x1")).Return(errors.New("test err")) - return mngr - }, - sequenceList: []uint64{1, 2}, - expectedEthTransactions: map[common.Hash]*ethTxData{ - common.HexToHash("0x2"): { - StatusTimestamp: secondTimestamp, - }, - }, - expectedEthTxData: 
map[common.Hash][]byte{ - common.HexToHash("0x2"): {4, 5, 6}, - }, - }, - } - - for _, tt := range tests { - tt := tt - - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - - mngr := tt.getEthTxManager(t) - ss := SequenceSender{ - seqSendingStopped: tt.seqSendingStopped, - ethTransactions: tt.ethTransactions, - ethTxData: tt.ethTxData, - ethTxManager: mngr, - logger: log.GetDefaultLogger(), - } - - ss.purgeEthTx(context.Background()) - - mngr.AssertExpectations(t) - require.Equal(t, tt.expectedEthTransactions, ss.ethTransactions) - require.Equal(t, tt.expectedEthTxData, ss.ethTxData) - }) - } -} - -func Test_syncEthTxResults(t *testing.T) { - t.Parallel() - - tests := []struct { - name string - ethTransactions map[common.Hash]*ethTxData - getEthTxManager func(t *testing.T) *mocks.EthTxManagerMock - - expectErr error - expectPendingTxs uint64 - }{ - { - name: "successfully synced", - ethTransactions: map[common.Hash]*ethTxData{ - common.HexToHash("0x1"): { - StatusTimestamp: time.Now(), - OnMonitor: true, - Status: ethtxtypes.MonitoredTxStatusCreated.String(), - }, - }, - getEthTxManager: func(t *testing.T) *mocks.EthTxManagerMock { - t.Helper() - - mngr := mocks.NewEthTxManagerMock(t) - mngr.On("Result", mock.Anything, common.HexToHash("0x1")).Return(ethtxtypes.MonitoredTxResult{ - ID: common.HexToHash("0x1"), - Data: []byte{1, 2, 3}, - }, nil) - return mngr - }, - expectPendingTxs: 1, - }, - } - - for _, tt := range tests { - tt := tt - - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - - tmpFile, err := os.CreateTemp(os.TempDir(), tt.name) - require.NoError(t, err) - - mngr := tt.getEthTxManager(t) - ss := SequenceSender{ - ethTransactions: tt.ethTransactions, - ethTxManager: mngr, - ethTxData: make(map[common.Hash][]byte), - cfg: Config{ - SequencesTxFileName: tmpFile.Name() + ".tmp", - }, - logger: log.GetDefaultLogger(), - } - - pendingTxs, err := ss.syncEthTxResults(context.Background()) - if tt.expectErr != nil { - require.Equal(t, tt.expectErr, err) - 
} else { - require.NoError(t, err) - require.Equal(t, tt.expectPendingTxs, pendingTxs) - } - - mngr.AssertExpectations(t) - - err = os.RemoveAll(tmpFile.Name() + ".tmp") - require.NoError(t, err) - }) - } -} - -func Test_syncAllEthTxResults(t *testing.T) { - t.Parallel() - - tests := []struct { - name string - ethTransactions map[common.Hash]*ethTxData - getEthTxManager func(t *testing.T) *mocks.EthTxManagerMock - - expectErr error - expectPendingTxs uint64 - }{ - { - name: "successfully synced", - ethTransactions: map[common.Hash]*ethTxData{ - common.HexToHash("0x1"): { - StatusTimestamp: time.Now(), - OnMonitor: true, - Status: ethtxtypes.MonitoredTxStatusCreated.String(), - }, - }, - getEthTxManager: func(t *testing.T) *mocks.EthTxManagerMock { - t.Helper() - - mngr := mocks.NewEthTxManagerMock(t) - mngr.On("ResultsByStatus", mock.Anything, []ethtxtypes.MonitoredTxStatus(nil)).Return([]ethtxtypes.MonitoredTxResult{ - { - ID: common.HexToHash("0x1"), - Data: []byte{1, 2, 3}, - }, - }, nil) - return mngr - }, - expectPendingTxs: 1, - }, - { - name: "successfully synced with missing tx", - ethTransactions: map[common.Hash]*ethTxData{}, - getEthTxManager: func(t *testing.T) *mocks.EthTxManagerMock { - t.Helper() - - mngr := mocks.NewEthTxManagerMock(t) - mngr.On("ResultsByStatus", mock.Anything, []ethtxtypes.MonitoredTxStatus(nil)).Return([]ethtxtypes.MonitoredTxResult{ - { - ID: common.HexToHash("0x1"), - Data: []byte{1, 2, 3}, - }, - }, nil) - return mngr - }, - expectPendingTxs: 1, - }, - } - - for _, tt := range tests { - tt := tt - - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - - tmpFile, err := os.CreateTemp(os.TempDir(), tt.name) - require.NoError(t, err) - - mngr := tt.getEthTxManager(t) - ss := SequenceSender{ - ethTransactions: tt.ethTransactions, - ethTxManager: mngr, - ethTxData: make(map[common.Hash][]byte), - cfg: Config{ - SequencesTxFileName: tmpFile.Name() + ".tmp", - }, - logger: log.GetDefaultLogger(), - } - - err = 
ss.syncAllEthTxResults(context.Background()) - if tt.expectErr != nil { - require.Equal(t, tt.expectErr, err) - } else { - require.NoError(t, err) - } - - mngr.AssertExpectations(t) - - err = os.RemoveAll(tmpFile.Name() + ".tmp") - require.NoError(t, err) - }) - } -} - -func Test_copyTxData(t *testing.T) { - t.Parallel() - - tests := []struct { - name string - txHash common.Hash - txData []byte - txsResults map[common.Hash]ethtxtypes.TxResult - ethTxData map[common.Hash][]byte - ethTransactions map[common.Hash]*ethTxData - expectedRthTxData map[common.Hash][]byte - expectedEthTransactions map[common.Hash]*ethTxData - }{ - { - name: "successfully copied", - txHash: common.HexToHash("0x1"), - txData: []byte{1, 2, 3}, - txsResults: map[common.Hash]ethtxtypes.TxResult{ - common.HexToHash("0x1"): {}, - }, - ethTxData: map[common.Hash][]byte{ - common.HexToHash("0x1"): {0, 2, 3}, - }, - ethTransactions: map[common.Hash]*ethTxData{ - common.HexToHash("0x1"): {}, - }, - expectedRthTxData: map[common.Hash][]byte{ - common.HexToHash("0x1"): {1, 2, 3}, - }, - expectedEthTransactions: map[common.Hash]*ethTxData{ - common.HexToHash("0x1"): { - Txs: map[common.Hash]ethTxAdditionalData{ - common.HexToHash("0x1"): {}, - }, - }, - }, - }, - } - - for _, tt := range tests { - tt := tt - - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - - s := SequenceSender{ - ethTxData: tt.ethTxData, - ethTransactions: tt.ethTransactions, - } - - s.copyTxData(tt.txHash, tt.txData, tt.txsResults) - require.Equal(t, tt.expectedRthTxData, s.ethTxData) - require.Equal(t, tt.expectedEthTransactions, s.ethTransactions) - }) - } -} - -func Test_getResultAndUpdateEthTx(t *testing.T) { - t.Parallel() - - tests := []struct { - name string - hash common.Hash - ethTransactions map[common.Hash]*ethTxData - ethTxData map[common.Hash][]byte - getEthTxManager func(t *testing.T) *mocks.EthTxManagerMock - expectedErr error - }{ - { - name: "successfully updated", - hash: common.HexToHash("0x1"), - 
ethTransactions: map[common.Hash]*ethTxData{ - common.HexToHash("0x1"): {}, - }, - ethTxData: map[common.Hash][]byte{ - common.HexToHash("0x1"): {}, - }, - getEthTxManager: func(t *testing.T) *mocks.EthTxManagerMock { - t.Helper() - - mngr := mocks.NewEthTxManagerMock(t) - mngr.On("Result", mock.Anything, common.HexToHash("0x1")).Return(ethtxtypes.MonitoredTxResult{ - ID: common.HexToHash("0x1"), - Data: []byte{1, 2, 3}, - }, nil) - return mngr - }, - expectedErr: nil, - }, - { - name: "not found", - hash: common.HexToHash("0x1"), - ethTransactions: map[common.Hash]*ethTxData{ - common.HexToHash("0x1"): { - Gas: 100500, - }, - }, - ethTxData: map[common.Hash][]byte{ - common.HexToHash("0x1"): {}, - }, - getEthTxManager: func(t *testing.T) *mocks.EthTxManagerMock { - t.Helper() - - mngr := mocks.NewEthTxManagerMock(t) - mngr.On("Result", mock.Anything, common.HexToHash("0x1")).Return(ethtxtypes.MonitoredTxResult{}, ethtxmanager.ErrNotFound) - mngr.On("AddWithGas", mock.Anything, mock.Anything, big.NewInt(0), mock.Anything, mock.Anything, mock.Anything, uint64(100500)).Return(common.Hash{}, nil) - mngr.On("Result", mock.Anything, common.Hash{}).Return(ethtxtypes.MonitoredTxResult{ - ID: common.HexToHash("0x1"), - Data: []byte{1, 2, 3}, - }, nil) - return mngr - }, - expectedErr: nil, - }, - } - - for _, tt := range tests { - tt := tt - - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - - tmpFile, err := os.CreateTemp(os.TempDir(), tt.name+".tmp") - require.NoError(t, err) - defer os.RemoveAll(tmpFile.Name() + ".tmp") - - ss := SequenceSender{ - ethTransactions: tt.ethTransactions, - ethTxData: tt.ethTxData, - ethTxManager: tt.getEthTxManager(t), - cfg: Config{ - SequencesTxFileName: tmpFile.Name() + ".tmp", - }, - logger: log.GetDefaultLogger(), - } - - err = ss.getResultAndUpdateEthTx(context.Background(), tt.hash) - if tt.expectedErr != nil { - require.Equal(t, tt.expectedErr, err) - } else { - require.NoError(t, err) - } - }) - } -} - -func 
Test_loadSentSequencesTransactions(t *testing.T) { - t.Parallel() - - tx := ðTxData{ - FromBatch: 1, - ToBatch: 2, - OnMonitor: true, - To: common.BytesToAddress([]byte{1, 2, 3}), - Gas: 100500, - StateHistory: []string{"2021-09-01T15:04:05.000-07:00, *new, "}, - Txs: map[common.Hash]ethTxAdditionalData{}, - } - - tests := []struct { - name string - getFilename func(t *testing.T) string - expectEthTransactions map[common.Hash]*ethTxData - expectErr error - }{ - { - name: "successfully loaded", - getFilename: func(t *testing.T) string { - t.Helper() - - tmpFile, err := os.CreateTemp(os.TempDir(), "test") - require.NoError(t, err) - - ethTxDataBytes, err := json.Marshal(map[common.Hash]*ethTxData{ - common.HexToHash("0x1"): tx, - }) - require.NoError(t, err) - - _, err = tmpFile.Write(ethTxDataBytes) - require.NoError(t, err) - - t.Cleanup(func() { - err := os.Remove(tmpFile.Name()) - require.NoError(t, err) - }) - - return tmpFile.Name() - }, - expectEthTransactions: map[common.Hash]*ethTxData{ - common.HexToHash("0x1"): tx, - }, - }, - { - name: "file does not exist", - getFilename: func(t *testing.T) string { - t.Helper() - - return "does not exist.tmp" - }, - expectEthTransactions: map[common.Hash]*ethTxData{}, - }, - } - - for _, tt := range tests { - tt := tt - - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - - s := SequenceSender{ - cfg: Config{ - SequencesTxFileName: tt.getFilename(t), - }, - ethTransactions: map[common.Hash]*ethTxData{}, - logger: log.GetDefaultLogger(), - } - - err := s.loadSentSequencesTransactions() - if tt.expectErr != nil { - require.Equal(t, tt.expectErr, err) - } else { - require.NoError(t, err) - require.Equal(t, tt.expectEthTransactions, s.ethTransactions) - } - }) - } -} diff --git a/sequencesender/mocks/mock_etherman.go b/sequencesender/mocks/mock_etherman.go deleted file mode 100644 index 298d96c3..00000000 --- a/sequencesender/mocks/mock_etherman.go +++ /dev/null @@ -1,271 +0,0 @@ -// Code generated by mockery. 
DO NOT EDIT. - -package mocks - -import ( - context "context" - big "math/big" - - common "github.com/ethereum/go-ethereum/common" - - mock "github.com/stretchr/testify/mock" - - types "github.com/ethereum/go-ethereum/core/types" -) - -// EthermanMock is an autogenerated mock type for the Etherman type -type EthermanMock struct { - mock.Mock -} - -type EthermanMock_Expecter struct { - mock *mock.Mock -} - -func (_m *EthermanMock) EXPECT() *EthermanMock_Expecter { - return &EthermanMock_Expecter{mock: &_m.Mock} -} - -// CurrentNonce provides a mock function with given fields: ctx, address -func (_m *EthermanMock) CurrentNonce(ctx context.Context, address common.Address) (uint64, error) { - ret := _m.Called(ctx, address) - - if len(ret) == 0 { - panic("no return value specified for CurrentNonce") - } - - var r0 uint64 - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, common.Address) (uint64, error)); ok { - return rf(ctx, address) - } - if rf, ok := ret.Get(0).(func(context.Context, common.Address) uint64); ok { - r0 = rf(ctx, address) - } else { - r0 = ret.Get(0).(uint64) - } - - if rf, ok := ret.Get(1).(func(context.Context, common.Address) error); ok { - r1 = rf(ctx, address) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// EthermanMock_CurrentNonce_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'CurrentNonce' -type EthermanMock_CurrentNonce_Call struct { - *mock.Call -} - -// CurrentNonce is a helper method to define mock.On call -// - ctx context.Context -// - address common.Address -func (_e *EthermanMock_Expecter) CurrentNonce(ctx interface{}, address interface{}) *EthermanMock_CurrentNonce_Call { - return &EthermanMock_CurrentNonce_Call{Call: _e.mock.On("CurrentNonce", ctx, address)} -} - -func (_c *EthermanMock_CurrentNonce_Call) Run(run func(ctx context.Context, address common.Address)) *EthermanMock_CurrentNonce_Call { - _c.Call.Run(func(args mock.Arguments) { - 
run(args[0].(context.Context), args[1].(common.Address)) - }) - return _c -} - -func (_c *EthermanMock_CurrentNonce_Call) Return(_a0 uint64, _a1 error) *EthermanMock_CurrentNonce_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *EthermanMock_CurrentNonce_Call) RunAndReturn(run func(context.Context, common.Address) (uint64, error)) *EthermanMock_CurrentNonce_Call { - _c.Call.Return(run) - return _c -} - -// EstimateGas provides a mock function with given fields: ctx, from, to, value, data -func (_m *EthermanMock) EstimateGas(ctx context.Context, from common.Address, to *common.Address, value *big.Int, data []byte) (uint64, error) { - ret := _m.Called(ctx, from, to, value, data) - - if len(ret) == 0 { - panic("no return value specified for EstimateGas") - } - - var r0 uint64 - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, common.Address, *common.Address, *big.Int, []byte) (uint64, error)); ok { - return rf(ctx, from, to, value, data) - } - if rf, ok := ret.Get(0).(func(context.Context, common.Address, *common.Address, *big.Int, []byte) uint64); ok { - r0 = rf(ctx, from, to, value, data) - } else { - r0 = ret.Get(0).(uint64) - } - - if rf, ok := ret.Get(1).(func(context.Context, common.Address, *common.Address, *big.Int, []byte) error); ok { - r1 = rf(ctx, from, to, value, data) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// EthermanMock_EstimateGas_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'EstimateGas' -type EthermanMock_EstimateGas_Call struct { - *mock.Call -} - -// EstimateGas is a helper method to define mock.On call -// - ctx context.Context -// - from common.Address -// - to *common.Address -// - value *big.Int -// - data []byte -func (_e *EthermanMock_Expecter) EstimateGas(ctx interface{}, from interface{}, to interface{}, value interface{}, data interface{}) *EthermanMock_EstimateGas_Call { - return &EthermanMock_EstimateGas_Call{Call: _e.mock.On("EstimateGas", ctx, 
from, to, value, data)} -} - -func (_c *EthermanMock_EstimateGas_Call) Run(run func(ctx context.Context, from common.Address, to *common.Address, value *big.Int, data []byte)) *EthermanMock_EstimateGas_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(common.Address), args[2].(*common.Address), args[3].(*big.Int), args[4].([]byte)) - }) - return _c -} - -func (_c *EthermanMock_EstimateGas_Call) Return(_a0 uint64, _a1 error) *EthermanMock_EstimateGas_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *EthermanMock_EstimateGas_Call) RunAndReturn(run func(context.Context, common.Address, *common.Address, *big.Int, []byte) (uint64, error)) *EthermanMock_EstimateGas_Call { - _c.Call.Return(run) - return _c -} - -// GetLatestBatchNumber provides a mock function with no fields -func (_m *EthermanMock) GetLatestBatchNumber() (uint64, error) { - ret := _m.Called() - - if len(ret) == 0 { - panic("no return value specified for GetLatestBatchNumber") - } - - var r0 uint64 - var r1 error - if rf, ok := ret.Get(0).(func() (uint64, error)); ok { - return rf() - } - if rf, ok := ret.Get(0).(func() uint64); ok { - r0 = rf() - } else { - r0 = ret.Get(0).(uint64) - } - - if rf, ok := ret.Get(1).(func() error); ok { - r1 = rf() - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// EthermanMock_GetLatestBatchNumber_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetLatestBatchNumber' -type EthermanMock_GetLatestBatchNumber_Call struct { - *mock.Call -} - -// GetLatestBatchNumber is a helper method to define mock.On call -func (_e *EthermanMock_Expecter) GetLatestBatchNumber() *EthermanMock_GetLatestBatchNumber_Call { - return &EthermanMock_GetLatestBatchNumber_Call{Call: _e.mock.On("GetLatestBatchNumber")} -} - -func (_c *EthermanMock_GetLatestBatchNumber_Call) Run(run func()) *EthermanMock_GetLatestBatchNumber_Call { - _c.Call.Run(func(args mock.Arguments) { - run() - }) - return _c 
-} - -func (_c *EthermanMock_GetLatestBatchNumber_Call) Return(_a0 uint64, _a1 error) *EthermanMock_GetLatestBatchNumber_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *EthermanMock_GetLatestBatchNumber_Call) RunAndReturn(run func() (uint64, error)) *EthermanMock_GetLatestBatchNumber_Call { - _c.Call.Return(run) - return _c -} - -// GetLatestBlockHeader provides a mock function with given fields: ctx -func (_m *EthermanMock) GetLatestBlockHeader(ctx context.Context) (*types.Header, error) { - ret := _m.Called(ctx) - - if len(ret) == 0 { - panic("no return value specified for GetLatestBlockHeader") - } - - var r0 *types.Header - var r1 error - if rf, ok := ret.Get(0).(func(context.Context) (*types.Header, error)); ok { - return rf(ctx) - } - if rf, ok := ret.Get(0).(func(context.Context) *types.Header); ok { - r0 = rf(ctx) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(*types.Header) - } - } - - if rf, ok := ret.Get(1).(func(context.Context) error); ok { - r1 = rf(ctx) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// EthermanMock_GetLatestBlockHeader_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetLatestBlockHeader' -type EthermanMock_GetLatestBlockHeader_Call struct { - *mock.Call -} - -// GetLatestBlockHeader is a helper method to define mock.On call -// - ctx context.Context -func (_e *EthermanMock_Expecter) GetLatestBlockHeader(ctx interface{}) *EthermanMock_GetLatestBlockHeader_Call { - return &EthermanMock_GetLatestBlockHeader_Call{Call: _e.mock.On("GetLatestBlockHeader", ctx)} -} - -func (_c *EthermanMock_GetLatestBlockHeader_Call) Run(run func(ctx context.Context)) *EthermanMock_GetLatestBlockHeader_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context)) - }) - return _c -} - -func (_c *EthermanMock_GetLatestBlockHeader_Call) Return(_a0 *types.Header, _a1 error) *EthermanMock_GetLatestBlockHeader_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func 
(_c *EthermanMock_GetLatestBlockHeader_Call) RunAndReturn(run func(context.Context) (*types.Header, error)) *EthermanMock_GetLatestBlockHeader_Call { - _c.Call.Return(run) - return _c -} - -// NewEthermanMock creates a new instance of EthermanMock. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. -// The first argument is typically a *testing.T value. -func NewEthermanMock(t interface { - mock.TestingT - Cleanup(func()) -}) *EthermanMock { - mock := &EthermanMock{} - mock.Mock.Test(t) - - t.Cleanup(func() { mock.AssertExpectations(t) }) - - return mock -} diff --git a/sequencesender/mocks/mock_ethtxmanager.go b/sequencesender/mocks/mock_ethtxmanager.go deleted file mode 100644 index b8a58d0d..00000000 --- a/sequencesender/mocks/mock_ethtxmanager.go +++ /dev/null @@ -1,302 +0,0 @@ -// Code generated by mockery. DO NOT EDIT. - -package mocks - -import ( - context "context" - big "math/big" - - common "github.com/ethereum/go-ethereum/common" - - mock "github.com/stretchr/testify/mock" - - types "github.com/ethereum/go-ethereum/core/types" - - zkevm_ethtx_managertypes "github.com/0xPolygon/zkevm-ethtx-manager/types" -) - -// EthTxManagerMock is an autogenerated mock type for the EthTxManager type -type EthTxManagerMock struct { - mock.Mock -} - -type EthTxManagerMock_Expecter struct { - mock *mock.Mock -} - -func (_m *EthTxManagerMock) EXPECT() *EthTxManagerMock_Expecter { - return &EthTxManagerMock_Expecter{mock: &_m.Mock} -} - -// AddWithGas provides a mock function with given fields: ctx, to, value, data, gasOffset, sidecar, gas -func (_m *EthTxManagerMock) AddWithGas(ctx context.Context, to *common.Address, value *big.Int, data []byte, gasOffset uint64, sidecar *types.BlobTxSidecar, gas uint64) (common.Hash, error) { - ret := _m.Called(ctx, to, value, data, gasOffset, sidecar, gas) - - if len(ret) == 0 { - panic("no return value specified for AddWithGas") - } - - var r0 common.Hash - var r1 error - if rf, ok 
:= ret.Get(0).(func(context.Context, *common.Address, *big.Int, []byte, uint64, *types.BlobTxSidecar, uint64) (common.Hash, error)); ok { - return rf(ctx, to, value, data, gasOffset, sidecar, gas) - } - if rf, ok := ret.Get(0).(func(context.Context, *common.Address, *big.Int, []byte, uint64, *types.BlobTxSidecar, uint64) common.Hash); ok { - r0 = rf(ctx, to, value, data, gasOffset, sidecar, gas) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(common.Hash) - } - } - - if rf, ok := ret.Get(1).(func(context.Context, *common.Address, *big.Int, []byte, uint64, *types.BlobTxSidecar, uint64) error); ok { - r1 = rf(ctx, to, value, data, gasOffset, sidecar, gas) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// EthTxManagerMock_AddWithGas_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'AddWithGas' -type EthTxManagerMock_AddWithGas_Call struct { - *mock.Call -} - -// AddWithGas is a helper method to define mock.On call -// - ctx context.Context -// - to *common.Address -// - value *big.Int -// - data []byte -// - gasOffset uint64 -// - sidecar *types.BlobTxSidecar -// - gas uint64 -func (_e *EthTxManagerMock_Expecter) AddWithGas(ctx interface{}, to interface{}, value interface{}, data interface{}, gasOffset interface{}, sidecar interface{}, gas interface{}) *EthTxManagerMock_AddWithGas_Call { - return &EthTxManagerMock_AddWithGas_Call{Call: _e.mock.On("AddWithGas", ctx, to, value, data, gasOffset, sidecar, gas)} -} - -func (_c *EthTxManagerMock_AddWithGas_Call) Run(run func(ctx context.Context, to *common.Address, value *big.Int, data []byte, gasOffset uint64, sidecar *types.BlobTxSidecar, gas uint64)) *EthTxManagerMock_AddWithGas_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(*common.Address), args[2].(*big.Int), args[3].([]byte), args[4].(uint64), args[5].(*types.BlobTxSidecar), args[6].(uint64)) - }) - return _c -} - -func (_c *EthTxManagerMock_AddWithGas_Call) 
Return(_a0 common.Hash, _a1 error) *EthTxManagerMock_AddWithGas_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *EthTxManagerMock_AddWithGas_Call) RunAndReturn(run func(context.Context, *common.Address, *big.Int, []byte, uint64, *types.BlobTxSidecar, uint64) (common.Hash, error)) *EthTxManagerMock_AddWithGas_Call { - _c.Call.Return(run) - return _c -} - -// Remove provides a mock function with given fields: ctx, hash -func (_m *EthTxManagerMock) Remove(ctx context.Context, hash common.Hash) error { - ret := _m.Called(ctx, hash) - - if len(ret) == 0 { - panic("no return value specified for Remove") - } - - var r0 error - if rf, ok := ret.Get(0).(func(context.Context, common.Hash) error); ok { - r0 = rf(ctx, hash) - } else { - r0 = ret.Error(0) - } - - return r0 -} - -// EthTxManagerMock_Remove_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Remove' -type EthTxManagerMock_Remove_Call struct { - *mock.Call -} - -// Remove is a helper method to define mock.On call -// - ctx context.Context -// - hash common.Hash -func (_e *EthTxManagerMock_Expecter) Remove(ctx interface{}, hash interface{}) *EthTxManagerMock_Remove_Call { - return &EthTxManagerMock_Remove_Call{Call: _e.mock.On("Remove", ctx, hash)} -} - -func (_c *EthTxManagerMock_Remove_Call) Run(run func(ctx context.Context, hash common.Hash)) *EthTxManagerMock_Remove_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(common.Hash)) - }) - return _c -} - -func (_c *EthTxManagerMock_Remove_Call) Return(_a0 error) *EthTxManagerMock_Remove_Call { - _c.Call.Return(_a0) - return _c -} - -func (_c *EthTxManagerMock_Remove_Call) RunAndReturn(run func(context.Context, common.Hash) error) *EthTxManagerMock_Remove_Call { - _c.Call.Return(run) - return _c -} - -// Result provides a mock function with given fields: ctx, hash -func (_m *EthTxManagerMock) Result(ctx context.Context, hash common.Hash) 
(zkevm_ethtx_managertypes.MonitoredTxResult, error) { - ret := _m.Called(ctx, hash) - - if len(ret) == 0 { - panic("no return value specified for Result") - } - - var r0 zkevm_ethtx_managertypes.MonitoredTxResult - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, common.Hash) (zkevm_ethtx_managertypes.MonitoredTxResult, error)); ok { - return rf(ctx, hash) - } - if rf, ok := ret.Get(0).(func(context.Context, common.Hash) zkevm_ethtx_managertypes.MonitoredTxResult); ok { - r0 = rf(ctx, hash) - } else { - r0 = ret.Get(0).(zkevm_ethtx_managertypes.MonitoredTxResult) - } - - if rf, ok := ret.Get(1).(func(context.Context, common.Hash) error); ok { - r1 = rf(ctx, hash) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// EthTxManagerMock_Result_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Result' -type EthTxManagerMock_Result_Call struct { - *mock.Call -} - -// Result is a helper method to define mock.On call -// - ctx context.Context -// - hash common.Hash -func (_e *EthTxManagerMock_Expecter) Result(ctx interface{}, hash interface{}) *EthTxManagerMock_Result_Call { - return &EthTxManagerMock_Result_Call{Call: _e.mock.On("Result", ctx, hash)} -} - -func (_c *EthTxManagerMock_Result_Call) Run(run func(ctx context.Context, hash common.Hash)) *EthTxManagerMock_Result_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(common.Hash)) - }) - return _c -} - -func (_c *EthTxManagerMock_Result_Call) Return(_a0 zkevm_ethtx_managertypes.MonitoredTxResult, _a1 error) *EthTxManagerMock_Result_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *EthTxManagerMock_Result_Call) RunAndReturn(run func(context.Context, common.Hash) (zkevm_ethtx_managertypes.MonitoredTxResult, error)) *EthTxManagerMock_Result_Call { - _c.Call.Return(run) - return _c -} - -// ResultsByStatus provides a mock function with given fields: ctx, status -func (_m *EthTxManagerMock) 
ResultsByStatus(ctx context.Context, status []zkevm_ethtx_managertypes.MonitoredTxStatus) ([]zkevm_ethtx_managertypes.MonitoredTxResult, error) { - ret := _m.Called(ctx, status) - - if len(ret) == 0 { - panic("no return value specified for ResultsByStatus") - } - - var r0 []zkevm_ethtx_managertypes.MonitoredTxResult - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, []zkevm_ethtx_managertypes.MonitoredTxStatus) ([]zkevm_ethtx_managertypes.MonitoredTxResult, error)); ok { - return rf(ctx, status) - } - if rf, ok := ret.Get(0).(func(context.Context, []zkevm_ethtx_managertypes.MonitoredTxStatus) []zkevm_ethtx_managertypes.MonitoredTxResult); ok { - r0 = rf(ctx, status) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).([]zkevm_ethtx_managertypes.MonitoredTxResult) - } - } - - if rf, ok := ret.Get(1).(func(context.Context, []zkevm_ethtx_managertypes.MonitoredTxStatus) error); ok { - r1 = rf(ctx, status) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// EthTxManagerMock_ResultsByStatus_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ResultsByStatus' -type EthTxManagerMock_ResultsByStatus_Call struct { - *mock.Call -} - -// ResultsByStatus is a helper method to define mock.On call -// - ctx context.Context -// - status []zkevm_ethtx_managertypes.MonitoredTxStatus -func (_e *EthTxManagerMock_Expecter) ResultsByStatus(ctx interface{}, status interface{}) *EthTxManagerMock_ResultsByStatus_Call { - return &EthTxManagerMock_ResultsByStatus_Call{Call: _e.mock.On("ResultsByStatus", ctx, status)} -} - -func (_c *EthTxManagerMock_ResultsByStatus_Call) Run(run func(ctx context.Context, status []zkevm_ethtx_managertypes.MonitoredTxStatus)) *EthTxManagerMock_ResultsByStatus_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].([]zkevm_ethtx_managertypes.MonitoredTxStatus)) - }) - return _c -} - -func (_c *EthTxManagerMock_ResultsByStatus_Call) Return(_a0 
[]zkevm_ethtx_managertypes.MonitoredTxResult, _a1 error) *EthTxManagerMock_ResultsByStatus_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *EthTxManagerMock_ResultsByStatus_Call) RunAndReturn(run func(context.Context, []zkevm_ethtx_managertypes.MonitoredTxStatus) ([]zkevm_ethtx_managertypes.MonitoredTxResult, error)) *EthTxManagerMock_ResultsByStatus_Call { - _c.Call.Return(run) - return _c -} - -// Start provides a mock function with no fields -func (_m *EthTxManagerMock) Start() { - _m.Called() -} - -// EthTxManagerMock_Start_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Start' -type EthTxManagerMock_Start_Call struct { - *mock.Call -} - -// Start is a helper method to define mock.On call -func (_e *EthTxManagerMock_Expecter) Start() *EthTxManagerMock_Start_Call { - return &EthTxManagerMock_Start_Call{Call: _e.mock.On("Start")} -} - -func (_c *EthTxManagerMock_Start_Call) Run(run func()) *EthTxManagerMock_Start_Call { - _c.Call.Run(func(args mock.Arguments) { - run() - }) - return _c -} - -func (_c *EthTxManagerMock_Start_Call) Return() *EthTxManagerMock_Start_Call { - _c.Call.Return() - return _c -} - -func (_c *EthTxManagerMock_Start_Call) RunAndReturn(run func()) *EthTxManagerMock_Start_Call { - _c.Run(run) - return _c -} - -// NewEthTxManagerMock creates a new instance of EthTxManagerMock. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. -// The first argument is typically a *testing.T value. -func NewEthTxManagerMock(t interface { - mock.TestingT - Cleanup(func()) -}) *EthTxManagerMock { - mock := &EthTxManagerMock{} - mock.Mock.Test(t) - - t.Cleanup(func() { mock.AssertExpectations(t) }) - - return mock -} diff --git a/sequencesender/mocks/mock_rpc.go b/sequencesender/mocks/mock_rpc.go deleted file mode 100644 index 8dd60588..00000000 --- a/sequencesender/mocks/mock_rpc.go +++ /dev/null @@ -1,153 +0,0 @@ -// Code generated by mockery. 
DO NOT EDIT. - -package mocks - -import ( - mock "github.com/stretchr/testify/mock" - - types "github.com/agglayer/aggkit/rpc/types" -) - -// RPCInterfaceMock is an autogenerated mock type for the RPCInterface type -type RPCInterfaceMock struct { - mock.Mock -} - -type RPCInterfaceMock_Expecter struct { - mock *mock.Mock -} - -func (_m *RPCInterfaceMock) EXPECT() *RPCInterfaceMock_Expecter { - return &RPCInterfaceMock_Expecter{mock: &_m.Mock} -} - -// GetBatch provides a mock function with given fields: batchNumber -func (_m *RPCInterfaceMock) GetBatch(batchNumber uint64) (*types.RPCBatch, error) { - ret := _m.Called(batchNumber) - - if len(ret) == 0 { - panic("no return value specified for GetBatch") - } - - var r0 *types.RPCBatch - var r1 error - if rf, ok := ret.Get(0).(func(uint64) (*types.RPCBatch, error)); ok { - return rf(batchNumber) - } - if rf, ok := ret.Get(0).(func(uint64) *types.RPCBatch); ok { - r0 = rf(batchNumber) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(*types.RPCBatch) - } - } - - if rf, ok := ret.Get(1).(func(uint64) error); ok { - r1 = rf(batchNumber) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// RPCInterfaceMock_GetBatch_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetBatch' -type RPCInterfaceMock_GetBatch_Call struct { - *mock.Call -} - -// GetBatch is a helper method to define mock.On call -// - batchNumber uint64 -func (_e *RPCInterfaceMock_Expecter) GetBatch(batchNumber interface{}) *RPCInterfaceMock_GetBatch_Call { - return &RPCInterfaceMock_GetBatch_Call{Call: _e.mock.On("GetBatch", batchNumber)} -} - -func (_c *RPCInterfaceMock_GetBatch_Call) Run(run func(batchNumber uint64)) *RPCInterfaceMock_GetBatch_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(uint64)) - }) - return _c -} - -func (_c *RPCInterfaceMock_GetBatch_Call) Return(_a0 *types.RPCBatch, _a1 error) *RPCInterfaceMock_GetBatch_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c 
*RPCInterfaceMock_GetBatch_Call) RunAndReturn(run func(uint64) (*types.RPCBatch, error)) *RPCInterfaceMock_GetBatch_Call { - _c.Call.Return(run) - return _c -} - -// GetWitness provides a mock function with given fields: batchNumber, fullWitness -func (_m *RPCInterfaceMock) GetWitness(batchNumber uint64, fullWitness bool) ([]byte, error) { - ret := _m.Called(batchNumber, fullWitness) - - if len(ret) == 0 { - panic("no return value specified for GetWitness") - } - - var r0 []byte - var r1 error - if rf, ok := ret.Get(0).(func(uint64, bool) ([]byte, error)); ok { - return rf(batchNumber, fullWitness) - } - if rf, ok := ret.Get(0).(func(uint64, bool) []byte); ok { - r0 = rf(batchNumber, fullWitness) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).([]byte) - } - } - - if rf, ok := ret.Get(1).(func(uint64, bool) error); ok { - r1 = rf(batchNumber, fullWitness) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// RPCInterfaceMock_GetWitness_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetWitness' -type RPCInterfaceMock_GetWitness_Call struct { - *mock.Call -} - -// GetWitness is a helper method to define mock.On call -// - batchNumber uint64 -// - fullWitness bool -func (_e *RPCInterfaceMock_Expecter) GetWitness(batchNumber interface{}, fullWitness interface{}) *RPCInterfaceMock_GetWitness_Call { - return &RPCInterfaceMock_GetWitness_Call{Call: _e.mock.On("GetWitness", batchNumber, fullWitness)} -} - -func (_c *RPCInterfaceMock_GetWitness_Call) Run(run func(batchNumber uint64, fullWitness bool)) *RPCInterfaceMock_GetWitness_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(uint64), args[1].(bool)) - }) - return _c -} - -func (_c *RPCInterfaceMock_GetWitness_Call) Return(_a0 []byte, _a1 error) *RPCInterfaceMock_GetWitness_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *RPCInterfaceMock_GetWitness_Call) RunAndReturn(run func(uint64, bool) ([]byte, error)) 
*RPCInterfaceMock_GetWitness_Call { - _c.Call.Return(run) - return _c -} - -// NewRPCInterfaceMock creates a new instance of RPCInterfaceMock. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. -// The first argument is typically a *testing.T value. -func NewRPCInterfaceMock(t interface { - mock.TestingT - Cleanup(func()) -}) *RPCInterfaceMock { - mock := &RPCInterfaceMock{} - mock.Mock.Test(t) - - t.Cleanup(func() { mock.AssertExpectations(t) }) - - return mock -} diff --git a/sequencesender/mocks/mock_txbuilder.go b/sequencesender/mocks/mock_txbuilder.go deleted file mode 100644 index 5bb152b8..00000000 --- a/sequencesender/mocks/mock_txbuilder.go +++ /dev/null @@ -1,367 +0,0 @@ -// Code generated by mockery v2.40.1. DO NOT EDIT. - -package mocks - -import ( - context "context" - - common "github.com/ethereum/go-ethereum/common" - - datastream "github.com/agglayer/aggkit/state/datastream" - - mock "github.com/stretchr/testify/mock" - - seqsendertypes "github.com/agglayer/aggkit/sequencesender/seqsendertypes" - - txbuilder "github.com/agglayer/aggkit/sequencesender/txbuilder" - - types "github.com/ethereum/go-ethereum/core/types" -) - -// TxBuilderMock is an autogenerated mock type for the TxBuilder type -type TxBuilderMock struct { - mock.Mock -} - -type TxBuilderMock_Expecter struct { - mock *mock.Mock -} - -func (_m *TxBuilderMock) EXPECT() *TxBuilderMock_Expecter { - return &TxBuilderMock_Expecter{mock: &_m.Mock} -} - -// BuildSequenceBatchesTx provides a mock function with given fields: ctx, sequences -func (_m *TxBuilderMock) BuildSequenceBatchesTx(ctx context.Context, sequences seqsendertypes.Sequence) (*types.Transaction, error) { - ret := _m.Called(ctx, sequences) - - if len(ret) == 0 { - panic("no return value specified for BuildSequenceBatchesTx") - } - - var r0 *types.Transaction - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, seqsendertypes.Sequence) (*types.Transaction, error)); 
ok { - return rf(ctx, sequences) - } - if rf, ok := ret.Get(0).(func(context.Context, seqsendertypes.Sequence) *types.Transaction); ok { - r0 = rf(ctx, sequences) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(*types.Transaction) - } - } - - if rf, ok := ret.Get(1).(func(context.Context, seqsendertypes.Sequence) error); ok { - r1 = rf(ctx, sequences) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// TxBuilderMock_BuildSequenceBatchesTx_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'BuildSequenceBatchesTx' -type TxBuilderMock_BuildSequenceBatchesTx_Call struct { - *mock.Call -} - -// BuildSequenceBatchesTx is a helper method to define mock.On call -// - ctx context.Context -// - sequences seqsendertypes.Sequence -func (_e *TxBuilderMock_Expecter) BuildSequenceBatchesTx(ctx interface{}, sequences interface{}) *TxBuilderMock_BuildSequenceBatchesTx_Call { - return &TxBuilderMock_BuildSequenceBatchesTx_Call{Call: _e.mock.On("BuildSequenceBatchesTx", ctx, sequences)} -} - -func (_c *TxBuilderMock_BuildSequenceBatchesTx_Call) Run(run func(ctx context.Context, sequences seqsendertypes.Sequence)) *TxBuilderMock_BuildSequenceBatchesTx_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(seqsendertypes.Sequence)) - }) - return _c -} - -func (_c *TxBuilderMock_BuildSequenceBatchesTx_Call) Return(_a0 *types.Transaction, _a1 error) *TxBuilderMock_BuildSequenceBatchesTx_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *TxBuilderMock_BuildSequenceBatchesTx_Call) RunAndReturn(run func(context.Context, seqsendertypes.Sequence) (*types.Transaction, error)) *TxBuilderMock_BuildSequenceBatchesTx_Call { - _c.Call.Return(run) - return _c -} - -// NewBatchFromL2Block provides a mock function with given fields: l2Block -func (_m *TxBuilderMock) NewBatchFromL2Block(l2Block *datastream.L2Block) seqsendertypes.Batch { - ret := _m.Called(l2Block) - - if len(ret) == 0 { - panic("no 
return value specified for NewBatchFromL2Block") - } - - var r0 seqsendertypes.Batch - if rf, ok := ret.Get(0).(func(*datastream.L2Block) seqsendertypes.Batch); ok { - r0 = rf(l2Block) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(seqsendertypes.Batch) - } - } - - return r0 -} - -// TxBuilderMock_NewBatchFromL2Block_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'NewBatchFromL2Block' -type TxBuilderMock_NewBatchFromL2Block_Call struct { - *mock.Call -} - -// NewBatchFromL2Block is a helper method to define mock.On call -// - l2Block *datastream.L2Block -func (_e *TxBuilderMock_Expecter) NewBatchFromL2Block(l2Block interface{}) *TxBuilderMock_NewBatchFromL2Block_Call { - return &TxBuilderMock_NewBatchFromL2Block_Call{Call: _e.mock.On("NewBatchFromL2Block", l2Block)} -} - -func (_c *TxBuilderMock_NewBatchFromL2Block_Call) Run(run func(l2Block *datastream.L2Block)) *TxBuilderMock_NewBatchFromL2Block_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(*datastream.L2Block)) - }) - return _c -} - -func (_c *TxBuilderMock_NewBatchFromL2Block_Call) Return(_a0 seqsendertypes.Batch) *TxBuilderMock_NewBatchFromL2Block_Call { - _c.Call.Return(_a0) - return _c -} - -func (_c *TxBuilderMock_NewBatchFromL2Block_Call) RunAndReturn(run func(*datastream.L2Block) seqsendertypes.Batch) *TxBuilderMock_NewBatchFromL2Block_Call { - _c.Call.Return(run) - return _c -} - -// NewSequence provides a mock function with given fields: ctx, batches, coinbase -func (_m *TxBuilderMock) NewSequence(ctx context.Context, batches []seqsendertypes.Batch, coinbase common.Address) (seqsendertypes.Sequence, error) { - ret := _m.Called(ctx, batches, coinbase) - - if len(ret) == 0 { - panic("no return value specified for NewSequence") - } - - var r0 seqsendertypes.Sequence - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, []seqsendertypes.Batch, common.Address) (seqsendertypes.Sequence, error)); ok { - return rf(ctx, batches, 
coinbase) - } - if rf, ok := ret.Get(0).(func(context.Context, []seqsendertypes.Batch, common.Address) seqsendertypes.Sequence); ok { - r0 = rf(ctx, batches, coinbase) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(seqsendertypes.Sequence) - } - } - - if rf, ok := ret.Get(1).(func(context.Context, []seqsendertypes.Batch, common.Address) error); ok { - r1 = rf(ctx, batches, coinbase) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// TxBuilderMock_NewSequence_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'NewSequence' -type TxBuilderMock_NewSequence_Call struct { - *mock.Call -} - -// NewSequence is a helper method to define mock.On call -// - ctx context.Context -// - batches []seqsendertypes.Batch -// - coinbase common.Address -func (_e *TxBuilderMock_Expecter) NewSequence(ctx interface{}, batches interface{}, coinbase interface{}) *TxBuilderMock_NewSequence_Call { - return &TxBuilderMock_NewSequence_Call{Call: _e.mock.On("NewSequence", ctx, batches, coinbase)} -} - -func (_c *TxBuilderMock_NewSequence_Call) Run(run func(ctx context.Context, batches []seqsendertypes.Batch, coinbase common.Address)) *TxBuilderMock_NewSequence_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].([]seqsendertypes.Batch), args[2].(common.Address)) - }) - return _c -} - -func (_c *TxBuilderMock_NewSequence_Call) Return(_a0 seqsendertypes.Sequence, _a1 error) *TxBuilderMock_NewSequence_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *TxBuilderMock_NewSequence_Call) RunAndReturn(run func(context.Context, []seqsendertypes.Batch, common.Address) (seqsendertypes.Sequence, error)) *TxBuilderMock_NewSequence_Call { - _c.Call.Return(run) - return _c -} - -// NewSequenceIfWorthToSend provides a mock function with given fields: ctx, sequenceBatches, l2Coinbase, batchNumber -func (_m *TxBuilderMock) NewSequenceIfWorthToSend(ctx context.Context, sequenceBatches []seqsendertypes.Batch, 
l2Coinbase common.Address, batchNumber uint64) (seqsendertypes.Sequence, error) { - ret := _m.Called(ctx, sequenceBatches, l2Coinbase, batchNumber) - - if len(ret) == 0 { - panic("no return value specified for NewSequenceIfWorthToSend") - } - - var r0 seqsendertypes.Sequence - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, []seqsendertypes.Batch, common.Address, uint64) (seqsendertypes.Sequence, error)); ok { - return rf(ctx, sequenceBatches, l2Coinbase, batchNumber) - } - if rf, ok := ret.Get(0).(func(context.Context, []seqsendertypes.Batch, common.Address, uint64) seqsendertypes.Sequence); ok { - r0 = rf(ctx, sequenceBatches, l2Coinbase, batchNumber) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(seqsendertypes.Sequence) - } - } - - if rf, ok := ret.Get(1).(func(context.Context, []seqsendertypes.Batch, common.Address, uint64) error); ok { - r1 = rf(ctx, sequenceBatches, l2Coinbase, batchNumber) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// TxBuilderMock_NewSequenceIfWorthToSend_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'NewSequenceIfWorthToSend' -type TxBuilderMock_NewSequenceIfWorthToSend_Call struct { - *mock.Call -} - -// NewSequenceIfWorthToSend is a helper method to define mock.On call -// - ctx context.Context -// - sequenceBatches []seqsendertypes.Batch -// - l2Coinbase common.Address -// - batchNumber uint64 -func (_e *TxBuilderMock_Expecter) NewSequenceIfWorthToSend(ctx interface{}, sequenceBatches interface{}, l2Coinbase interface{}, batchNumber interface{}) *TxBuilderMock_NewSequenceIfWorthToSend_Call { - return &TxBuilderMock_NewSequenceIfWorthToSend_Call{Call: _e.mock.On("NewSequenceIfWorthToSend", ctx, sequenceBatches, l2Coinbase, batchNumber)} -} - -func (_c *TxBuilderMock_NewSequenceIfWorthToSend_Call) Run(run func(ctx context.Context, sequenceBatches []seqsendertypes.Batch, l2Coinbase common.Address, batchNumber uint64)) 
*TxBuilderMock_NewSequenceIfWorthToSend_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].([]seqsendertypes.Batch), args[2].(common.Address), args[3].(uint64)) - }) - return _c -} - -func (_c *TxBuilderMock_NewSequenceIfWorthToSend_Call) Return(_a0 seqsendertypes.Sequence, _a1 error) *TxBuilderMock_NewSequenceIfWorthToSend_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *TxBuilderMock_NewSequenceIfWorthToSend_Call) RunAndReturn(run func(context.Context, []seqsendertypes.Batch, common.Address, uint64) (seqsendertypes.Sequence, error)) *TxBuilderMock_NewSequenceIfWorthToSend_Call { - _c.Call.Return(run) - return _c -} - -// SetCondNewSeq provides a mock function with given fields: cond -func (_m *TxBuilderMock) SetCondNewSeq(cond txbuilder.CondNewSequence) txbuilder.CondNewSequence { - ret := _m.Called(cond) - - if len(ret) == 0 { - panic("no return value specified for SetCondNewSeq") - } - - var r0 txbuilder.CondNewSequence - if rf, ok := ret.Get(0).(func(txbuilder.CondNewSequence) txbuilder.CondNewSequence); ok { - r0 = rf(cond) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(txbuilder.CondNewSequence) - } - } - - return r0 -} - -// TxBuilderMock_SetCondNewSeq_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'SetCondNewSeq' -type TxBuilderMock_SetCondNewSeq_Call struct { - *mock.Call -} - -// SetCondNewSeq is a helper method to define mock.On call -// - cond txbuilder.CondNewSequence -func (_e *TxBuilderMock_Expecter) SetCondNewSeq(cond interface{}) *TxBuilderMock_SetCondNewSeq_Call { - return &TxBuilderMock_SetCondNewSeq_Call{Call: _e.mock.On("SetCondNewSeq", cond)} -} - -func (_c *TxBuilderMock_SetCondNewSeq_Call) Run(run func(cond txbuilder.CondNewSequence)) *TxBuilderMock_SetCondNewSeq_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(txbuilder.CondNewSequence)) - }) - return _c -} - -func (_c *TxBuilderMock_SetCondNewSeq_Call) Return(_a0 
txbuilder.CondNewSequence) *TxBuilderMock_SetCondNewSeq_Call { - _c.Call.Return(_a0) - return _c -} - -func (_c *TxBuilderMock_SetCondNewSeq_Call) RunAndReturn(run func(txbuilder.CondNewSequence) txbuilder.CondNewSequence) *TxBuilderMock_SetCondNewSeq_Call { - _c.Call.Return(run) - return _c -} - -// String provides a mock function with given fields: -func (_m *TxBuilderMock) String() string { - ret := _m.Called() - - if len(ret) == 0 { - panic("no return value specified for String") - } - - var r0 string - if rf, ok := ret.Get(0).(func() string); ok { - r0 = rf() - } else { - r0 = ret.Get(0).(string) - } - - return r0 -} - -// TxBuilderMock_String_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'String' -type TxBuilderMock_String_Call struct { - *mock.Call -} - -// String is a helper method to define mock.On call -func (_e *TxBuilderMock_Expecter) String() *TxBuilderMock_String_Call { - return &TxBuilderMock_String_Call{Call: _e.mock.On("String")} -} - -func (_c *TxBuilderMock_String_Call) Run(run func()) *TxBuilderMock_String_Call { - _c.Call.Run(func(args mock.Arguments) { - run() - }) - return _c -} - -func (_c *TxBuilderMock_String_Call) Return(_a0 string) *TxBuilderMock_String_Call { - _c.Call.Return(_a0) - return _c -} - -func (_c *TxBuilderMock_String_Call) RunAndReturn(run func() string) *TxBuilderMock_String_Call { - _c.Call.Return(run) - return _c -} - -// NewTxBuilderMock creates a new instance of TxBuilderMock. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. -// The first argument is typically a *testing.T value. 
-func NewTxBuilderMock(t interface { - mock.TestingT - Cleanup(func()) -}) *TxBuilderMock { - mock := &TxBuilderMock{} - mock.Mock.Test(t) - - t.Cleanup(func() { mock.AssertExpectations(t) }) - - return mock -} diff --git a/sequencesender/seqsendertypes/types.go b/sequencesender/seqsendertypes/types.go deleted file mode 100644 index 5d903dc5..00000000 --- a/sequencesender/seqsendertypes/types.go +++ /dev/null @@ -1,43 +0,0 @@ -package seqsendertypes - -import ( - "github.com/ethereum/go-ethereum/common" -) - -type Batch interface { - // underlyingType *ethmantypes.Batch - DeepCopy() Batch - LastCoinbase() common.Address - ForcedBatchTimestamp() uint64 - ForcedGlobalExitRoot() common.Hash - ForcedBlockHashL1() common.Hash - L2Data() []byte - LastL2BLockTimestamp() uint64 - BatchNumber() uint64 - GlobalExitRoot() common.Hash - L1InfoTreeIndex() uint32 - - String() string - - // WRITE - SetL2Data(data []byte) - SetLastCoinbase(address common.Address) - SetLastL2BLockTimestamp(ts uint64) - SetL1InfoTreeIndex(index uint32) -} - -type Sequence interface { - IndexL1InfoRoot() uint32 - MaxSequenceTimestamp() uint64 - L1InfoRoot() common.Hash - Batches() []Batch - FirstBatch() Batch - LastBatch() Batch - Len() int - L2Coinbase() common.Address - LastVirtualBatchNumber() uint64 - - String() string - // WRITE - SetLastVirtualBatchNumber(batchNumber uint64) -} diff --git a/sequencesender/sequencesender.go b/sequencesender/sequencesender.go deleted file mode 100644 index e6644121..00000000 --- a/sequencesender/sequencesender.go +++ /dev/null @@ -1,556 +0,0 @@ -package sequencesender - -import ( - "context" - "errors" - "fmt" - "math/big" - "os" - "sync" - "sync/atomic" - "time" - - "github.com/0xPolygon/zkevm-ethtx-manager/ethtxmanager" - ethtxlog "github.com/0xPolygon/zkevm-ethtx-manager/log" - ethtxtypes "github.com/0xPolygon/zkevm-ethtx-manager/types" - "github.com/agglayer/aggkit/etherman" - "github.com/agglayer/aggkit/log" - "github.com/agglayer/aggkit/rpc" - 
"github.com/agglayer/aggkit/rpc/types" - "github.com/agglayer/aggkit/sequencesender/seqsendertypes" - "github.com/agglayer/aggkit/sequencesender/txbuilder" - "github.com/agglayer/aggkit/state" - "github.com/ethereum/go-ethereum/common" - ethtypes "github.com/ethereum/go-ethereum/core/types" -) - -const ten = 10 - -// EthTxManager represents the eth tx manager interface -type EthTxManager interface { - Start() - AddWithGas( - ctx context.Context, - to *common.Address, - value *big.Int, - data []byte, - gasOffset uint64, - sidecar *ethtypes.BlobTxSidecar, - gas uint64, - ) (common.Hash, error) - Remove(ctx context.Context, hash common.Hash) error - ResultsByStatus(ctx context.Context, status []ethtxtypes.MonitoredTxStatus) ([]ethtxtypes.MonitoredTxResult, error) - Result(ctx context.Context, hash common.Hash) (ethtxtypes.MonitoredTxResult, error) -} - -// Etherman represents the etherman behaviour -type Etherman interface { - CurrentNonce(ctx context.Context, address common.Address) (uint64, error) - GetLatestBlockHeader(ctx context.Context) (*ethtypes.Header, error) - EstimateGas(ctx context.Context, from common.Address, to *common.Address, value *big.Int, data []byte) (uint64, error) - GetLatestBatchNumber() (uint64, error) -} - -// RPCInterface represents the RPC interface -type RPCInterface interface { - GetBatch(batchNumber uint64) (*types.RPCBatch, error) - GetWitness(batchNumber uint64, fullWitness bool) ([]byte, error) -} - -// SequenceSender represents a sequence sender -type SequenceSender struct { - cfg Config - logger *log.Logger - ethTxManager EthTxManager - etherman Etherman - latestVirtualBatchNumber uint64 // Latest virtualized batch obtained from L1 - latestVirtualTime time.Time // Latest virtual batch timestamp - latestSentToL1Batch uint64 // Latest batch sent to L1 - sequenceList []uint64 // Sequence of batch number to be send to L1 - sequenceData map[uint64]*sequenceData // All the batch data indexed by batch number - mutexSequence sync.Mutex // 
Mutex to access sequenceData and sequenceList - ethTransactions map[common.Hash]*ethTxData // All the eth tx sent to L1 indexed by hash - ethTxData map[common.Hash][]byte // Tx data send to or received from L1 - mutexEthTx sync.Mutex // Mutex to access ethTransactions - sequencesTxFile *os.File // Persistence of sent transactions - validStream bool // Not valid while receiving data before the desired batch - seqSendingStopped uint32 // If there is a critical error - TxBuilder txbuilder.TxBuilder - rpcClient RPCInterface -} - -type sequenceData struct { - batchClosed bool - batch seqsendertypes.Batch - batchRaw *state.BatchRawV2 -} - -// New inits sequence sender -func New(cfg Config, logger *log.Logger, - etherman *etherman.Client, txBuilder txbuilder.TxBuilder) (*SequenceSender, error) { - // Create sequencesender - s := SequenceSender{ - cfg: cfg, - logger: logger, - etherman: etherman, - ethTransactions: make(map[common.Hash]*ethTxData), - ethTxData: make(map[common.Hash][]byte), - sequenceData: make(map[uint64]*sequenceData), - validStream: false, - TxBuilder: txBuilder, - rpcClient: rpc.NewBatchEndpoints(cfg.RPCURL), - } - - logger.Infof("TxBuilder configuration: %s", txBuilder.String()) - - // Restore pending sent sequences - err := s.loadSentSequencesTransactions() - if err != nil { - s.logger.Fatalf("error restoring sent sequences from file", err) - return nil, err - } - - // Create ethtxmanager client - cfg.EthTxManager.Log = ethtxlog.Config{ - Environment: ethtxlog.LogEnvironment(cfg.Log.Environment), - Level: cfg.Log.Level, - Outputs: cfg.Log.Outputs, - } - - s.ethTxManager, err = ethtxmanager.New(cfg.EthTxManager) - if err != nil { - s.logger.Fatalf("error creating ethtxmanager client: %v", err) - return nil, err - } - - return &s, nil -} - -// Start starts the sequence sender -func (s *SequenceSender) Start(ctx context.Context) { - // Start ethtxmanager client - go s.ethTxManager.Start() - - // Get latest virtual state batch from L1 - err := 
s.updateLatestVirtualBatch() - if err != nil { - s.logger.Fatalf("error getting latest sequenced batch, error: %v", err) - } - - // Sync all monitored sent L1 tx - err = s.syncAllEthTxResults(ctx) - if err != nil { - s.logger.Fatalf("failed to sync monitored tx results, error: %v", err) - } - - // Current batch to sequence - atomic.StoreUint64(&s.latestSentToL1Batch, atomic.LoadUint64(&s.latestVirtualBatchNumber)) - - // Start retrieving batches from RPC - go func() { - err := s.batchRetrieval(ctx) - if err != nil { - s.logFatalf("error retrieving batches from RPC: %v", err) - } - }() - - // Start sequence sending - go s.sequenceSending(ctx) -} - -// batchRetrieval keeps reading batches from the RPC -func (s *SequenceSender) batchRetrieval(ctx context.Context) error { - ticker := time.NewTicker(s.cfg.GetBatchWaitInterval.Duration) - defer ticker.Stop() - - currentBatchNumber := atomic.LoadUint64(&s.latestVirtualBatchNumber) + 1 - for { - select { - case <-ctx.Done(): - s.logger.Info("context cancelled, stopping batch retrieval") - return ctx.Err() - default: - // Try to retrieve batch from RPC - rpcBatch, err := s.rpcClient.GetBatch(currentBatchNumber) - if err != nil { - if errors.Is(err, ethtxmanager.ErrNotFound) { - s.logger.Infof("batch %d not found in RPC", currentBatchNumber) - } else { - s.logger.Errorf("error getting batch %d from RPC: %v", currentBatchNumber, err) - } - <-ticker.C - continue - } - - // Check if the batch is closed - if !rpcBatch.IsClosed() { - s.logger.Infof("batch %d is not closed yet", currentBatchNumber) - <-ticker.C - continue - } - - // Process and decode the batch - if err := s.populateSequenceData(rpcBatch, currentBatchNumber); err != nil { - return err - } - - // Increment the batch number for the next iteration - currentBatchNumber++ - } - } -} - -func (s *SequenceSender) populateSequenceData(rpcBatch *types.RPCBatch, batchNumber uint64) error { - s.mutexSequence.Lock() - defer s.mutexSequence.Unlock() - - s.sequenceList = 
append(s.sequenceList, batchNumber) - - // Decode batch to retrieve the l1 info tree index - batchRaw, err := state.DecodeBatchV2(rpcBatch.L2Data()) - if err != nil { - s.logger.Errorf("Failed to decode batch data for batch %d, err: %v", batchNumber, err) - return err - } - - if len(batchRaw.Blocks) > 0 { - rpcBatch.SetL1InfoTreeIndex(batchRaw.Blocks[len(batchRaw.Blocks)-1].IndexL1InfoTree) - } - - s.sequenceData[batchNumber] = &sequenceData{ - batchClosed: rpcBatch.IsClosed(), - batch: rpcBatch, - batchRaw: batchRaw, - } - - return nil -} - -// sequenceSending starts loop to check if there are sequences to send and sends them if it's convenient -func (s *SequenceSender) sequenceSending(ctx context.Context) { - // Create a ticker that fires every WaitPeriodSendSequence - ticker := time.NewTicker(s.cfg.WaitPeriodSendSequence.Duration) - defer ticker.Stop() - - for { - select { - case <-ctx.Done(): - s.logger.Info("context canceled, stopping sequence sending") - return - - case <-ticker.C: - // Trigger the sequence sending when the ticker fires - s.tryToSendSequence(ctx) - } - } -} - -// purgeSequences purges batches from memory structures -func (s *SequenceSender) purgeSequences() { - // If sequence sending is stopped, do not purge - if s.IsStopped() { - return - } - - // Purge the information of batches that are already virtualized - s.mutexSequence.Lock() - defer s.mutexSequence.Unlock() - truncateUntil := 0 - toPurge := make([]uint64, 0) - for i, batchNumber := range s.sequenceList { - if batchNumber <= atomic.LoadUint64(&s.latestVirtualBatchNumber) { - truncateUntil = i + 1 - toPurge = append(toPurge, batchNumber) - } - } - - if len(toPurge) > 0 { - s.sequenceList = s.sequenceList[truncateUntil:] - - firstPurged := toPurge[0] - lastPurged := toPurge[len(toPurge)-1] - for _, batchNum := range toPurge { - delete(s.sequenceData, batchNum) - } - s.logger.Infof("batches purged count: %d, fromBatch: %d, toBatch: %d", len(toPurge), firstPurged, lastPurged) - } -} - -// 
tryToSendSequence checks if there is a sequence and it's worth it to send to L1 -func (s *SequenceSender) tryToSendSequence(ctx context.Context) { - // Update latest virtual batch - s.logger.Infof("updating virtual batch") - err := s.updateLatestVirtualBatch() - if err != nil { - return - } - - // Check if the sequence sending is stopped - if s.IsStopped() { - s.logger.Warnf("sending is stopped!") - return - } - - // Update state of transactions - s.logger.Infof("updating tx results") - pendingTxsCount, err := s.syncEthTxResults(ctx) - if err != nil { - return - } - - // Check if reached the maximum number of pending transactions - if pendingTxsCount >= s.cfg.MaxPendingTx { - s.logger.Infof("max number of pending txs (%d) reached. Waiting for some to be completed", pendingTxsCount) - return - } - - // Check if should send sequence to L1 - s.logger.Infof("getting sequences to send") - sequence, err := s.getSequencesToSend(ctx) - if err != nil || sequence == nil || sequence.Len() == 0 { - if err != nil { - s.logger.Errorf("error getting sequences: %v", err) - } - return - } - - // Send sequences to L1 - firstBatch := sequence.FirstBatch() - lastBatch := sequence.LastBatch() - - s.logger.Debugf(sequence.String()) - s.logger.Infof("sending sequences to L1. 
From batch %d to batch %d", firstBatch.BatchNumber(), lastBatch.BatchNumber()) - - // Wait until last L1 block timestamp is L1BlockTimestampMargin seconds above the timestamp - // of the last L2 block in the sequence - timeMargin := int64(s.cfg.L1BlockTimestampMargin.Seconds()) - - err = s.waitForMargin(ctx, lastBatch, timeMargin, "L1 block block timestamp", - func() (uint64, error) { - lastL1BlockHeader, err := s.etherman.GetLatestBlockHeader(ctx) - if err != nil { - return 0, err - } - - return lastL1BlockHeader.Time, nil - }) - if err != nil { - s.logger.Errorf("error waiting for L1 block time margin: %v", err) - return - } - - // Sanity check: Wait until the current time is also L1BlockTimestampMargin seconds above the last L2 block timestamp - err = s.waitForMargin(ctx, lastBatch, timeMargin, "current time", - func() (uint64, error) { return uint64(time.Now().Unix()), nil }) - if err != nil { - s.logger.Errorf("error waiting for current time margin: %v", err) - return - } - - // Send sequences to L1 - s.logger.Debugf(sequence.String()) - s.logger.Infof("sending sequences to L1. 
From batch %d to batch %d", firstBatch.BatchNumber(), lastBatch.BatchNumber()) - - tx, err := s.TxBuilder.BuildSequenceBatchesTx(ctx, sequence) - if err != nil { - s.logger.Errorf("error building sequenceBatches tx: %v", err) - return - } - - // Get latest virtual state batch from L1 - err = s.updateLatestVirtualBatch() - if err != nil { - s.logger.Fatalf("error getting latest sequenced batch, error: %v", err) - } - - sequence.SetLastVirtualBatchNumber(atomic.LoadUint64(&s.latestVirtualBatchNumber)) - - gas, err := s.etherman.EstimateGas(ctx, s.cfg.SenderAddress, tx.To(), nil, tx.Data()) - if err != nil { - s.logger.Errorf("error estimating gas: ", err) - return - } - - // Add sequence tx - err = s.sendTx(ctx, false, nil, tx.To(), firstBatch.BatchNumber(), lastBatch.BatchNumber(), tx.Data(), gas) - if err != nil { - return - } - - // Purge sequences data from memory - s.purgeSequences() -} - -// waitForMargin ensures that the time difference between the last L2 block and the current -// timestamp exceeds the time margin before proceeding. It checks immediately, and if not -// satisfied, it waits using a ticker and rechecks periodically. -// -// Params: -// - ctx: Context to handle cancellation. -// - lastBatch: The last batch in the sequence. -// - timeMargin: Required time difference in seconds. -// - description: A description for logging purposes. -// - getTimeFn: Function to get the current time (e.g., L1 block time or current time). 
-func (s *SequenceSender) waitForMargin(ctx context.Context, lastBatch seqsendertypes.Batch, - timeMargin int64, description string, getTimeFn func() (uint64, error)) error { - referentTime, err := getTimeFn() - if err != nil { - return err - } - - lastL2BlockTimestamp := lastBatch.LastL2BLockTimestamp() - elapsed, waitTime := marginTimeElapsed(lastL2BlockTimestamp, referentTime, timeMargin) - if elapsed { - s.logger.Infof("time difference for %s exceeds %d seconds, proceeding (batch number: %d, last l2 block ts: %d)", - description, timeMargin, lastBatch.BatchNumber(), lastL2BlockTimestamp) - return nil - } - - s.logger.Infof("waiting %d seconds for %s, margin less than %d seconds (batch number: %d, last l2 block ts: %d)", - waitTime, description, timeMargin, lastBatch.BatchNumber(), lastL2BlockTimestamp) - ticker := time.NewTicker(time.Duration(waitTime) * time.Second) - defer ticker.Stop() - - for { - select { - case <-ctx.Done(): - s.logger.Infof("context canceled during %s wait (batch number: %d, last l2 block ts: %d)", - description, lastBatch.BatchNumber(), lastL2BlockTimestamp) - return ctx.Err() - - case <-ticker.C: - referentTime, err = getTimeFn() - if err != nil { - return err - } - - elapsed, waitTime = marginTimeElapsed(lastL2BlockTimestamp, referentTime, timeMargin) - if elapsed { - s.logger.Infof("time margin for %s now exceeds %d seconds, proceeding (batch number: %d, last l2 block ts: %d)", - description, timeMargin, lastBatch.BatchNumber(), lastL2BlockTimestamp) - return nil - } - - s.logger.Infof( - "waiting another %d seconds for %s, margin still less than %d seconds (batch number: %d, last l2 block ts: %d)", - waitTime, description, timeMargin, lastBatch.BatchNumber(), lastL2BlockTimestamp) - ticker.Reset(time.Duration(waitTime) * time.Second) - } - } -} - -func (s *SequenceSender) getSequencesToSend(ctx context.Context) (seqsendertypes.Sequence, error) { - // Add sequences until too big for a single L1 tx or last batch is reached - 
s.mutexSequence.Lock() - defer s.mutexSequence.Unlock() - var prevCoinbase common.Address - sequenceBatches := make([]seqsendertypes.Batch, 0) - for _, batchNumber := range s.sequenceList { - if batchNumber <= atomic.LoadUint64(&s.latestVirtualBatchNumber) || - batchNumber <= atomic.LoadUint64(&s.latestSentToL1Batch) { - continue - } - - // Check if the next batch belongs to a new forkid, in this case we need to stop sequencing as we need to - // wait the upgrade of forkid is completed and s.cfg.NumBatchForkIdUpgrade is disabled (=0) again - if s.cfg.ForkUpgradeBatchNumber != 0 && batchNumber == (s.cfg.ForkUpgradeBatchNumber+1) { - return nil, fmt.Errorf( - "aborting sequencing process as we reached the batch %d where a new forkid is applied (upgrade)", - s.cfg.ForkUpgradeBatchNumber+1, - ) - } - - // New potential batch to add to the sequence - batch := s.sequenceData[batchNumber].batch.DeepCopy() - - // If the coinbase changes, the sequence ends here - if len(sequenceBatches) > 0 && batch.LastCoinbase() != prevCoinbase { - s.logger.Infof( - "batch with different coinbase (batch %v, sequence %v), sequence will be sent to this point", - prevCoinbase, batch.LastCoinbase, - ) - return s.TxBuilder.NewSequence(ctx, sequenceBatches, s.cfg.L2Coinbase) - } - prevCoinbase = batch.LastCoinbase() - - // Add new sequence batch - sequenceBatches = append(sequenceBatches, batch) - - newSeq, err := s.TxBuilder.NewSequenceIfWorthToSend(ctx, sequenceBatches, s.cfg.L2Coinbase, batchNumber) - if err != nil { - return nil, err - } - if newSeq != nil { - return newSeq, nil - } - - // Check if the current batch is the last before a change to a new forkid - // In this case we need to close and send the sequence to L1 - if s.cfg.ForkUpgradeBatchNumber != 0 && batchNumber == s.cfg.ForkUpgradeBatchNumber { - s.logger.Infof("sequence should be sent to L1, as we have reached the batch %d "+ - "from which a new forkid is applied (upgrade)", - s.cfg.ForkUpgradeBatchNumber, - ) - return 
s.TxBuilder.NewSequence(ctx, sequenceBatches, s.cfg.L2Coinbase) - } - } - - // Reached the latest batch. Decide if it's worth to send the sequence, or wait for new batches - if len(sequenceBatches) == 0 { - s.logger.Infof("no batches to be sequenced") - return nil, nil - } - - if s.latestVirtualTime.Before(time.Now().Add(-s.cfg.LastBatchVirtualizationTimeMaxWaitPeriod.Duration)) { - s.logger.Infof("sequence should be sent, too much time without sending anything to L1") - return s.TxBuilder.NewSequence(ctx, sequenceBatches, s.cfg.L2Coinbase) - } - - s.logger.Infof("not enough time has passed since last batch was virtualized and the sequence could be bigger") - return nil, nil -} - -// updateLatestVirtualBatch queries the value in L1 and updates the latest virtual batch field -func (s *SequenceSender) updateLatestVirtualBatch() error { - // Get latest virtual state batch from L1 - latestVirtualBatchNumber, err := s.etherman.GetLatestBatchNumber() - if err != nil { - s.logger.Errorf("error getting latest virtual batch, error: %v", err) - return errors.New("fail to get latest virtual batch") - } - - atomic.StoreUint64(&s.latestVirtualBatchNumber, latestVirtualBatchNumber) - s.logger.Infof("latest virtual batch is %d", latestVirtualBatchNumber) - - return nil -} - -// logFatalf logs error, activates flag to stop sequencing, and remains in an infinite loop -func (s *SequenceSender) logFatalf(template string, args ...interface{}) { - atomic.StoreUint32(&s.seqSendingStopped, 1) - for { - s.logger.Errorf(template, args...) - s.logger.Errorf("sequence sending stopped.") - time.Sleep(ten * time.Second) - } -} - -// marginTimeElapsed checks if the time between currentTime and l2BlockTimestamp is greater than timeMargin. 
-// If it's greater returns true, otherwise it returns false and the waitTime needed to achieve this timeMargin -func marginTimeElapsed(l2BlockTimestamp uint64, currentTime uint64, timeMargin int64) (bool, int64) { - if int64(l2BlockTimestamp)-timeMargin > int64(currentTime) { - return true, 0 - } - - timeDiff := int64(currentTime) - int64(l2BlockTimestamp) - - // If the difference is less than the required margin, return false and calculate the remaining wait time - if timeDiff < timeMargin { - // Calculate the wait time needed to reach the timeMargin - waitTime := timeMargin - timeDiff - return false, waitTime - } - - // Time difference is greater than or equal to timeMargin, no need to wait - return true, 0 -} diff --git a/sequencesender/sequencesender_test.go b/sequencesender/sequencesender_test.go deleted file mode 100644 index 27072b1d..00000000 --- a/sequencesender/sequencesender_test.go +++ /dev/null @@ -1,619 +0,0 @@ -package sequencesender - -import ( - "errors" - "math/big" - "os" - "testing" - "time" - - ethtxtypes "github.com/0xPolygon/zkevm-ethtx-manager/types" - types2 "github.com/agglayer/aggkit/config/types" - "github.com/agglayer/aggkit/etherman" - "github.com/agglayer/aggkit/log" - rpctypes "github.com/agglayer/aggkit/rpc/types" - "github.com/agglayer/aggkit/sequencesender/mocks" - "github.com/agglayer/aggkit/sequencesender/seqsendertypes" - "github.com/agglayer/aggkit/sequencesender/txbuilder" - "github.com/agglayer/aggkit/state" - "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/core/types" - "github.com/stretchr/testify/mock" - "github.com/stretchr/testify/require" - "golang.org/x/net/context" -) - -const ( - txStreamEncoded1 = "f86508843b9aca0082520894617b3a3528f9cdd6630fd3301b9c8911f7bf063d0a808207f5a0579b72a1c1ffdd845fba45317540982109298e2ec8d67ddf2cdaf22e80903677a01831e9a01291c7ea246742a5b5a543ca6938bfc3f6958c22be06fad99274e4ac" - txStreamEncoded2 = 
"f86509843b9aca0082520894617b3a3528f9cdd6630fd3301b9c8911f7bf063d0a808207f5a0908a522075e09485166ffa7630cd2b7013897fa1f1238013677d6f0a86efb3d2a0068b12435fcdc8ee254f3b1df8c5b29ed691eeee6065704f061130935976ca99" - txStreamEncoded3 = "b8b402f8b101268505d21dba0085076c363d8982dc60941929761e87667283f087ea9ab8370c174681b4e980b844095ea7b300000000000000000000000080a64c6d7f12c47b7c66c5b4e20e72bc1fcd5d9effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc001a0dd4db494969139a120e8721842455ec13f82757a4fc49b66d447c7d32d095a1da06ef54068a9aa67ecc4f52d885299a04feb6f3531cdfc771f1412cd3331d1ba4c" -) - -var ( - now = time.Now() -) - -func TestMain(t *testing.M) { - t.Run() -} - -func Test_encoding(t *testing.T) { - tx1, err := state.DecodeTx(txStreamEncoded1) - require.NoError(t, err) - tx2, err := state.DecodeTx(txStreamEncoded2) - require.NoError(t, err) - tx3, err := state.DecodeTx(txStreamEncoded3) - require.NoError(t, err) - - txTest := state.L2TxRaw{ - EfficiencyPercentage: 129, - TxAlreadyEncoded: false, - Tx: tx1, - } - txTestEncoded := make([]byte, 0) - txTestEncoded, err = txTest.Encode(txTestEncoded) - require.NoError(t, err) - log.Debugf("%s", common.Bytes2Hex(txTestEncoded)) - - batch := state.BatchRawV2{ - Blocks: []state.L2BlockRaw{ - { - ChangeL2BlockHeader: state.ChangeL2BlockHeader{ - DeltaTimestamp: 3633752, - IndexL1InfoTree: 0, - }, - Transactions: []state.L2TxRaw{ - { - EfficiencyPercentage: 129, - TxAlreadyEncoded: false, - Tx: tx1, - }, - { - EfficiencyPercentage: 97, - TxAlreadyEncoded: false, - Tx: tx2, - }, - { - EfficiencyPercentage: 97, - TxAlreadyEncoded: false, - Tx: tx3, - }, - }, - }, - }, - } - - encodedBatch, err := state.EncodeBatchV2(&batch) - require.NoError(t, err) - - decodedBatch, err := state.DecodeBatchV2(encodedBatch) - require.NoError(t, err) - - require.Equal(t, batch.String(), decodedBatch.String()) -} - -func Test_Start(t *testing.T) { - t.Parallel() - - tests := []struct { - name string - getEthTxManager func(t *testing.T) 
*mocks.EthTxManagerMock - getEtherman func(t *testing.T) *mocks.EthermanMock - getRPC func(t *testing.T) *mocks.RPCInterfaceMock - batchWaitDuration types2.Duration - expectNonce uint64 - expectLastVirtualBatch uint64 - expectFromStreamBatch uint64 - expectWipBatch uint64 - expectLatestSentToL1Batch uint64 - }{ - { - name: "successfully started", - getEtherman: func(t *testing.T) *mocks.EthermanMock { - t.Helper() - - mngr := mocks.NewEthermanMock(t) - mngr.On("GetLatestBatchNumber").Return(uint64(1), nil) - return mngr - }, - getEthTxManager: func(t *testing.T) *mocks.EthTxManagerMock { - t.Helper() - - mngr := mocks.NewEthTxManagerMock(t) - mngr.On("Start").Return(nil) - mngr.On("ResultsByStatus", mock.Anything, []ethtxtypes.MonitoredTxStatus(nil)).Return(nil, nil) - return mngr - }, - getRPC: func(t *testing.T) *mocks.RPCInterfaceMock { - t.Helper() - - mngr := mocks.NewRPCInterfaceMock(t) - mngr.On("GetBatch", mock.Anything).Return(&rpctypes.RPCBatch{}, nil) - return mngr - }, - - batchWaitDuration: types2.NewDuration(time.Millisecond), - expectNonce: 3, - expectLastVirtualBatch: 1, - expectFromStreamBatch: 1, - expectWipBatch: 2, - expectLatestSentToL1Batch: 1, - }, - } - - for _, tt := range tests { - tt := tt - - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - - tmpFile, err := os.CreateTemp(os.TempDir(), tt.name+".tmp") - require.NoError(t, err) - defer os.RemoveAll(tmpFile.Name() + ".tmp") - - s := SequenceSender{ - etherman: tt.getEtherman(t), - ethTxManager: tt.getEthTxManager(t), - cfg: Config{ - SequencesTxFileName: tmpFile.Name() + ".tmp", - GetBatchWaitInterval: tt.batchWaitDuration, - WaitPeriodSendSequence: types2.NewDuration(1 * time.Millisecond), - }, - logger: log.GetDefaultLogger(), - rpcClient: tt.getRPC(t), - } - - ctx, cancel := context.WithCancel(context.Background()) - s.Start(ctx) - time.Sleep(time.Second) - cancel() - time.Sleep(time.Second) - - require.Equal(t, tt.expectLatestSentToL1Batch, s.latestSentToL1Batch) - }) - } -} - 
-func Test_purgeSequences(t *testing.T) { - t.Parallel() - - tests := []struct { - name string - seqSendingStopped uint32 - sequenceList []uint64 - sequenceData map[uint64]*sequenceData - latestVirtualBatchNumber uint64 - expectedSequenceList []uint64 - expectedSequenceData map[uint64]*sequenceData - }{ - { - name: "sequences purged when seqSendingStopped", - seqSendingStopped: 1, - sequenceList: []uint64{1, 2}, - sequenceData: map[uint64]*sequenceData{ - 1: {}, - 2: {}, - }, - expectedSequenceList: []uint64{1, 2}, - expectedSequenceData: map[uint64]*sequenceData{ - 1: {}, - 2: {}, - }, - }, - { - name: "no sequences purged", - seqSendingStopped: 0, - sequenceList: []uint64{4, 5}, - sequenceData: map[uint64]*sequenceData{ - 4: {}, - 5: {}, - }, - expectedSequenceList: []uint64{4, 5}, - expectedSequenceData: map[uint64]*sequenceData{ - 4: {}, - 5: {}, - }, - }, - { - name: "sequences purged", - seqSendingStopped: 0, - sequenceList: []uint64{4, 5, 6}, - sequenceData: map[uint64]*sequenceData{ - 4: {}, - 5: {}, - 6: {}, - }, - latestVirtualBatchNumber: 5, - expectedSequenceList: []uint64{6}, - expectedSequenceData: map[uint64]*sequenceData{ - 6: {}, - }, - }, - } - - for _, tt := range tests { - tt := tt - - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - - ss := SequenceSender{ - seqSendingStopped: tt.seqSendingStopped, - sequenceList: tt.sequenceList, - sequenceData: tt.sequenceData, - latestVirtualBatchNumber: tt.latestVirtualBatchNumber, - logger: log.GetDefaultLogger(), - } - - ss.purgeSequences() - - require.Equal(t, tt.expectedSequenceList, ss.sequenceList) - require.Equal(t, tt.expectedSequenceData, ss.sequenceData) - }) - } -} - -func Test_tryToSendSequence(t *testing.T) { - t.Parallel() - - tests := []struct { - name string - getEthTxManager func(t *testing.T) *mocks.EthTxManagerMock - getEtherman func(t *testing.T) *mocks.EthermanMock - getTxBuilder func(t *testing.T) *mocks.TxBuilderMock - maxPendingTxn uint64 - sequenceList []uint64 - 
latestSentToL1Batch uint64 - sequenceData map[uint64]*sequenceData - ethTransactions map[common.Hash]*ethTxData - ethTxData map[common.Hash][]byte - - expectErr error - }{ - { - name: "successfully sent", - getEtherman: func(t *testing.T) *mocks.EthermanMock { - t.Helper() - - mngr := mocks.NewEthermanMock(t) - mngr.On("GetLatestBatchNumber").Return(uint64(1), nil) - return mngr - }, - getEthTxManager: func(t *testing.T) *mocks.EthTxManagerMock { - t.Helper() - - mngr := mocks.NewEthTxManagerMock(t) - return mngr - }, - getTxBuilder: func(t *testing.T) *mocks.TxBuilderMock { - t.Helper() - - mngr := mocks.NewTxBuilderMock(t) - mngr.On("NewSequenceIfWorthToSend", mock.Anything, mock.Anything, mock.Anything, mock.Anything).Return(txbuilder.NewBananaSequence(etherman.SequenceBanana{}), nil) - return mngr - }, - maxPendingTxn: 10, - sequenceList: []uint64{2}, - latestSentToL1Batch: 1, - sequenceData: map[uint64]*sequenceData{ - 2: { - batchClosed: true, - batch: txbuilder.NewBananaBatch(ðerman.Batch{}), - }, - }, - }, - { - name: "successfully sent new sequence", - getEtherman: func(t *testing.T) *mocks.EthermanMock { - t.Helper() - - mngr := mocks.NewEthermanMock(t) - mngr.On("GetLatestBatchNumber").Return(uint64(1), nil) - mngr.On("GetLatestBlockHeader", mock.Anything).Return(&types.Header{ - Number: big.NewInt(1), - }, nil) - mngr.On("EstimateGas", mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything).Return(uint64(100500), nil) - return mngr - }, - getEthTxManager: func(t *testing.T) *mocks.EthTxManagerMock { - t.Helper() - - mngr := mocks.NewEthTxManagerMock(t) - mngr.On("AddWithGas", mock.Anything, mock.Anything, big.NewInt(0), mock.Anything, mock.Anything, mock.Anything, uint64(100500)).Return(common.Hash{}, nil) - mngr.On("Result", mock.Anything, common.Hash{}).Return(ethtxtypes.MonitoredTxResult{ - ID: common.Hash{}, - Data: []byte{1, 2, 3}, - }, nil) - return mngr - }, - getTxBuilder: func(t *testing.T) *mocks.TxBuilderMock { - t.Helper() 
- - mngr := mocks.NewTxBuilderMock(t) - mngr.On("NewSequenceIfWorthToSend", mock.Anything, mock.Anything, mock.Anything, mock.Anything).Return(nil, nil) - mngr.On("NewSequence", mock.Anything, mock.Anything, mock.Anything).Return(txbuilder.NewBananaSequence(etherman.SequenceBanana{ - Batches: []etherman.Batch{{ - BatchNumber: 2, - }}, - }), nil) - mngr.On("BuildSequenceBatchesTx", mock.Anything, mock.Anything).Return(types.NewTx(&types.LegacyTx{}), nil) - return mngr - }, - maxPendingTxn: 10, - sequenceList: []uint64{2}, - latestSentToL1Batch: 1, - sequenceData: map[uint64]*sequenceData{ - 2: { - batchClosed: true, - batch: txbuilder.NewBananaBatch(ðerman.Batch{}), - }, - }, - ethTransactions: map[common.Hash]*ethTxData{}, - ethTxData: map[common.Hash][]byte{}, - }, - } - - for _, tt := range tests { - tt := tt - - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - - tmpFile, err := os.CreateTemp(os.TempDir(), tt.name+".tmp") - require.NoError(t, err) - defer os.RemoveAll(tmpFile.Name() + ".tmp") - - s := SequenceSender{ - ethTxManager: tt.getEthTxManager(t), - etherman: tt.getEtherman(t), - TxBuilder: tt.getTxBuilder(t), - cfg: Config{ - SequencesTxFileName: tmpFile.Name() + ".tmp", - MaxPendingTx: tt.maxPendingTxn, - WaitPeriodSendSequence: types2.NewDuration(time.Millisecond), - }, - sequenceList: tt.sequenceList, - latestSentToL1Batch: tt.latestSentToL1Batch, - sequenceData: tt.sequenceData, - ethTransactions: tt.ethTransactions, - ethTxData: tt.ethTxData, - logger: log.GetDefaultLogger(), - } - - s.tryToSendSequence(context.Background()) - }) - } -} - -func Test_getSequencesToSend(t *testing.T) { - t.Parallel() - - tests := []struct { - name string - sequenceList []uint64 - latestSentToL1Batch uint64 - forkUpgradeBatchNumber uint64 - sequenceData map[uint64]*sequenceData - getTxBuilder func(t *testing.T) *mocks.TxBuilderMock - expectedSequence seqsendertypes.Sequence - expectedErr error - }{ - { - name: "successfully get sequence", - sequenceList: 
[]uint64{2}, - latestSentToL1Batch: 1, - sequenceData: map[uint64]*sequenceData{ - 2: { - batchClosed: true, - batch: txbuilder.NewBananaBatch(ðerman.Batch{}), - }, - }, - getTxBuilder: func(t *testing.T) *mocks.TxBuilderMock { - t.Helper() - - mngr := mocks.NewTxBuilderMock(t) - mngr.On("NewSequenceIfWorthToSend", mock.Anything, mock.Anything, mock.Anything, mock.Anything).Return(txbuilder.NewBananaSequence(etherman.SequenceBanana{ - Batches: []etherman.Batch{{ - BatchNumber: 2, - }}, - }), nil) - return mngr - }, - expectedSequence: txbuilder.NewBananaSequence(etherman.SequenceBanana{ - Batches: []etherman.Batch{{ - BatchNumber: 2, - }}, - }), - expectedErr: nil, - }, - { - name: "different coinbase", - sequenceList: []uint64{2, 3}, - latestSentToL1Batch: 1, - sequenceData: map[uint64]*sequenceData{ - 2: { - batchClosed: true, - batch: txbuilder.NewBananaBatch(ðerman.Batch{}), - }, - 3: { - batchClosed: true, - batch: txbuilder.NewBananaBatch(ðerman.Batch{ - LastCoinbase: common.HexToAddress("0x2"), - }), - }, - }, - getTxBuilder: func(t *testing.T) *mocks.TxBuilderMock { - t.Helper() - - mngr := mocks.NewTxBuilderMock(t) - mngr.On("NewSequenceIfWorthToSend", mock.Anything, mock.Anything, mock.Anything, mock.Anything).Return(nil, nil) - mngr.On("NewSequence", mock.Anything, mock.Anything, mock.Anything).Return(txbuilder.NewBananaSequence(etherman.SequenceBanana{ - Batches: []etherman.Batch{{ - BatchNumber: 2, - }}, - }), nil) - return mngr - }, - expectedSequence: txbuilder.NewBananaSequence(etherman.SequenceBanana{ - Batches: []etherman.Batch{{ - BatchNumber: 2, - }}, - }), - expectedErr: nil, - }, - { - name: "NewSequenceIfWorthToSend return error", - sequenceList: []uint64{2}, - latestSentToL1Batch: 1, - sequenceData: map[uint64]*sequenceData{ - 2: { - batchClosed: true, - batch: txbuilder.NewBananaBatch(ðerman.Batch{}), - }, - }, - getTxBuilder: func(t *testing.T) *mocks.TxBuilderMock { - t.Helper() - - mngr := mocks.NewTxBuilderMock(t) - 
mngr.On("NewSequenceIfWorthToSend", mock.Anything, mock.Anything, mock.Anything, mock.Anything).Return(nil, errors.New("test error")) - return mngr - }, - expectedErr: errors.New("test error"), - }, - { - name: "fork upgrade", - sequenceList: []uint64{2}, - latestSentToL1Batch: 1, - forkUpgradeBatchNumber: 2, - sequenceData: map[uint64]*sequenceData{ - 2: { - batchClosed: true, - batch: txbuilder.NewBananaBatch(ðerman.Batch{}), - }, - }, - getTxBuilder: func(t *testing.T) *mocks.TxBuilderMock { - t.Helper() - - mngr := mocks.NewTxBuilderMock(t) - mngr.On("NewSequenceIfWorthToSend", mock.Anything, mock.Anything, mock.Anything, mock.Anything).Return(nil, nil) - mngr.On("NewSequence", mock.Anything, mock.Anything, mock.Anything).Return(txbuilder.NewBananaSequence(etherman.SequenceBanana{ - Batches: []etherman.Batch{{ - BatchNumber: 2, - }}, - }), nil) - return mngr - }, - expectedSequence: txbuilder.NewBananaSequence(etherman.SequenceBanana{ - Batches: []etherman.Batch{{ - BatchNumber: 2, - }}, - }), - expectedErr: nil, - }, - { - name: "fork upgrade passed", - sequenceList: []uint64{2}, - latestSentToL1Batch: 1, - forkUpgradeBatchNumber: 1, - sequenceData: map[uint64]*sequenceData{ - 2: { - batchClosed: true, - batch: txbuilder.NewBananaBatch(ðerman.Batch{}), - }, - }, - getTxBuilder: func(t *testing.T) *mocks.TxBuilderMock { - t.Helper() - - mngr := mocks.NewTxBuilderMock(t) - return mngr - }, - expectedErr: errors.New("aborting sequencing process as we reached the batch 2 where a new forkid is applied (upgrade)"), - }, - } - - for _, tt := range tests { - tt := tt - - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - - ss := SequenceSender{ - sequenceList: tt.sequenceList, - latestSentToL1Batch: tt.latestSentToL1Batch, - cfg: Config{ - ForkUpgradeBatchNumber: tt.forkUpgradeBatchNumber, - }, - sequenceData: tt.sequenceData, - TxBuilder: tt.getTxBuilder(t), - logger: log.GetDefaultLogger(), - } - - sequence, err := ss.getSequencesToSend(context.Background()) - if 
tt.expectedErr != nil { - require.Equal(t, tt.expectedErr, err) - } else { - require.NoError(t, err) - require.Equal(t, tt.expectedSequence, sequence) - } - }) - } -} - -func Test_marginTimeElapsed(t *testing.T) { - t.Parallel() - - type args struct { - l2BlockTimestamp uint64 - currentTime uint64 - timeMargin int64 - } - tests := []struct { - name string - args args - expectedIsElapsed bool - expectedWaitTime int64 - }{ - { - name: "time elapsed", - args: args{ - l2BlockTimestamp: 100, - currentTime: 200, - timeMargin: 50, - }, - expectedIsElapsed: true, - expectedWaitTime: 0, - }, - { - name: "time not elapsed", - args: args{ - l2BlockTimestamp: 100, - currentTime: 200, - timeMargin: 150, - }, - expectedIsElapsed: false, - expectedWaitTime: 50, - }, - { - name: "l2 block in the future (time margin not enough)", - args: args{ - l2BlockTimestamp: 300, - currentTime: 200, - timeMargin: 50, - }, - expectedIsElapsed: true, - expectedWaitTime: 0, - }, - } - - for _, tt := range tests { - tt := tt - - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - - isElapsed, waitTime := marginTimeElapsed(tt.args.l2BlockTimestamp, tt.args.currentTime, tt.args.timeMargin) - require.Equal(t, tt.expectedIsElapsed, isElapsed, "marginTimeElapsed() isElapsed = %t, want %t", isElapsed, tt.expectedIsElapsed) - require.Equal(t, tt.expectedWaitTime, waitTime, "marginTimeElapsed() got1 = %v, want %v", waitTime, tt.expectedWaitTime) - }) - } -} diff --git a/sequencesender/txbuilder/banana_base.go b/sequencesender/txbuilder/banana_base.go deleted file mode 100644 index 67374415..00000000 --- a/sequencesender/txbuilder/banana_base.go +++ /dev/null @@ -1,247 +0,0 @@ -package txbuilder - -import ( - "context" - "errors" - "fmt" - "math/big" - - aggkitcommon "github.com/agglayer/aggkit/common" - "github.com/agglayer/aggkit/etherman" - "github.com/agglayer/aggkit/l1infotreesync" - "github.com/agglayer/aggkit/log" - "github.com/agglayer/aggkit/sequencesender/seqsendertypes" - 
"github.com/agglayer/aggkit/state/datastream" - "github.com/ethereum/go-ethereum/accounts/abi/bind" - "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/core/types" -) - -type rollupBananaBaseContractor interface { - LastAccInputHash(opts *bind.CallOpts) ([32]byte, error) -} - -type globalExitRootBananaContractor interface { - L1InfoRootMap(opts *bind.CallOpts, index uint32) ([32]byte, error) - String() string -} - -type l1InfoSyncer interface { - GetLatestInfoUntilBlock(ctx context.Context, blockNum uint64) (*l1infotreesync.L1InfoTreeLeaf, error) - GetInitL1InfoRootMap(ctx context.Context) (*l1infotreesync.L1InfoTreeInitial, error) -} - -type l1Client interface { - HeaderByNumber(ctx context.Context, number *big.Int) (*types.Header, error) -} - -type TxBuilderBananaBase struct { - logger *log.Logger - rollupContract rollupBananaBaseContractor - globalExitRootContract globalExitRootBananaContractor - l1InfoTree l1InfoSyncer - ethClient l1Client - blockFinality *big.Int - opts bind.TransactOpts -} - -func NewTxBuilderBananaBase( - logger *log.Logger, - rollupContract rollupBananaBaseContractor, - gerContract globalExitRootBananaContractor, - l1InfoTree l1InfoSyncer, - ethClient l1Client, - blockFinality *big.Int, - opts bind.TransactOpts, -) *TxBuilderBananaBase { - return &TxBuilderBananaBase{ - logger: logger, - rollupContract: rollupContract, - globalExitRootContract: gerContract, - l1InfoTree: l1InfoTree, - ethClient: ethClient, - blockFinality: blockFinality, - opts: opts, - } -} - -func (t *TxBuilderBananaBase) NewBatchFromL2Block(l2Block *datastream.L2Block) seqsendertypes.Batch { - batch := ðerman.Batch{ - LastL2BLockTimestamp: l2Block.Timestamp, - BatchNumber: l2Block.BatchNumber, - L1InfoTreeIndex: l2Block.L1InfotreeIndex, - LastCoinbase: common.BytesToAddress(l2Block.Coinbase), - GlobalExitRoot: common.BytesToHash(l2Block.GlobalExitRoot), - } - return NewBananaBatch(batch) -} - -func getHighestL1InfoIndex(batches []etherman.Batch) 
uint32 { - var highestL1Index uint32 - for _, b := range batches { - if highestL1Index < b.L1InfoTreeIndex { - highestL1Index = b.L1InfoTreeIndex - } - } - return highestL1Index -} - -// Returns CounterL1InfoRoot to use for this batch -func (t *TxBuilderBananaBase) GetCounterL1InfoRoot(ctx context.Context, highestL1IndexInBatch uint32) (uint32, error) { - header, err := t.ethClient.HeaderByNumber(ctx, t.blockFinality) - if err != nil { - return 0, fmt.Errorf("error calling HeaderByNumber, with block finality %d: %w", t.blockFinality.Int64(), err) - } - var resL1InfoCounter uint32 - - info, err := t.l1InfoTree.GetLatestInfoUntilBlock(ctx, header.Number.Uint64()) - if err == nil { - resL1InfoCounter = info.L1InfoTreeIndex + 1 - } - if errors.Is(err, l1infotreesync.ErrNotFound) { - // There are no L1 Info tree leaves yet, so we can try to use L1InfoRootMap event - l1infotreeInitial, err := t.l1InfoTree.GetInitL1InfoRootMap(ctx) - if l1infotreeInitial == nil || err != nil { - return 0, fmt.Errorf("error no leaves on L1InfoTree yet and GetInitL1InfoRootMap fails: %w", err) - } - // We use this leaf as first one - resL1InfoCounter = l1infotreeInitial.LeafCount - } else if err != nil { - return 0, fmt.Errorf("error calling GetLatestInfoUntilBlock with block num %d: %w", header.Number.Uint64(), err) - } - // special case: there are no leaves in L1InfoTree yet - if resL1InfoCounter == 0 && highestL1IndexInBatch == 0 { - log.Infof("No L1 Info tree leaves yet, batch use no leaf") - return resL1InfoCounter, nil - } - if resL1InfoCounter > highestL1IndexInBatch { - return resL1InfoCounter, nil - } - - return 0, fmt.Errorf( - "sequence contained an L1 Info tree index (%d) that is greater than the one synced with the desired finality (%d)", - highestL1IndexInBatch, resL1InfoCounter, - ) -} - -func (t *TxBuilderBananaBase) CheckL1InfoTreeLeafCounterVsInitL1InfoMap(ctx context.Context, leafCounter uint32) error { - l1infotreeInitial, err := t.l1InfoTree.GetInitL1InfoRootMap(ctx) - 
if err != nil { - return fmt.Errorf("l1InfoTree.GetInitL1InfoRootMap fails: %w", err) - } - if l1infotreeInitial == nil { - log.Warnf("No InitL1InfoRootMap found, skipping check") - return nil - } - if leafCounter < l1infotreeInitial.LeafCount { - return fmt.Errorf("cant use this leafCounter because is previous to first value on contract Map"+ - "leafCounter(%d) < l1infotreeInitial.LeafCount(%d)", leafCounter, l1infotreeInitial.LeafCount) - } - return nil -} - -func (t *TxBuilderBananaBase) NewSequence( - ctx context.Context, batches []seqsendertypes.Batch, coinbase common.Address, -) (seqsendertypes.Sequence, error) { - ethBatches := toEthermanBatches(batches) - sequence := etherman.NewSequenceBanana(ethBatches, coinbase) - greatestL1Index := getHighestL1InfoIndex(sequence.Batches) - - counterL1InfoRoot, err := t.GetCounterL1InfoRoot(ctx, greatestL1Index) - if err != nil { - log.Errorf("error getting CounterL1InfoRoot: %s", err) - return nil, err - } - sequence.CounterL1InfoRoot = counterL1InfoRoot - l1InfoRoot, err := t.getL1InfoRoot(sequence.CounterL1InfoRoot) - if err != nil { - log.Errorf("error getting L1InfoRootMap: %s", err) - return nil, err - } - err = t.CheckL1InfoTreeLeafCounterVsInitL1InfoMap(ctx, sequence.CounterL1InfoRoot) - if err != nil { - log.Errorf("error checking L1InfoTreeLeafCounterVsInitL1InfoMap: %s", err) - return nil, err - } - sequence.L1InfoRoot = l1InfoRoot - - accInputHash, err := t.rollupContract.LastAccInputHash(&bind.CallOpts{Pending: false}) - if err != nil { - log.Errorf("error getting LastAccInputHash: %s", err) - return nil, err - } - - oldAccInputHash := common.BytesToHash(accInputHash[:]) // copy it - - for _, batch := range sequence.Batches { - infoRootHash := sequence.L1InfoRoot - timestamp := sequence.MaxSequenceTimestamp - blockHash := common.Hash{} - - if batch.ForcedBatchTimestamp > 0 { - infoRootHash = batch.ForcedGlobalExitRoot - timestamp = batch.ForcedBatchTimestamp - blockHash = batch.ForcedBlockHashL1 - } - - 
accInputHash = aggkitcommon.CalculateAccInputHash( - t.logger, accInputHash, batch.L2Data, infoRootHash, timestamp, batch.LastCoinbase, blockHash, - ) - } - - sequence.OldAccInputHash = oldAccInputHash - sequence.AccInputHash = accInputHash - res := NewBananaSequence(*sequence) - return res, nil -} - -func (t *TxBuilderBananaBase) getL1InfoRoot(counterL1InfoRoot uint32) (common.Hash, error) { - return t.globalExitRootContract.L1InfoRootMap(&bind.CallOpts{Pending: false}, counterL1InfoRoot) -} - -func convertToSequenceBanana(sequences seqsendertypes.Sequence) (etherman.SequenceBanana, error) { - seqEth, ok := sequences.(*BananaSequence) - if !ok { - log.Error("sequences is not a BananaSequence") - return etherman.SequenceBanana{}, fmt.Errorf("sequences is not a BananaSequence") - } - - ethermanSequence := etherman.SequenceBanana{ - OldAccInputHash: seqEth.SequenceBanana.OldAccInputHash, - AccInputHash: seqEth.SequenceBanana.AccInputHash, - L1InfoRoot: seqEth.SequenceBanana.L1InfoRoot, - MaxSequenceTimestamp: seqEth.SequenceBanana.MaxSequenceTimestamp, - CounterL1InfoRoot: seqEth.SequenceBanana.CounterL1InfoRoot, - L2Coinbase: seqEth.SequenceBanana.L2Coinbase, - } - - for _, batch := range sequences.Batches() { - ethBatch := toEthermanBatch(batch) - ethermanSequence.Batches = append(ethermanSequence.Batches, ethBatch) - } - - return ethermanSequence, nil -} - -func toEthermanBatch(batch seqsendertypes.Batch) etherman.Batch { - return etherman.Batch{ - L2Data: batch.L2Data(), - LastCoinbase: batch.LastCoinbase(), - ForcedGlobalExitRoot: batch.ForcedGlobalExitRoot(), - ForcedBlockHashL1: batch.ForcedBlockHashL1(), - ForcedBatchTimestamp: batch.ForcedBatchTimestamp(), - BatchNumber: batch.BatchNumber(), - L1InfoTreeIndex: batch.L1InfoTreeIndex(), - LastL2BLockTimestamp: batch.LastL2BLockTimestamp(), - GlobalExitRoot: batch.GlobalExitRoot(), - } -} - -func toEthermanBatches(batch []seqsendertypes.Batch) []etherman.Batch { - result := make([]etherman.Batch, len(batch)) - 
for i, b := range batch { - result[i] = toEthermanBatch(b) - } - - return result -} diff --git a/sequencesender/txbuilder/banana_base_test.go b/sequencesender/txbuilder/banana_base_test.go deleted file mode 100644 index fdf208f1..00000000 --- a/sequencesender/txbuilder/banana_base_test.go +++ /dev/null @@ -1,162 +0,0 @@ -package txbuilder_test - -import ( - "context" - "fmt" - "math/big" - "testing" - - "github.com/agglayer/aggkit/l1infotreesync" - "github.com/agglayer/aggkit/log" - "github.com/agglayer/aggkit/sequencesender/seqsendertypes" - "github.com/agglayer/aggkit/sequencesender/txbuilder" - "github.com/agglayer/aggkit/sequencesender/txbuilder/mocks_txbuilder" - "github.com/agglayer/aggkit/state/datastream" - "github.com/ethereum/go-ethereum/accounts/abi/bind" - "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/core/types" - "github.com/stretchr/testify/mock" - "github.com/stretchr/testify/require" -) - -func TestBananaBaseNewSequenceEmpty(t *testing.T) { - testData := newBananaBaseTestData(t) - testData.l1Client.On("HeaderByNumber", mock.Anything, mock.Anything). - Return(&types.Header{Number: big.NewInt(69)}, nil) - testData.getContract.On("L1InfoRootMap", mock.Anything, uint32(70)). - Return([32]byte{}, nil) - testData.l1InfoTreeSync.On("GetLatestInfoUntilBlock", mock.Anything, mock.Anything). 
- Return(&l1infotreesync.L1InfoTreeLeaf{L1InfoTreeIndex: 69}, nil) - lastAcc := common.HexToHash("0x8aca9664752dbae36135fd0956c956fc4a370feeac67485b49bcd4b99608ae41") - testData.rollupContract.EXPECT().LastAccInputHash(mock.Anything).Return(lastAcc, nil) - testData.l1InfoTreeSync.EXPECT().GetInitL1InfoRootMap(mock.Anything).Return(nil, nil) - seq, err := testData.sut.NewSequence(context.TODO(), nil, common.Address{}) - require.NotNil(t, seq) - require.NoError(t, err) -} - -func TestBananaBaseNewSequenceErrorHeaderByNumber(t *testing.T) { - testData := newBananaBaseTestData(t) - testData.l1Client.On("HeaderByNumber", mock.Anything, mock.Anything). - Return(nil, fmt.Errorf("error")) - seq, err := testData.sut.NewSequence(context.TODO(), nil, common.Address{}) - require.Nil(t, seq) - require.Error(t, err) -} - -func TestBananaBaseNewBatchFromL2Block(t *testing.T) { - testData := newBananaBaseTestData(t) - l2Block := &datastream.L2Block{ - Timestamp: 1, - BatchNumber: 2, - L1InfotreeIndex: 3, - Coinbase: []byte{1, 2, 3}, - GlobalExitRoot: []byte{4, 5, 6}, - } - batch := testData.sut.NewBatchFromL2Block(l2Block) - require.NotNil(t, batch) - require.Equal(t, l2Block.Timestamp, batch.LastL2BLockTimestamp()) - require.Equal(t, l2Block.BatchNumber, batch.BatchNumber()) - require.Equal(t, l2Block.L1InfotreeIndex, batch.L1InfoTreeIndex()) - require.Equal(t, common.BytesToAddress(l2Block.Coinbase), batch.LastCoinbase()) - require.Equal(t, common.BytesToHash(l2Block.GlobalExitRoot), batch.GlobalExitRoot()) -} - -func TestBananaBaseNewSequenceBatch(t *testing.T) { - testData := newBananaBaseTestData(t) - testData.l1Client.On("HeaderByNumber", mock.Anything, mock.Anything). 
- Return(&types.Header{Number: big.NewInt(69)}, nil) - l2Block := &datastream.L2Block{ - Timestamp: 1, - BatchNumber: 2, - L1InfotreeIndex: 3, - Coinbase: []byte{1, 2, 3}, - GlobalExitRoot: []byte{4, 5, 6}, - } - testData.l1InfoTreeSync.EXPECT().GetInitL1InfoRootMap(mock.Anything).Return(nil, nil).Once() - - batch := testData.sut.NewBatchFromL2Block(l2Block) - batches := []seqsendertypes.Batch{batch} - lastAcc := common.HexToHash("0x8aca9664752dbae36135fd0956c956fc4a370feeac67485b49bcd4b99608ae41") - testData.rollupContract.EXPECT().LastAccInputHash(mock.Anything).Return(lastAcc, nil) - l1infoRoot := common.HexToHash("0x66ca9664752dbae36135fd0956c956fc4a370feeac67485b49bcd4b99608ae41") - testData.l1InfoTreeSync.On("GetLatestInfoUntilBlock", mock.Anything, mock.Anything). - Return(&l1infotreesync.L1InfoTreeLeaf{L1InfoTreeIndex: 7}, nil) - testData.getContract.EXPECT().L1InfoRootMap(mock.Anything, uint32(8)).Return(l1infoRoot, nil) - - seq, err := testData.sut.NewSequence(context.TODO(), batches, common.Address{}) - require.NotNil(t, seq) - require.NoError(t, err) - // TODO: check that the seq have the right values -} - -func TestBananaEmptyL1InfoTree(t *testing.T) { - testData := newBananaBaseTestData(t) - - testData.l1Client.On("HeaderByNumber", mock.Anything, mock.Anything). 
- Return(&types.Header{Number: big.NewInt(69)}, nil) - testData.l1InfoTreeSync.EXPECT().GetLatestInfoUntilBlock(testData.ctx, uint64(69)).Return(nil, l1infotreesync.ErrNotFound) - testData.l1InfoTreeSync.EXPECT().GetInitL1InfoRootMap(testData.ctx).Return(&l1infotreesync.L1InfoTreeInitial{LeafCount: 10}, nil) - - leafCounter, err := testData.sut.GetCounterL1InfoRoot(testData.ctx, 0) - require.NoError(t, err) - require.Equal(t, uint32(10), leafCounter) -} - -func TestCheckL1InfoTreeLeafCounterVsInitL1InfoMap(t *testing.T) { - testData := newBananaBaseTestData(t) - - testData.l1InfoTreeSync.EXPECT().GetInitL1InfoRootMap(testData.ctx).Return(&l1infotreesync.L1InfoTreeInitial{LeafCount: 10}, nil) - err := testData.sut.CheckL1InfoTreeLeafCounterVsInitL1InfoMap(testData.ctx, 10) - require.NoError(t, err, "10 == 10 so is accepted") - - err = testData.sut.CheckL1InfoTreeLeafCounterVsInitL1InfoMap(testData.ctx, 9) - require.Error(t, err, "9 < 10 so is rejected") - - err = testData.sut.CheckL1InfoTreeLeafCounterVsInitL1InfoMap(testData.ctx, 11) - require.NoError(t, err, "11 > 10 so is accepted") -} - -func TestCheckL1InfoTreeLeafCounterVsInitL1InfoMapNotFound(t *testing.T) { - testData := newBananaBaseTestData(t) - - testData.l1InfoTreeSync.EXPECT().GetInitL1InfoRootMap(testData.ctx).Return(nil, nil) - err := testData.sut.CheckL1InfoTreeLeafCounterVsInitL1InfoMap(testData.ctx, 10) - require.NoError(t, err, "10 == 10 so is accepted") -} - -type testDataBananaBase struct { - rollupContract *mocks_txbuilder.RollupBananaBaseContractor - getContract *mocks_txbuilder.GlobalExitRootBananaContractor - opts bind.TransactOpts - sut *txbuilder.TxBuilderBananaBase - l1InfoTreeSync *mocks_txbuilder.L1InfoSyncer - l1Client *mocks_txbuilder.L1Client - ctx context.Context -} - -func newBananaBaseTestData(t *testing.T) *testDataBananaBase { - t.Helper() - - zkevmContractMock := mocks_txbuilder.NewRollupBananaBaseContractor(t) - gerContractMock := 
mocks_txbuilder.NewGlobalExitRootBananaContractor(t) - opts := bind.TransactOpts{} - l1Client := mocks_txbuilder.NewL1Client(t) - l1InfoSyncer := mocks_txbuilder.NewL1InfoSyncer(t) - sut := txbuilder.NewTxBuilderBananaBase( - log.GetDefaultLogger(), - zkevmContractMock, - gerContractMock, - l1InfoSyncer, l1Client, big.NewInt(0), opts, - ) - require.NotNil(t, sut) - return &testDataBananaBase{ - rollupContract: zkevmContractMock, - getContract: gerContractMock, - opts: opts, - sut: sut, - l1InfoTreeSync: l1InfoSyncer, - l1Client: l1Client, - ctx: context.TODO(), - } -} diff --git a/sequencesender/txbuilder/banana_types.go b/sequencesender/txbuilder/banana_types.go deleted file mode 100644 index 8b0cf5c8..00000000 --- a/sequencesender/txbuilder/banana_types.go +++ /dev/null @@ -1,184 +0,0 @@ -package txbuilder - -import ( - "fmt" - - "github.com/agglayer/aggkit/etherman" - "github.com/agglayer/aggkit/sequencesender/seqsendertypes" - "github.com/agglayer/aggkit/state" - "github.com/ethereum/go-ethereum/common" -) - -type BananaBatch struct { - etherman.Batch -} - -type BananaSequence struct { - etherman.SequenceBanana -} - -func NewBananaBatch(batch *etherman.Batch) *BananaBatch { - return &BananaBatch{*batch} -} - -func NewBananaSequence(ult etherman.SequenceBanana) *BananaSequence { - return &BananaSequence{ult} -} - -func (b *BananaSequence) IndexL1InfoRoot() uint32 { - return b.SequenceBanana.CounterL1InfoRoot -} - -func (b *BananaSequence) MaxSequenceTimestamp() uint64 { - return b.SequenceBanana.MaxSequenceTimestamp -} - -func (b *BananaSequence) L1InfoRoot() common.Hash { - return b.SequenceBanana.L1InfoRoot -} - -func (b *BananaSequence) Batches() []seqsendertypes.Batch { - res := make([]seqsendertypes.Batch, len(b.SequenceBanana.Batches)) - for i, batch := range b.SequenceBanana.Batches { - res[i] = &BananaBatch{batch} - } - return res -} - -func (b *BananaSequence) FirstBatch() seqsendertypes.Batch { - return &BananaBatch{b.SequenceBanana.Batches[0]} -} - 
-func (b *BananaSequence) LastBatch() seqsendertypes.Batch { - return &BananaBatch{b.SequenceBanana.Batches[b.Len()-1]} -} - -func (b *BananaSequence) Len() int { - return len(b.SequenceBanana.Batches) -} - -func (b *BananaSequence) String() string { - res := fmt.Sprintf( - "Seq/Banana: L2Coinbase: %s, OldAccInputHash: %x, AccInputHash: %x, L1InfoRoot: %x, "+ - "MaxSequenceTimestamp: %d, IndexL1InfoRoot: %d", - b.L2Coinbase().String(), b.OldAccInputHash.String(), b.AccInputHash.String(), b.L1InfoRoot().String(), - b.MaxSequenceTimestamp(), b.IndexL1InfoRoot(), - ) - - for i, batch := range b.Batches() { - res += fmt.Sprintf("\n\tBatch %d: %s", i, batch.String()) - } - return res -} - -func (b *BananaSequence) L2Coinbase() common.Address { - return b.SequenceBanana.L2Coinbase -} - -func (b *BananaBatch) LastCoinbase() common.Address { - return b.Batch.LastCoinbase -} - -func (b *BananaBatch) ForcedBatchTimestamp() uint64 { - return b.Batch.ForcedBatchTimestamp -} - -func (b *BananaBatch) ForcedGlobalExitRoot() common.Hash { - return b.Batch.ForcedGlobalExitRoot -} - -func (b *BananaBatch) ForcedBlockHashL1() common.Hash { - return b.Batch.ForcedBlockHashL1 -} - -func (b *BananaBatch) L2Data() []byte { - return b.Batch.L2Data -} - -func (b *BananaBatch) LastL2BLockTimestamp() uint64 { - return b.Batch.LastL2BLockTimestamp -} - -func (b *BananaBatch) BatchNumber() uint64 { - return b.Batch.BatchNumber -} - -func (b BananaBatch) DeepCopy() seqsendertypes.Batch { - return &BananaBatch{b.Batch} -} - -func (b *BananaBatch) SetL2Data(data []byte) { - b.Batch.L2Data = data -} - -func (b *BananaBatch) SetLastCoinbase(address common.Address) { - b.Batch.LastCoinbase = address -} - -func (b *BananaBatch) SetLastL2BLockTimestamp(ts uint64) { - b.Batch.LastL2BLockTimestamp = ts -} - -func (b *BananaBatch) SetL1InfoTreeIndex(index uint32) { - b.Batch.L1InfoTreeIndex = index -} - -func (b *BananaBatch) GlobalExitRoot() common.Hash { - return b.Batch.GlobalExitRoot -} - -func (b 
*BananaBatch) L1InfoTreeIndex() uint32 { - return b.Batch.L1InfoTreeIndex -} - -func (b *BananaBatch) String() string { - return fmt.Sprintf("Batch/Banana: LastCoinbase: %s, ForcedBatchTimestamp: %d, ForcedGlobalExitRoot: %x, "+ - "ForcedBlockHashL1: %x, L2Data: %x, LastL2BLockTimestamp: %d, BatchNumber: %d, "+ - "GlobalExitRoot: %x, L1InfoTreeIndex: %d", - b.LastCoinbase().String(), b.ForcedBatchTimestamp(), b.ForcedGlobalExitRoot().String(), - b.ForcedBlockHashL1().String(), b.L2Data(), b.LastL2BLockTimestamp(), b.BatchNumber(), - b.GlobalExitRoot().String(), b.L1InfoTreeIndex(), - ) -} - -func (b *BananaSequence) LastVirtualBatchNumber() uint64 { - return b.SequenceBanana.LastVirtualBatchNumber -} - -func (b *BananaSequence) SetLastVirtualBatchNumber(batchNumber uint64) { - b.SequenceBanana.LastVirtualBatchNumber = batchNumber -} - -func calculateMaxL1InfoTreeIndexInsideL2Data(l2data []byte) (uint32, error) { - batchRawV2, err := state.DecodeBatchV2(l2data) - if err != nil { - return 0, fmt.Errorf("calculateMaxL1InfoTreeIndexInsideL2Data: error decoding batchL2Data, err:%w", err) - } - if batchRawV2 == nil { - return 0, fmt.Errorf("calculateMaxL1InfoTreeIndexInsideL2Data: batchRawV2 is nil") - } - maxIndex := uint32(0) - for _, block := range batchRawV2.Blocks { - if block.IndexL1InfoTree > maxIndex { - maxIndex = block.IndexL1InfoTree - } - } - return maxIndex, nil -} - -func calculateMaxL1InfoTreeIndexInsideSequence(seq *etherman.SequenceBanana) (uint32, error) { - if seq == nil { - return 0, fmt.Errorf("calculateMaxL1InfoTreeIndexInsideSequence: seq is nil") - } - maxIndex := uint32(0) - for _, batch := range seq.Batches { - index, err := calculateMaxL1InfoTreeIndexInsideL2Data(batch.L2Data) - if err != nil { - return 0, fmt.Errorf("calculateMaxL1InfoTreeIndexInsideBatches: error getting batch L1InfoTree , err:%w", err) - } - if index > maxIndex { - maxIndex = index - } - } - return maxIndex, nil -} diff --git a/sequencesender/txbuilder/banana_validium.go 
b/sequencesender/txbuilder/banana_validium.go deleted file mode 100644 index 69a94536..00000000 --- a/sequencesender/txbuilder/banana_validium.go +++ /dev/null @@ -1,153 +0,0 @@ -package txbuilder - -import ( - "context" - "fmt" - "math/big" - - "github.com/0xPolygon/cdk-contracts-tooling/contracts/banana/polygonvalidiumetrog" - "github.com/agglayer/aggkit/dataavailability" - "github.com/agglayer/aggkit/etherman" - "github.com/agglayer/aggkit/log" - "github.com/agglayer/aggkit/sequencesender/seqsendertypes" - "github.com/ethereum/go-ethereum/accounts/abi/bind" - "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/core/types" - "github.com/ethereum/go-ethereum/crypto" -) - -type TxBuilderBananaValidium struct { - TxBuilderBananaBase - da dataavailability.SequenceSenderBanana - condNewSeq CondNewSequence - rollupContract rollupBananaValidiumContractor -} - -type rollupBananaValidiumContractor interface { - rollupBananaBaseContractor - SequenceBatchesValidium( - opts *bind.TransactOpts, - batches []polygonvalidiumetrog.PolygonValidiumEtrogValidiumBatchData, - indexL1InfoRoot uint32, - maxSequenceTimestamp uint64, - expectedFinalAccInputHash [32]byte, - l2Coinbase common.Address, - dataAvailabilityMessage []byte, - ) (*types.Transaction, error) -} - -func NewTxBuilderBananaValidium( - logger *log.Logger, - rollupContract rollupBananaValidiumContractor, - gerContract globalExitRootBananaContractor, - da dataavailability.SequenceSenderBanana, opts bind.TransactOpts, maxBatchesForL1 uint64, - l1InfoTree l1InfoSyncer, - ethClient l1Client, - blockFinality *big.Int, -) *TxBuilderBananaValidium { - txBuilderBase := *NewTxBuilderBananaBase(logger, rollupContract, - gerContract, l1InfoTree, ethClient, blockFinality, opts) - - return &TxBuilderBananaValidium{ - TxBuilderBananaBase: txBuilderBase, - da: da, - condNewSeq: NewConditionalNewSequenceNumBatches(maxBatchesForL1), - rollupContract: rollupContract, - } -} - -func (t *TxBuilderBananaValidium) 
NewSequenceIfWorthToSend( - ctx context.Context, sequenceBatches []seqsendertypes.Batch, l2Coinbase common.Address, batchNumber uint64, -) (seqsendertypes.Sequence, error) { - return t.condNewSeq.NewSequenceIfWorthToSend(ctx, t, sequenceBatches, l2Coinbase) -} - -// SetCondNewSeq allow to override the default conditional for new sequence -func (t *TxBuilderBananaValidium) SetCondNewSeq(cond CondNewSequence) CondNewSequence { - previous := t.condNewSeq - t.condNewSeq = cond - return previous -} - -func (t *TxBuilderBananaValidium) BuildSequenceBatchesTx( - ctx context.Context, sequences seqsendertypes.Sequence, -) (*types.Transaction, error) { - // TODO: param sender - // Post sequences to DA backend - var dataAvailabilityMessage []byte - var err error - ethseq, err := convertToSequenceBanana(sequences) - if err != nil { - t.logger.Error("error converting sequences to etherman: ", err) - return nil, err - } - - dataAvailabilityMessage, err = t.da.PostSequenceBanana(ctx, ethseq) - if err != nil { - t.logger.Error("error posting sequences to the data availability protocol: ", err) - return nil, err - } - if dataAvailabilityMessage == nil { - err := fmt.Errorf("data availability message is nil") - t.logger.Error("error posting sequences to the data availability protocol: ", err.Error()) - return nil, err - } - - // Build sequence data - tx, err := t.internalBuildSequenceBatchesTx(ethseq, dataAvailabilityMessage) - if err != nil { - t.logger.Errorf("error estimating new sequenceBatches to add to ethtxmanager: ", err) - return nil, err - } - return tx, nil -} - -// BuildSequenceBatchesTx builds a tx to be sent to the PoE SC method SequenceBatches. 
-func (t *TxBuilderBananaValidium) internalBuildSequenceBatchesTx(sequence etherman.SequenceBanana, - dataAvailabilityMessage []byte) (*types.Transaction, error) { - newopts := t.opts - newopts.NoSend = true - - // force nonce, gas limit and gas price to avoid querying it from the chain - newopts.Nonce = big.NewInt(1) - newopts.GasLimit = uint64(1) - newopts.GasPrice = big.NewInt(1) - - return t.sequenceBatchesValidium(newopts, sequence, dataAvailabilityMessage) -} - -func (t *TxBuilderBananaValidium) sequenceBatchesValidium( - opts bind.TransactOpts, sequence etherman.SequenceBanana, dataAvailabilityMessage []byte, -) (*types.Transaction, error) { - batches := make([]polygonvalidiumetrog.PolygonValidiumEtrogValidiumBatchData, len(sequence.Batches)) - for i, batch := range sequence.Batches { - var ger common.Hash - if batch.ForcedBatchTimestamp > 0 { - ger = batch.ForcedGlobalExitRoot - } - - batches[i] = polygonvalidiumetrog.PolygonValidiumEtrogValidiumBatchData{ - TransactionsHash: crypto.Keccak256Hash(batch.L2Data), - ForcedGlobalExitRoot: ger, - ForcedTimestamp: batch.ForcedBatchTimestamp, - ForcedBlockHashL1: batch.ForcedBlockHashL1, - } - } - - t.logger.Infof("building banana sequence tx. 
AccInputHash: %s", sequence.AccInputHash.Hex()) - tx, err := t.rollupContract.SequenceBatchesValidium( - &opts, batches, sequence.CounterL1InfoRoot, sequence.MaxSequenceTimestamp, - sequence.AccInputHash, sequence.L2Coinbase, dataAvailabilityMessage, - ) - if err != nil { - t.logger.Debugf("Batches to send: %+v", batches) - t.logger.Debug("l2CoinBase: ", sequence.L2Coinbase) - t.logger.Debug("Sequencer address: ", opts.From) - } - - return tx, err -} - -func (t *TxBuilderBananaValidium) String() string { - return "Banana/Validium" -} diff --git a/sequencesender/txbuilder/banana_validium_test.go b/sequencesender/txbuilder/banana_validium_test.go deleted file mode 100644 index 390522a2..00000000 --- a/sequencesender/txbuilder/banana_validium_test.go +++ /dev/null @@ -1,139 +0,0 @@ -package txbuilder_test - -import ( - "context" - "fmt" - "math/big" - "strings" - "testing" - - "github.com/agglayer/aggkit/dataavailability/mocks_da" - "github.com/agglayer/aggkit/l1infotreesync" - "github.com/agglayer/aggkit/log" - "github.com/agglayer/aggkit/sequencesender/seqsendertypes" - "github.com/agglayer/aggkit/sequencesender/txbuilder" - "github.com/agglayer/aggkit/sequencesender/txbuilder/mocks_txbuilder" - "github.com/agglayer/aggkit/state/datastream" - "github.com/ethereum/go-ethereum/accounts/abi/bind" - "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/core/types" - "github.com/stretchr/testify/mock" - "github.com/stretchr/testify/require" -) - -func TestBananaValidiumName(t *testing.T) { - testData := newBananaValidiumTestData(t, txbuilder.MaxBatchesForL1Disabled) - require.NotNil(t, testData.sut) - require.True(t, strings.Contains(testData.sut.String(), "Banana")) - require.True(t, strings.Contains(testData.sut.String(), "Validium")) -} - -func TestBananaValidiumBuildSequenceBatchesTxSequenceErrorsFromDA(t *testing.T) { - testData := newBananaValidiumTestData(t, txbuilder.MaxBatchesForL1Disabled) - testData.l1Client.On("HeaderByNumber", 
mock.Anything, mock.Anything). - Return(&types.Header{Number: big.NewInt(69)}, nil) - testData.l1InfoTreeSync.On("GetLatestInfoUntilBlock", mock.Anything, mock.Anything). - Return(&l1infotreesync.L1InfoTreeLeaf{L1InfoTreeIndex: 7}, nil) - testData.l1InfoTreeSync.EXPECT().GetInitL1InfoRootMap(mock.Anything).Return(nil, nil) - - seq, err := newSequenceBananaValidiumForTest(testData) - require.NoError(t, err) - ctx := context.TODO() - testData.da.EXPECT().PostSequenceBanana(ctx, mock.Anything).Return(nil, nil).Once() - - _, err = testData.sut.BuildSequenceBatchesTx(ctx, seq) - require.Error(t, err, "data availability message is nil") - - testData.da.EXPECT().PostSequenceBanana(ctx, mock.Anything).Return(nil, fmt.Errorf("test error")) - _, err = testData.sut.BuildSequenceBatchesTx(ctx, seq) - require.Error(t, err, "error posting sequences to the data availability protocol: test error") -} - -func TestBananaValidiumBuildSequenceBatchesTxSequenceDAOk(t *testing.T) { - testData := newBananaValidiumTestData(t, txbuilder.MaxBatchesForL1Disabled) - testData.l1Client.On("HeaderByNumber", mock.Anything, mock.Anything). - Return(&types.Header{Number: big.NewInt(69)}, nil) - testData.l1InfoTreeSync.On("GetLatestInfoUntilBlock", mock.Anything, mock.Anything). 
- Return(&l1infotreesync.L1InfoTreeLeaf{L1InfoTreeIndex: 7}, nil) - testData.l1InfoTreeSync.EXPECT().GetInitL1InfoRootMap(mock.Anything).Return(nil, nil) - - seq, err := newSequenceBananaValidiumForTest(testData) - require.NoError(t, err) - ctx := context.TODO() - daMessage := []byte{1} - testData.da.EXPECT().PostSequenceBanana(ctx, mock.Anything).Return(daMessage, nil) - inner := &types.LegacyTx{} - seqBatchesTx := types.NewTx(inner) - testData.rollupContract.EXPECT().SequenceBatchesValidium(mock.MatchedBy(func(opts *bind.TransactOpts) bool { - return opts.NoSend == true - }), mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything, daMessage).Return(seqBatchesTx, nil).Once() - tx, err := testData.sut.BuildSequenceBatchesTx(ctx, seq) - require.NoError(t, err) - require.Equal(t, seqBatchesTx, tx) -} - -type testDataBananaValidium struct { - rollupContract *mocks_txbuilder.RollupBananaValidiumContractor - getContract *mocks_txbuilder.GlobalExitRootBananaContractor - cond *mocks_txbuilder.CondNewSequence - da *mocks_da.SequenceSenderBanana - opts bind.TransactOpts - sut *txbuilder.TxBuilderBananaValidium - l1InfoTreeSync *mocks_txbuilder.L1InfoSyncer - l1Client *mocks_txbuilder.L1Client -} - -func newBananaValidiumTestData(t *testing.T, maxBatchesForL1 uint64) *testDataBananaValidium { - t.Helper() - - zkevmContractMock := mocks_txbuilder.NewRollupBananaValidiumContractor(t) - gerContractMock := mocks_txbuilder.NewGlobalExitRootBananaContractor(t) - condMock := mocks_txbuilder.NewCondNewSequence(t) - daMock := mocks_da.NewSequenceSenderBanana(t) - l1Client := mocks_txbuilder.NewL1Client(t) - l1InfoSyncer := mocks_txbuilder.NewL1InfoSyncer(t) - - opts := bind.TransactOpts{} - sut := txbuilder.NewTxBuilderBananaValidium( - log.GetDefaultLogger(), - zkevmContractMock, - gerContractMock, - daMock, - opts, - maxBatchesForL1, - l1InfoSyncer, - l1Client, - big.NewInt(0), - ) - require.NotNil(t, sut) - sut.SetCondNewSeq(condMock) - return 
&testDataBananaValidium{ - rollupContract: zkevmContractMock, - getContract: gerContractMock, - cond: condMock, - da: daMock, - opts: opts, - sut: sut, - l1InfoTreeSync: l1InfoSyncer, - l1Client: l1Client, - } -} - -func newSequenceBananaValidiumForTest(testData *testDataBananaValidium) (seqsendertypes.Sequence, error) { - l2Block := &datastream.L2Block{ - Timestamp: 1, - BatchNumber: 1, - L1InfotreeIndex: 3, - Coinbase: []byte{1, 2, 3}, - GlobalExitRoot: []byte{4, 5, 6}, - } - batch := testData.sut.NewBatchFromL2Block(l2Block) - batches := []seqsendertypes.Batch{ - batch, - } - lastAcc := common.HexToHash("0x8aca9664752dbae36135fd0956c956fc4a370feeac67485b49bcd4b99608ae41") - testData.rollupContract.EXPECT().LastAccInputHash(mock.Anything).Return(lastAcc, nil).Once() - l1infoRoot := common.HexToHash("0x66ca9664752dbae36135fd0956c956fc4a370feeac67485b49bcd4b99608ae41") - testData.getContract.EXPECT().L1InfoRootMap(mock.Anything, uint32(8)).Return(l1infoRoot, nil).Once() - return testData.sut.NewSequence(context.TODO(), batches, common.Address{}) -} diff --git a/sequencesender/txbuilder/banana_zkevm.go b/sequencesender/txbuilder/banana_zkevm.go deleted file mode 100644 index 1151fcdf..00000000 --- a/sequencesender/txbuilder/banana_zkevm.go +++ /dev/null @@ -1,128 +0,0 @@ -package txbuilder - -import ( - "context" - "math/big" - - "github.com/0xPolygon/cdk-contracts-tooling/contracts/banana/polygonvalidiumetrog" - "github.com/agglayer/aggkit/etherman" - "github.com/agglayer/aggkit/log" - "github.com/agglayer/aggkit/sequencesender/seqsendertypes" - "github.com/ethereum/go-ethereum/accounts/abi/bind" - "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/core/types" -) - -type TxBuilderBananaZKEVM struct { - TxBuilderBananaBase - condNewSeq CondNewSequence - rollupContract rollupBananaZKEVMContractor -} - -type rollupBananaZKEVMContractor interface { - rollupBananaBaseContractor - SequenceBatches( - opts *bind.TransactOpts, - batches 
[]polygonvalidiumetrog.PolygonRollupBaseEtrogBatchData, - indexL1InfoRoot uint32, - maxSequenceTimestamp uint64, - expectedFinalAccInputHash [32]byte, - l2Coinbase common.Address, - ) (*types.Transaction, error) -} - -type globalExitRootBananaZKEVMContractor interface { - globalExitRootBananaContractor -} - -func NewTxBuilderBananaZKEVM( - logger *log.Logger, - rollupContract rollupBananaZKEVMContractor, - gerContract globalExitRootBananaZKEVMContractor, - opts bind.TransactOpts, - maxTxSizeForL1 uint64, - l1InfoTree l1InfoSyncer, - ethClient l1Client, - blockFinality *big.Int, -) *TxBuilderBananaZKEVM { - txBuilderBase := *NewTxBuilderBananaBase(logger, rollupContract, - gerContract, l1InfoTree, ethClient, blockFinality, opts) - - return &TxBuilderBananaZKEVM{ - TxBuilderBananaBase: txBuilderBase, - condNewSeq: NewConditionalNewSequenceMaxSize(maxTxSizeForL1), - rollupContract: rollupContract, - } -} - -func (t *TxBuilderBananaZKEVM) NewSequenceIfWorthToSend( - ctx context.Context, sequenceBatches []seqsendertypes.Batch, l2Coinbase common.Address, batchNumber uint64, -) (seqsendertypes.Sequence, error) { - return t.condNewSeq.NewSequenceIfWorthToSend(ctx, t, sequenceBatches, l2Coinbase) -} - -// SetCondNewSeq allow to override the default conditional for new sequence -func (t *TxBuilderBananaZKEVM) SetCondNewSeq(cond CondNewSequence) CondNewSequence { - previous := t.condNewSeq - t.condNewSeq = cond - return previous -} - -func (t *TxBuilderBananaZKEVM) BuildSequenceBatchesTx( - ctx context.Context, sequences seqsendertypes.Sequence, -) (*types.Transaction, error) { - var err error - ethseq, err := convertToSequenceBanana(sequences) - if err != nil { - t.logger.Error("error converting sequences to etherman: ", err) - return nil, err - } - newopts := t.opts - newopts.NoSend = true - - // force nonce, gas limit and gas price to avoid querying it from the chain - newopts.Nonce = big.NewInt(1) - newopts.GasLimit = uint64(1) - newopts.GasPrice = big.NewInt(1) - // 
Build sequence data - tx, err := t.sequenceBatchesRollup(newopts, ethseq) - if err != nil { - t.logger.Errorf("error estimating new sequenceBatches to add to ethtxmanager: ", err) - return nil, err - } - return tx, nil -} - -func (t *TxBuilderBananaZKEVM) sequenceBatchesRollup( - opts bind.TransactOpts, sequence etherman.SequenceBanana, -) (*types.Transaction, error) { - batches := make([]polygonvalidiumetrog.PolygonRollupBaseEtrogBatchData, len(sequence.Batches)) - for i, batch := range sequence.Batches { - var ger common.Hash - if batch.ForcedBatchTimestamp > 0 { - ger = batch.ForcedGlobalExitRoot - } - - batches[i] = polygonvalidiumetrog.PolygonRollupBaseEtrogBatchData{ - Transactions: batch.L2Data, - ForcedGlobalExitRoot: ger, - ForcedTimestamp: batch.ForcedBatchTimestamp, - ForcedBlockHashL1: batch.ForcedBlockHashL1, - } - } - - tx, err := t.rollupContract.SequenceBatches( - &opts, batches, sequence.CounterL1InfoRoot, sequence.MaxSequenceTimestamp, sequence.AccInputHash, sequence.L2Coinbase, - ) - if err != nil { - t.logger.Debugf("Batches to send: %+v", batches) - t.logger.Debug("l2CoinBase: ", sequence.L2Coinbase) - t.logger.Debug("Sequencer address: ", opts.From) - } - - return tx, err -} - -func (t *TxBuilderBananaZKEVM) String() string { - return "Banana/ZKEVM" -} diff --git a/sequencesender/txbuilder/banana_zkevm_test.go b/sequencesender/txbuilder/banana_zkevm_test.go deleted file mode 100644 index a05d23c4..00000000 --- a/sequencesender/txbuilder/banana_zkevm_test.go +++ /dev/null @@ -1,137 +0,0 @@ -package txbuilder_test - -import ( - "context" - "fmt" - "math/big" - "strings" - "testing" - - "github.com/agglayer/aggkit/l1infotreesync" - "github.com/agglayer/aggkit/log" - "github.com/agglayer/aggkit/sequencesender/seqsendertypes" - "github.com/agglayer/aggkit/sequencesender/txbuilder" - "github.com/agglayer/aggkit/sequencesender/txbuilder/mocks_txbuilder" - "github.com/agglayer/aggkit/state/datastream" - 
"github.com/ethereum/go-ethereum/accounts/abi/bind" - "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/core/types" - "github.com/stretchr/testify/mock" - "github.com/stretchr/testify/require" -) - -func TestBananaZkevmName(t *testing.T) { - testData := newBananaZKEVMTestData(t, txbuilder.MaxTxSizeForL1Disabled) - require.True(t, strings.Contains(testData.sut.String(), "Banana")) - require.True(t, strings.Contains(testData.sut.String(), "ZKEVM")) -} - -func TestBananaZkevmNewSequenceIfWorthToSend(t *testing.T) { - testData := newBananaZKEVMTestData(t, txbuilder.MaxTxSizeForL1Disabled) - - testSequenceIfWorthToSendNoNewSeq(t, testData.sut) - testSequenceIfWorthToSendErr(t, testData.sut) - testSetCondNewSeq(t, testData.sut) -} - -func TestBananaZkevmBuildSequenceBatchesTxOk(t *testing.T) { - testData := newBananaZKEVMTestData(t, txbuilder.MaxTxSizeForL1Disabled) - testData.l1Client.On("HeaderByNumber", mock.Anything, mock.Anything). - Return(&types.Header{Number: big.NewInt(69)}, nil) - testData.l1InfoTreeSync.On("GetLatestInfoUntilBlock", mock.Anything, mock.Anything). 
- Return(&l1infotreesync.L1InfoTreeLeaf{L1InfoTreeIndex: 7}, nil) - testData.l1InfoTreeSync.EXPECT().GetInitL1InfoRootMap(mock.Anything).Return(nil, nil) - - seq, err := newSequenceBananaZKEVMForTest(testData) - require.NoError(t, err) - - inner := &types.LegacyTx{} - tx := types.NewTx(inner) - - // It check that SequenceBatches is not going to be send - testData.rollupContract.EXPECT().SequenceBatches(mock.MatchedBy(func(opts *bind.TransactOpts) bool { - return opts.NoSend == true - }), mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything).Return(tx, nil).Once() - returnTx, err := testData.sut.BuildSequenceBatchesTx(context.TODO(), seq) - require.NoError(t, err) - require.Equal(t, tx, returnTx) -} - -func TestBananaZkevmBuildSequenceBatchesTxErr(t *testing.T) { - testData := newBananaZKEVMTestData(t, txbuilder.MaxTxSizeForL1Disabled) - testData.l1Client.On("HeaderByNumber", mock.Anything, mock.Anything). - Return(&types.Header{Number: big.NewInt(69)}, nil) - testData.l1InfoTreeSync.On("GetLatestInfoUntilBlock", mock.Anything, mock.Anything). 
- Return(&l1infotreesync.L1InfoTreeLeaf{L1InfoTreeIndex: 7}, nil) - testData.l1InfoTreeSync.EXPECT().GetInitL1InfoRootMap(mock.Anything).Return(nil, nil) - - seq, err := newSequenceBananaZKEVMForTest(testData) - require.NoError(t, err) - - err = fmt.Errorf("test-error") - testData.rollupContract.EXPECT().SequenceBatches(mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything).Return(nil, err).Once() - returnedTx, returnedErr := testData.sut.BuildSequenceBatchesTx(context.TODO(), seq) - require.ErrorContains(t, returnedErr, err.Error()) - require.Nil(t, returnedTx) -} - -type testDataBananaZKEVM struct { - rollupContract *mocks_txbuilder.RollupBananaZKEVMContractor - getContract *mocks_txbuilder.GlobalExitRootBananaContractor - cond *mocks_txbuilder.CondNewSequence - opts bind.TransactOpts - sut *txbuilder.TxBuilderBananaZKEVM - l1InfoTreeSync *mocks_txbuilder.L1InfoSyncer - l1Client *mocks_txbuilder.L1Client -} - -func newBananaZKEVMTestData(t *testing.T, maxTxSizeForL1 uint64) *testDataBananaZKEVM { - t.Helper() - - zkevmContractMock := mocks_txbuilder.NewRollupBananaZKEVMContractor(t) - gerContractMock := mocks_txbuilder.NewGlobalExitRootBananaContractor(t) - condMock := mocks_txbuilder.NewCondNewSequence(t) - opts := bind.TransactOpts{} - l1Client := mocks_txbuilder.NewL1Client(t) - l1InfoSyncer := mocks_txbuilder.NewL1InfoSyncer(t) - sut := txbuilder.NewTxBuilderBananaZKEVM( - log.GetDefaultLogger(), - zkevmContractMock, - gerContractMock, - opts, - maxTxSizeForL1, - l1InfoSyncer, - l1Client, - big.NewInt(0), - ) - require.NotNil(t, sut) - sut.SetCondNewSeq(condMock) - return &testDataBananaZKEVM{ - rollupContract: zkevmContractMock, - getContract: gerContractMock, - cond: condMock, - opts: opts, - sut: sut, - l1InfoTreeSync: l1InfoSyncer, - l1Client: l1Client, - } -} - -func newSequenceBananaZKEVMForTest(testData *testDataBananaZKEVM) (seqsendertypes.Sequence, error) { - l2Block := &datastream.L2Block{ - Timestamp: 1, - 
BatchNumber: 1, - L1InfotreeIndex: 3, - Coinbase: []byte{1, 2, 3}, - GlobalExitRoot: []byte{4, 5, 6}, - } - batch := testData.sut.NewBatchFromL2Block(l2Block) - batches := []seqsendertypes.Batch{ - batch, - } - lastAcc := common.HexToHash("0x8aca9664752dbae36135fd0956c956fc4a370feeac67485b49bcd4b99608ae41") - testData.rollupContract.EXPECT().LastAccInputHash(mock.Anything).Return(lastAcc, nil).Once() - l1infoRoot := common.HexToHash("0x66ca9664752dbae36135fd0956c956fc4a370feeac67485b49bcd4b99608ae41") - testData.getContract.EXPECT().L1InfoRootMap(mock.Anything, uint32(8)).Return(l1infoRoot, nil).Once() - return testData.sut.NewSequence(context.TODO(), batches, common.Address{}) -} diff --git a/sequencesender/txbuilder/elderberry_base.go b/sequencesender/txbuilder/elderberry_base.go deleted file mode 100644 index f8d8b307..00000000 --- a/sequencesender/txbuilder/elderberry_base.go +++ /dev/null @@ -1,63 +0,0 @@ -package txbuilder - -import ( - "context" - - "github.com/agglayer/aggkit/etherman" - "github.com/agglayer/aggkit/log" - "github.com/agglayer/aggkit/sequencesender/seqsendertypes" - "github.com/agglayer/aggkit/state/datastream" - "github.com/ethereum/go-ethereum/accounts/abi/bind" - "github.com/ethereum/go-ethereum/common" -) - -type TxBuilderElderberryBase struct { - logger *log.Logger - opts bind.TransactOpts -} - -func NewTxBuilderElderberryBase(logger *log.Logger, opts bind.TransactOpts) *TxBuilderElderberryBase { - return &TxBuilderElderberryBase{ - logger: logger, - opts: opts, - } -} - -// SetAuth sets the auth for the tx builder -func (t *TxBuilderElderberryBase) SetAuth(auth *bind.TransactOpts) { - t.opts = *auth -} - -func (t *TxBuilderElderberryBase) NewSequence( - ctx context.Context, batches []seqsendertypes.Batch, coinbase common.Address, -) (seqsendertypes.Sequence, error) { - seq := ElderberrySequence{ - l2Coinbase: coinbase, - batches: batches, - } - return &seq, nil -} - -func (t *TxBuilderElderberryBase) NewBatchFromL2Block(l2Block 
*datastream.L2Block) seqsendertypes.Batch { - batch := ðerman.Batch{ - LastL2BLockTimestamp: l2Block.Timestamp, - BatchNumber: l2Block.BatchNumber, - L1InfoTreeIndex: l2Block.L1InfotreeIndex, - LastCoinbase: common.BytesToAddress(l2Block.Coinbase), - GlobalExitRoot: common.BytesToHash(l2Block.GlobalExitRoot), - } - return NewBananaBatch(batch) -} - -func getLastSequencedBatchNumber(sequences seqsendertypes.Sequence) uint64 { - if sequences.Len() == 0 { - return 0 - } - if sequences.FirstBatch().BatchNumber() == 0 { - panic("First batch number is 0, that is not allowed!") - } - if sequences.LastVirtualBatchNumber() != 0 { - return sequences.LastVirtualBatchNumber() - } - return sequences.FirstBatch().BatchNumber() - 1 -} diff --git a/sequencesender/txbuilder/elderberry_base_test.go b/sequencesender/txbuilder/elderberry_base_test.go deleted file mode 100644 index 46a16148..00000000 --- a/sequencesender/txbuilder/elderberry_base_test.go +++ /dev/null @@ -1,101 +0,0 @@ -package txbuilder - -import ( - "context" - "testing" - - "github.com/agglayer/aggkit/log" - "github.com/agglayer/aggkit/sequencesender/seqsendertypes" - "github.com/agglayer/aggkit/state/datastream" - "github.com/ethereum/go-ethereum/accounts/abi/bind" - "github.com/ethereum/go-ethereum/common" - "github.com/stretchr/testify/require" -) - -func TestElderberryBaseNewSequence(t *testing.T) { - opts := bind.TransactOpts{} - sut := NewTxBuilderElderberryBase(log.GetDefaultLogger(), opts) - require.NotNil(t, sut) - seq, err := sut.NewSequence(context.TODO(), nil, common.Address{}) - require.NotNil(t, seq) - require.NoError(t, err) -} - -func TestElderberryBaseNewBatchFromL2Block(t *testing.T) { - sut := newElderberryBaseSUT(t) - l2Block := &datastream.L2Block{ - Timestamp: 1, - BatchNumber: 2, - L1InfotreeIndex: 3, - Coinbase: []byte{1, 2, 3}, - GlobalExitRoot: []byte{4, 5, 6}, - } - batch := sut.NewBatchFromL2Block(l2Block) - require.NotNil(t, batch) - require.Equal(t, l2Block.Timestamp, 
batch.LastL2BLockTimestamp()) - require.Equal(t, l2Block.BatchNumber, batch.BatchNumber()) - require.Equal(t, l2Block.L1InfotreeIndex, batch.L1InfoTreeIndex()) - require.Equal(t, common.BytesToAddress(l2Block.Coinbase), batch.LastCoinbase()) - require.Equal(t, common.BytesToHash(l2Block.GlobalExitRoot), batch.GlobalExitRoot()) -} - -func TestElderberryBasegetLastSequencedBatchNumberEmpty(t *testing.T) { - sut := newElderberryBaseSUT(t) - seq, err := sut.NewSequence(context.TODO(), nil, common.Address{}) - require.NoError(t, err) - - require.Equal(t, uint64(0), getLastSequencedBatchNumber(seq)) -} - -func TestElderberryBasegetLastSequencedBatch1Batch(t *testing.T) { - sut := newElderberryBaseSUT(t) - l2Block := &datastream.L2Block{ - Timestamp: 1, - BatchNumber: 2, - L1InfotreeIndex: 3, - Coinbase: []byte{1, 2, 3}, - GlobalExitRoot: []byte{4, 5, 6}, - } - batchElder := sut.NewBatchFromL2Block(l2Block) - batches := []seqsendertypes.Batch{ - batchElder, - } - - seq, err := sut.NewSequence(context.TODO(), batches, common.Address{}) - require.NoError(t, err) - - require.Equal(t, l2Block.BatchNumber-1, getLastSequencedBatchNumber(seq)) -} - -func TestElderberryBaseGetLastSequencedBatchFirstBatchIsZeroThrowAPanic(t *testing.T) { - sut := newElderberryBaseSUT(t) - l2Block := &datastream.L2Block{ - Timestamp: 1, - BatchNumber: 0, - L1InfotreeIndex: 3, - Coinbase: []byte{1, 2, 3}, - GlobalExitRoot: []byte{4, 5, 6}, - } - batchElder := sut.NewBatchFromL2Block(l2Block) - batches := []seqsendertypes.Batch{ - batchElder, - } - - seq, err := sut.NewSequence(context.TODO(), batches, common.Address{}) - require.NoError(t, err) - defer func() { - if r := recover(); r == nil { - t.Errorf("The code did not panic") - } - }() - getLastSequencedBatchNumber(seq) -} - -func newElderberryBaseSUT(t *testing.T) *TxBuilderElderberryBase { - t.Helper() - - opts := bind.TransactOpts{} - sut := NewTxBuilderElderberryBase(log.GetDefaultLogger(), opts) - require.NotNil(t, sut) - return sut -} diff 
--git a/sequencesender/txbuilder/elderberry_types.go b/sequencesender/txbuilder/elderberry_types.go deleted file mode 100644 index 8800ab21..00000000 --- a/sequencesender/txbuilder/elderberry_types.go +++ /dev/null @@ -1,72 +0,0 @@ -package txbuilder - -import ( - "fmt" - "log" - - "github.com/agglayer/aggkit/sequencesender/seqsendertypes" - "github.com/ethereum/go-ethereum/common" -) - -type ElderberrySequence struct { - l2Coinbase common.Address - batches []seqsendertypes.Batch - lastVirtualBatchNumber uint64 -} - -func NewElderberrySequence(batches []seqsendertypes.Batch, l2Coinbase common.Address) *ElderberrySequence { - return &ElderberrySequence{ - l2Coinbase: l2Coinbase, - batches: batches, - } -} - -func (b *ElderberrySequence) IndexL1InfoRoot() uint32 { - log.Fatal("Elderberry Sequence does not have IndexL1InfoRoot") - return 0 -} - -func (b *ElderberrySequence) MaxSequenceTimestamp() uint64 { - return b.LastBatch().LastL2BLockTimestamp() -} - -func (b *ElderberrySequence) L1InfoRoot() common.Hash { - log.Fatal("Elderberry Sequence does not have L1InfoRoot") - return common.Hash{} -} - -func (b *ElderberrySequence) Batches() []seqsendertypes.Batch { - return b.batches -} - -func (b *ElderberrySequence) FirstBatch() seqsendertypes.Batch { - return b.batches[0] -} - -func (b *ElderberrySequence) LastBatch() seqsendertypes.Batch { - return b.batches[b.Len()-1] -} - -func (b *ElderberrySequence) Len() int { - return len(b.batches) -} - -func (b *ElderberrySequence) L2Coinbase() common.Address { - return b.l2Coinbase -} - -func (b *ElderberrySequence) String() string { - res := fmt.Sprintf("Seq/Elderberry: L2Coinbase: %s, Batches: %d", b.l2Coinbase.String(), len(b.batches)) - for i, batch := range b.Batches() { - res += fmt.Sprintf("\n\tBatch %d: %s", i, batch.String()) - } - return res -} - -func (b *ElderberrySequence) SetLastVirtualBatchNumber(batchNumber uint64) { - b.lastVirtualBatchNumber = batchNumber -} - -func (b *ElderberrySequence) 
LastVirtualBatchNumber() uint64 { - return b.lastVirtualBatchNumber -} diff --git a/sequencesender/txbuilder/elderberry_validium.go b/sequencesender/txbuilder/elderberry_validium.go deleted file mode 100644 index ff0cf693..00000000 --- a/sequencesender/txbuilder/elderberry_validium.go +++ /dev/null @@ -1,134 +0,0 @@ -package txbuilder - -import ( - "context" - "encoding/hex" - "fmt" - "math/big" - - "github.com/0xPolygon/cdk-contracts-tooling/contracts/elderberry/polygonvalidiumetrog" - "github.com/agglayer/aggkit/dataavailability" - "github.com/agglayer/aggkit/etherman" - "github.com/agglayer/aggkit/etherman/contracts" - "github.com/agglayer/aggkit/log" - "github.com/agglayer/aggkit/sequencesender/seqsendertypes" - "github.com/ethereum/go-ethereum/accounts/abi/bind" - "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/core/types" - "github.com/ethereum/go-ethereum/crypto" -) - -type TxBuilderElderberryValidium struct { - TxBuilderElderberryBase - da dataavailability.SequenceSenderElderberry - condNewSeq CondNewSequence - rollupContract rollupElderberryValidiumContractor -} - -type rollupElderberryValidiumContractor interface { - SequenceBatchesValidium( - opts *bind.TransactOpts, - batches []polygonvalidiumetrog.PolygonValidiumEtrogValidiumBatchData, - maxSequenceTimestamp uint64, - initSequencedBatch uint64, - l2Coinbase common.Address, - dataAvailabilityMessage []byte, - ) (*types.Transaction, error) -} - -func NewTxBuilderElderberryValidium( - logger *log.Logger, - zkevm contracts.RollupElderberryType, - da dataavailability.SequenceSenderElderberry, - opts bind.TransactOpts, maxBatchesForL1 uint64) *TxBuilderElderberryValidium { - return &TxBuilderElderberryValidium{ - da: da, - TxBuilderElderberryBase: *NewTxBuilderElderberryBase(logger, opts), - condNewSeq: NewConditionalNewSequenceNumBatches(maxBatchesForL1), - rollupContract: zkevm, - } -} -func (t *TxBuilderElderberryValidium) NewSequenceIfWorthToSend( - ctx context.Context, 
sequenceBatches []seqsendertypes.Batch, l2Coinbase common.Address, batchNumber uint64, -) (seqsendertypes.Sequence, error) { - return t.condNewSeq.NewSequenceIfWorthToSend(ctx, t, sequenceBatches, l2Coinbase) -} - -// SetCondNewSeq allow to override the default conditional for new sequence -func (t *TxBuilderElderberryValidium) SetCondNewSeq(cond CondNewSequence) CondNewSequence { - previous := t.condNewSeq - t.condNewSeq = cond - return previous -} - -func (t *TxBuilderElderberryValidium) BuildSequenceBatchesTx( - ctx context.Context, sequences seqsendertypes.Sequence, -) (*types.Transaction, error) { - if sequences == nil || sequences.Len() == 0 { - return nil, fmt.Errorf("can't sequence an empty sequence") - } - batchesData := convertToBatchesData(sequences) - dataAvailabilityMessage, err := t.da.PostSequenceElderberry(ctx, batchesData) - if err != nil { - t.logger.Error("error posting sequences to the data availability protocol: ", err) - return nil, err - } - if dataAvailabilityMessage == nil { - err := fmt.Errorf("data availability message is nil") - t.logger.Error("error posting sequences to the data availability protocol: ", err.Error()) - return nil, err - } - newopts := t.opts - newopts.NoSend = true - - // force nonce, gas limit and gas price to avoid querying it from the chain - newopts.Nonce = big.NewInt(1) - newopts.GasLimit = uint64(1) - newopts.GasPrice = big.NewInt(1) - - return t.buildSequenceBatchesTxValidium(&newopts, sequences, dataAvailabilityMessage) -} - -func (t *TxBuilderElderberryValidium) buildSequenceBatchesTxValidium(opts *bind.TransactOpts, - sequences seqsendertypes.Sequence, dataAvailabilityMessage []byte) (*types.Transaction, error) { - batches := make([]polygonvalidiumetrog.PolygonValidiumEtrogValidiumBatchData, sequences.Len()) - for i, seq := range sequences.Batches() { - var ger common.Hash - if seq.ForcedBatchTimestamp() > 0 { - ger = seq.GlobalExitRoot() - } - batches[i] = 
polygonvalidiumetrog.PolygonValidiumEtrogValidiumBatchData{ - TransactionsHash: crypto.Keccak256Hash(seq.L2Data()), - ForcedGlobalExitRoot: ger, - ForcedTimestamp: seq.ForcedBatchTimestamp(), - ForcedBlockHashL1: seq.ForcedBlockHashL1(), - } - } - lastSequencedBatchNumber := getLastSequencedBatchNumber(sequences) - t.logger.Infof("SequenceBatchesValidium(from=%s, len(batches)=%d, MaxSequenceTimestamp=%d, "+ - "lastSequencedBatchNumber=%d, L2Coinbase=%s, dataAvailabilityMessage=%s)", - t.opts.From.String(), len(batches), sequences.MaxSequenceTimestamp(), lastSequencedBatchNumber, - sequences.L2Coinbase().String(), hex.EncodeToString(dataAvailabilityMessage), - ) - tx, err := t.rollupContract.SequenceBatchesValidium(opts, batches, sequences.MaxSequenceTimestamp(), - lastSequencedBatchNumber, sequences.L2Coinbase(), dataAvailabilityMessage) - if err != nil { - if parsedErr, ok := etherman.TryParseError(err); ok { - err = parsedErr - } - } - - return tx, err -} - -func (t *TxBuilderElderberryValidium) String() string { - return "Elderberry/Validium" -} - -func convertToBatchesData(sequences seqsendertypes.Sequence) [][]byte { - batches := make([][]byte, sequences.Len()) - for i, batch := range sequences.Batches() { - batches[i] = batch.L2Data() - } - return batches -} diff --git a/sequencesender/txbuilder/elderberry_validium_test.go b/sequencesender/txbuilder/elderberry_validium_test.go deleted file mode 100644 index ee6b10f8..00000000 --- a/sequencesender/txbuilder/elderberry_validium_test.go +++ /dev/null @@ -1,119 +0,0 @@ -package txbuilder_test - -import ( - "context" - "fmt" - "math/big" - "strings" - "testing" - - "github.com/0xPolygon/cdk-contracts-tooling/contracts/elderberry/polygonvalidiumetrog" - "github.com/agglayer/aggkit/dataavailability/mocks_da" - "github.com/agglayer/aggkit/etherman/contracts" - "github.com/agglayer/aggkit/log" - "github.com/agglayer/aggkit/sequencesender/seqsendertypes" - "github.com/agglayer/aggkit/sequencesender/txbuilder" - 
"github.com/agglayer/aggkit/state/datastream" - "github.com/ethereum/go-ethereum/accounts/abi/bind" - "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/crypto" - "github.com/stretchr/testify/mock" - "github.com/stretchr/testify/require" -) - -func TestElderberryValidiumName(t *testing.T) { - testData := newElderberryValidiumSUT(t) - require.NotNil(t, testData.sut) - require.True(t, strings.Contains(testData.sut.String(), "Elderberry")) - require.True(t, strings.Contains(testData.sut.String(), "Validium")) -} - -func TestElderberryValidiumBuildSequenceBatchesTxEmtpySequence(t *testing.T) { - testData := newElderberryValidiumSUT(t) - ctx := context.TODO() - _, err := testData.sut.BuildSequenceBatchesTx(ctx, nil) - require.Error(t, err) - - seq, err := testData.sut.NewSequence(context.TODO(), nil, common.Address{}) - require.NoError(t, err) - _, err = testData.sut.BuildSequenceBatchesTx(ctx, seq) - require.Error(t, err) -} - -func TestElderberryValidiumBuildSequenceBatchesTxSequenceErrorsFromDA(t *testing.T) { - testData := newElderberryValidiumSUT(t) - ctx := context.TODO() - l2Block := &datastream.L2Block{ - Timestamp: 1, - BatchNumber: 1, - L1InfotreeIndex: 3, - Coinbase: []byte{1, 2, 3}, - GlobalExitRoot: []byte{4, 5, 6}, - } - batchElder := testData.sut.NewBatchFromL2Block(l2Block) - batches := []seqsendertypes.Batch{ - batchElder, - } - seq, err := testData.sut.NewSequence(context.TODO(), batches, common.Address{}) - require.NoError(t, err) - testData.mockDA.EXPECT().PostSequenceElderberry(ctx, mock.Anything).Return(nil, nil) - _, err = testData.sut.BuildSequenceBatchesTx(ctx, seq) - require.Error(t, err, "data availability message is nil") - testData.mockDA.EXPECT().PostSequenceElderberry(ctx, mock.Anything).Return(nil, fmt.Errorf("test error")) - _, err = testData.sut.BuildSequenceBatchesTx(ctx, seq) - require.Error(t, err, "error posting sequences to the data availability protocol: test error") -} - -func 
TestElderberryValidiumBuildSequenceBatchesTxSequenceDAOk(t *testing.T) { - testData := newElderberryValidiumSUT(t) - ctx := context.TODO() - l2Block := &datastream.L2Block{ - Timestamp: 1, - BatchNumber: 1, - L1InfotreeIndex: 3, - Coinbase: []byte{1, 2, 3}, - GlobalExitRoot: []byte{4, 5, 6}, - } - batchElder := testData.sut.NewBatchFromL2Block(l2Block) - batches := []seqsendertypes.Batch{ - batchElder, - } - seq, err := testData.sut.NewSequence(context.TODO(), batches, common.Address{}) - require.NoError(t, err) - testData.mockDA.EXPECT().PostSequenceElderberry(ctx, mock.Anything).Return([]byte{1}, nil) - tx, err := testData.sut.BuildSequenceBatchesTx(ctx, seq) - require.NoError(t, err) - require.NotNil(t, tx) -} - -func TestElderberryValidiumNewSequenceIfWorthToSend(t *testing.T) { - testData := newElderberryValidiumSUT(t) - testSequenceIfWorthToSendNoNewSeq(t, testData.sut) - testSequenceIfWorthToSendErr(t, testData.sut) - testSetCondNewSeq(t, testData.sut) -} - -type testDataElderberryValidium struct { - mockDA *mocks_da.SequenceSenderElderberry - sut *txbuilder.TxBuilderElderberryValidium -} - -func newElderberryValidiumSUT(t *testing.T) *testDataElderberryValidium { - t.Helper() - - zkevmContract, err := contracts.NewContractMagic[contracts.RollupElderberryType](polygonvalidiumetrog.NewPolygonvalidiumetrog, common.Address{}, nil, contracts.ContractNameRollup, contracts.VersionElderberry) - require.NoError(t, err) - privateKey, err := crypto.HexToECDSA("64e679029f5032046955d41713dcc4b565de77ab891748d31bcf38864b54c175") - require.NoError(t, err) - opts, err := bind.NewKeyedTransactorWithChainID(privateKey, big.NewInt(1)) - require.NoError(t, err) - - da := mocks_da.NewSequenceSenderElderberry(t) - - sut := txbuilder.NewTxBuilderElderberryValidium(log.GetDefaultLogger(), *zkevmContract, da, *opts, uint64(100)) - require.NotNil(t, sut) - return &testDataElderberryValidium{ - mockDA: da, - sut: sut, - } -} diff --git a/sequencesender/txbuilder/elderberry_zkevm.go 
b/sequencesender/txbuilder/elderberry_zkevm.go deleted file mode 100644 index 53b22f58..00000000 --- a/sequencesender/txbuilder/elderberry_zkevm.go +++ /dev/null @@ -1,113 +0,0 @@ -package txbuilder - -import ( - "context" - "fmt" - "math/big" - - "github.com/0xPolygon/cdk-contracts-tooling/contracts/elderberry/polygonvalidiumetrog" - "github.com/agglayer/aggkit/etherman" - "github.com/agglayer/aggkit/log" - "github.com/agglayer/aggkit/sequencesender/seqsendertypes" - "github.com/ethereum/go-ethereum/accounts/abi/bind" - "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/core/types" -) - -type TxBuilderElderberryZKEVM struct { - TxBuilderElderberryBase - condNewSeq CondNewSequence - rollupContract rollupElderberryZKEVMContractor -} - -type rollupElderberryZKEVMContractor interface { - SequenceBatches( - opts *bind.TransactOpts, - batches []polygonvalidiumetrog.PolygonRollupBaseEtrogBatchData, - maxSequenceTimestamp uint64, - initSequencedBatch uint64, - l2Coinbase common.Address, - ) (*types.Transaction, error) -} - -func NewTxBuilderElderberryZKEVM( - logger *log.Logger, zkevm rollupElderberryZKEVMContractor, - opts bind.TransactOpts, maxTxSizeForL1 uint64, -) *TxBuilderElderberryZKEVM { - return &TxBuilderElderberryZKEVM{ - TxBuilderElderberryBase: *NewTxBuilderElderberryBase(logger, opts), - condNewSeq: NewConditionalNewSequenceMaxSize(maxTxSizeForL1), - rollupContract: zkevm, - } -} - -func (t *TxBuilderElderberryZKEVM) NewSequenceIfWorthToSend( - ctx context.Context, sequenceBatches []seqsendertypes.Batch, l2Coinbase common.Address, batchNumber uint64, -) (seqsendertypes.Sequence, error) { - return t.condNewSeq.NewSequenceIfWorthToSend(ctx, t, sequenceBatches, l2Coinbase) -} - -// SetCondNewSeq allow to override the default conditional for new sequence -func (t *TxBuilderElderberryZKEVM) SetCondNewSeq(cond CondNewSequence) CondNewSequence { - previous := t.condNewSeq - t.condNewSeq = cond - return previous -} - -func (t 
*TxBuilderElderberryZKEVM) BuildSequenceBatchesTx( - ctx context.Context, sequences seqsendertypes.Sequence, -) (*types.Transaction, error) { - newopts := t.opts - newopts.NoSend = true - - // force nonce, gas limit and gas price to avoid querying it from the chain - newopts.Nonce = big.NewInt(1) - newopts.GasLimit = uint64(1) - newopts.GasPrice = big.NewInt(1) - - return t.sequenceBatchesRollup(newopts, sequences) -} - -func (t *TxBuilderElderberryZKEVM) sequenceBatchesRollup( - opts bind.TransactOpts, sequences seqsendertypes.Sequence, -) (*types.Transaction, error) { - if sequences == nil || sequences.Len() == 0 { - return nil, fmt.Errorf("can't sequence an empty sequence") - } - batches := make([]polygonvalidiumetrog.PolygonRollupBaseEtrogBatchData, sequences.Len()) - for i, seq := range sequences.Batches() { - var ger common.Hash - if seq.ForcedBatchTimestamp() > 0 { - ger = seq.GlobalExitRoot() - } - - batches[i] = polygonvalidiumetrog.PolygonRollupBaseEtrogBatchData{ - Transactions: seq.L2Data(), - ForcedGlobalExitRoot: ger, - ForcedTimestamp: seq.ForcedBatchTimestamp(), - // TODO: Check that is ok to use ForcedBlockHashL1 instead PrevBlockHash - ForcedBlockHashL1: seq.ForcedBlockHashL1(), - } - } - lastSequencedBatchNumber := getLastSequencedBatchNumber(sequences) - tx, err := t.rollupContract.SequenceBatches( - &opts, batches, sequences.MaxSequenceTimestamp(), lastSequencedBatchNumber, sequences.L2Coinbase(), - ) - if err != nil { - t.warningMessage(batches, sequences.L2Coinbase(), &opts) - if parsedErr, ok := etherman.TryParseError(err); ok { - err = parsedErr - } - } - - return tx, err -} - -func (t *TxBuilderElderberryZKEVM) warningMessage( - batches []polygonvalidiumetrog.PolygonRollupBaseEtrogBatchData, l2Coinbase common.Address, opts *bind.TransactOpts) { - t.logger.Warnf("Sequencer address: ", opts.From, "l2CoinBase: ", l2Coinbase, " Batches to send: %+v", batches) -} - -func (t *TxBuilderElderberryZKEVM) String() string { - return 
"Elderberry/ZKEVM" -} diff --git a/sequencesender/txbuilder/elderberry_zkevm_test.go b/sequencesender/txbuilder/elderberry_zkevm_test.go deleted file mode 100644 index fdc8e40e..00000000 --- a/sequencesender/txbuilder/elderberry_zkevm_test.go +++ /dev/null @@ -1,113 +0,0 @@ -package txbuilder_test - -import ( - "context" - "math/big" - "strings" - "testing" - - "github.com/0xPolygon/cdk-contracts-tooling/contracts/elderberry/polygonvalidiumetrog" - "github.com/agglayer/aggkit/etherman/contracts" - "github.com/agglayer/aggkit/log" - "github.com/agglayer/aggkit/sequencesender/seqsendertypes" - "github.com/agglayer/aggkit/sequencesender/txbuilder" - "github.com/agglayer/aggkit/state/datastream" - "github.com/ethereum/go-ethereum/accounts/abi/bind" - "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/crypto" - "github.com/stretchr/testify/require" -) - -func TestElderberryZkevmName(t *testing.T) { - zkevmContract := contracts.RollupElderberryType{} - opts := bind.TransactOpts{} - sut := txbuilder.NewTxBuilderElderberryZKEVM(log.GetDefaultLogger(), zkevmContract, opts, 100) - require.NotNil(t, sut) - require.True(t, strings.Contains(sut.String(), "Elderberry")) - require.True(t, strings.Contains(sut.String(), "ZKEVM")) -} - -func TestElderberryZkevmNewSequence(t *testing.T) { - zkevmContract := contracts.RollupElderberryType{} - opts := bind.TransactOpts{} - sut := txbuilder.NewTxBuilderElderberryZKEVM(log.GetDefaultLogger(), zkevmContract, opts, 100) - require.NotNil(t, sut) - seq, err := sut.NewSequence(context.TODO(), nil, common.Address{}) - require.NoError(t, err) - require.NotNil(t, seq) -} - -func TestElderberryZkevmBuildSequenceBatchesTxEmtpySequence(t *testing.T) { - sut := newElderberryZkevmSUT(t) - ctx := context.TODO() - _, err := sut.BuildSequenceBatchesTx(ctx, nil) - require.Error(t, err) - - seq, err := sut.NewSequence(context.TODO(), nil, common.Address{}) - require.NoError(t, err) - _, err = sut.BuildSequenceBatchesTx(ctx, seq) 
- require.Error(t, err) -} - -func TestElderberryZkevmBuildSequenceBatchesTxSequence1Batch(t *testing.T) { - sut := newElderberryZkevmSUT(t) - ctx := context.TODO() - l2Block := &datastream.L2Block{ - Timestamp: 1, - BatchNumber: 1, - L1InfotreeIndex: 3, - Coinbase: []byte{1, 2, 3}, - GlobalExitRoot: []byte{4, 5, 6}, - } - batchElder := sut.NewBatchFromL2Block(l2Block) - batches := []seqsendertypes.Batch{ - batchElder, - } - seq, err := sut.NewSequence(context.TODO(), batches, common.Address{}) - require.NoError(t, err) - _, err = sut.BuildSequenceBatchesTx(ctx, seq) - require.NoError(t, err) -} - -// This have to signer so produce an error -func TestElderberryZkevmBuildSequenceBatchesTxSequence1BatchError(t *testing.T) { - sut := newElderberryZkevmSUT(t) - sut.SetAuth(&bind.TransactOpts{}) - ctx := context.TODO() - l2Block := &datastream.L2Block{ - Timestamp: 1, - BatchNumber: 1, - L1InfotreeIndex: 3, - Coinbase: []byte{1, 2, 3}, - GlobalExitRoot: []byte{4, 5, 6}, - } - batchElder := sut.NewBatchFromL2Block(l2Block) - batches := []seqsendertypes.Batch{ - batchElder, - } - seq, err := sut.NewSequence(context.TODO(), batches, common.Address{}) - require.NoError(t, err) - _, err = sut.BuildSequenceBatchesTx(ctx, seq) - require.Error(t, err) -} - -func TestElderberryZkevmNewSequenceIfWorthToSend(t *testing.T) { - sut := newElderberryZkevmSUT(t) - testSequenceIfWorthToSendNoNewSeq(t, sut) - testSequenceIfWorthToSendErr(t, sut) - testSetCondNewSeq(t, sut) -} - -func newElderberryZkevmSUT(t *testing.T) *txbuilder.TxBuilderElderberryZKEVM { - t.Helper() - - zkevmContract, err := contracts.NewContractMagic[contracts.RollupElderberryType](polygonvalidiumetrog.NewPolygonvalidiumetrog, common.Address{}, nil, contracts.ContractNameRollup, contracts.VersionElderberry) - require.NoError(t, err) - privateKey, err := crypto.HexToECDSA("64e679029f5032046955d41713dcc4b565de77ab891748d31bcf38864b54c175") - require.NoError(t, err) - opts, err := 
bind.NewKeyedTransactorWithChainID(privateKey, big.NewInt(1)) - require.NoError(t, err) - sut := txbuilder.NewTxBuilderElderberryZKEVM(log.GetDefaultLogger(), *zkevmContract, *opts, 100) - require.NotNil(t, sut) - return sut -} diff --git a/sequencesender/txbuilder/interface.go b/sequencesender/txbuilder/interface.go deleted file mode 100644 index 1d018cdb..00000000 --- a/sequencesender/txbuilder/interface.go +++ /dev/null @@ -1,38 +0,0 @@ -package txbuilder - -import ( - "context" - "fmt" - - "github.com/agglayer/aggkit/sequencesender/seqsendertypes" - "github.com/agglayer/aggkit/state/datastream" - "github.com/ethereum/go-ethereum/common" - ethtypes "github.com/ethereum/go-ethereum/core/types" -) - -type TxBuilder interface { - // Stringer interface - fmt.Stringer - - // BuildSequenceBatchesTx Builds a sequence of batches transaction - BuildSequenceBatchesTx(ctx context.Context, sequences seqsendertypes.Sequence) (*ethtypes.Transaction, error) - // NewSequence Creates a new sequence - NewSequence( - ctx context.Context, batches []seqsendertypes.Batch, coinbase common.Address, - ) (seqsendertypes.Sequence, error) - // NewSequenceIfWorthToSend Creates a new sequence if it is worth sending - NewSequenceIfWorthToSend( - ctx context.Context, sequenceBatches []seqsendertypes.Batch, l2Coinbase common.Address, batchNumber uint64, - ) (seqsendertypes.Sequence, error) - // NewBatchFromL2Block Creates a new batch from the L2 block from a datastream - NewBatchFromL2Block(l2Block *datastream.L2Block) seqsendertypes.Batch - // SetCondNewSeq Allows to override the condition to send a new sequence, returns previous one - SetCondNewSeq(cond CondNewSequence) CondNewSequence -} - -type CondNewSequence interface { - // NewSequenceIfWorthToSend Return nil, nil if the sequence is not worth sending - NewSequenceIfWorthToSend( - ctx context.Context, txBuilder TxBuilder, sequenceBatches []seqsendertypes.Batch, l2Coinbase common.Address, - ) (seqsendertypes.Sequence, error) -} diff --git 
a/sequencesender/txbuilder/interface_test.go b/sequencesender/txbuilder/interface_test.go deleted file mode 100644 index 8ad3157d..00000000 --- a/sequencesender/txbuilder/interface_test.go +++ /dev/null @@ -1,50 +0,0 @@ -package txbuilder_test - -import ( - "context" - "fmt" - "testing" - - "github.com/agglayer/aggkit/sequencesender/txbuilder" - "github.com/agglayer/aggkit/sequencesender/txbuilder/mocks_txbuilder" - "github.com/ethereum/go-ethereum/common" - "github.com/stretchr/testify/mock" - "github.com/stretchr/testify/require" -) - -/* -This test ara auxiliars function based on the common behaviour of the interfaces -*/ - -func testSequenceIfWorthToSendNoNewSeq(t *testing.T, sut txbuilder.TxBuilder) { - t.Helper() - - cond := mocks_txbuilder.NewCondNewSequence(t) - sut.SetCondNewSeq(cond) - cond.EXPECT().NewSequenceIfWorthToSend(mock.Anything, mock.Anything, mock.Anything, mock.Anything).Return(nil, nil).Once() - seq, err := sut.NewSequenceIfWorthToSend(context.TODO(), nil, common.Address{}, 0) - require.NoError(t, err) - require.Nil(t, seq) -} - -func testSequenceIfWorthToSendErr(t *testing.T, sut txbuilder.TxBuilder) { - t.Helper() - - cond := mocks_txbuilder.NewCondNewSequence(t) - sut.SetCondNewSeq(cond) - returnErr := fmt.Errorf("test-error") - cond.EXPECT().NewSequenceIfWorthToSend(mock.Anything, mock.Anything, mock.Anything, mock.Anything).Return(nil, returnErr).Once() - seq, err := sut.NewSequenceIfWorthToSend(context.TODO(), nil, common.Address{}, 0) - require.ErrorIs(t, returnErr, err) - require.Nil(t, seq) -} - -func testSetCondNewSeq(t *testing.T, sut txbuilder.TxBuilder) { - t.Helper() - - cond := mocks_txbuilder.NewCondNewSequence(t) - sut.SetCondNewSeq(cond) - cond2 := mocks_txbuilder.NewCondNewSequence(t) - previous := sut.SetCondNewSeq(cond2) - require.Equal(t, cond, previous) -} diff --git a/sequencesender/txbuilder/mocks_txbuilder/cond_new_sequence.go b/sequencesender/txbuilder/mocks_txbuilder/cond_new_sequence.go deleted file mode 100644 
index e80ddd3d..00000000 --- a/sequencesender/txbuilder/mocks_txbuilder/cond_new_sequence.go +++ /dev/null @@ -1,103 +0,0 @@ -// Code generated by mockery. DO NOT EDIT. - -package mocks_txbuilder - -import ( - context "context" - - common "github.com/ethereum/go-ethereum/common" - - mock "github.com/stretchr/testify/mock" - - seqsendertypes "github.com/agglayer/aggkit/sequencesender/seqsendertypes" - - txbuilder "github.com/agglayer/aggkit/sequencesender/txbuilder" -) - -// CondNewSequence is an autogenerated mock type for the CondNewSequence type -type CondNewSequence struct { - mock.Mock -} - -type CondNewSequence_Expecter struct { - mock *mock.Mock -} - -func (_m *CondNewSequence) EXPECT() *CondNewSequence_Expecter { - return &CondNewSequence_Expecter{mock: &_m.Mock} -} - -// NewSequenceIfWorthToSend provides a mock function with given fields: ctx, txBuilder, sequenceBatches, l2Coinbase -func (_m *CondNewSequence) NewSequenceIfWorthToSend(ctx context.Context, txBuilder txbuilder.TxBuilder, sequenceBatches []seqsendertypes.Batch, l2Coinbase common.Address) (seqsendertypes.Sequence, error) { - ret := _m.Called(ctx, txBuilder, sequenceBatches, l2Coinbase) - - if len(ret) == 0 { - panic("no return value specified for NewSequenceIfWorthToSend") - } - - var r0 seqsendertypes.Sequence - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, txbuilder.TxBuilder, []seqsendertypes.Batch, common.Address) (seqsendertypes.Sequence, error)); ok { - return rf(ctx, txBuilder, sequenceBatches, l2Coinbase) - } - if rf, ok := ret.Get(0).(func(context.Context, txbuilder.TxBuilder, []seqsendertypes.Batch, common.Address) seqsendertypes.Sequence); ok { - r0 = rf(ctx, txBuilder, sequenceBatches, l2Coinbase) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(seqsendertypes.Sequence) - } - } - - if rf, ok := ret.Get(1).(func(context.Context, txbuilder.TxBuilder, []seqsendertypes.Batch, common.Address) error); ok { - r1 = rf(ctx, txBuilder, sequenceBatches, l2Coinbase) - } 
else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// CondNewSequence_NewSequenceIfWorthToSend_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'NewSequenceIfWorthToSend' -type CondNewSequence_NewSequenceIfWorthToSend_Call struct { - *mock.Call -} - -// NewSequenceIfWorthToSend is a helper method to define mock.On call -// - ctx context.Context -// - txBuilder txbuilder.TxBuilder -// - sequenceBatches []seqsendertypes.Batch -// - l2Coinbase common.Address -func (_e *CondNewSequence_Expecter) NewSequenceIfWorthToSend(ctx interface{}, txBuilder interface{}, sequenceBatches interface{}, l2Coinbase interface{}) *CondNewSequence_NewSequenceIfWorthToSend_Call { - return &CondNewSequence_NewSequenceIfWorthToSend_Call{Call: _e.mock.On("NewSequenceIfWorthToSend", ctx, txBuilder, sequenceBatches, l2Coinbase)} -} - -func (_c *CondNewSequence_NewSequenceIfWorthToSend_Call) Run(run func(ctx context.Context, txBuilder txbuilder.TxBuilder, sequenceBatches []seqsendertypes.Batch, l2Coinbase common.Address)) *CondNewSequence_NewSequenceIfWorthToSend_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(txbuilder.TxBuilder), args[2].([]seqsendertypes.Batch), args[3].(common.Address)) - }) - return _c -} - -func (_c *CondNewSequence_NewSequenceIfWorthToSend_Call) Return(_a0 seqsendertypes.Sequence, _a1 error) *CondNewSequence_NewSequenceIfWorthToSend_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *CondNewSequence_NewSequenceIfWorthToSend_Call) RunAndReturn(run func(context.Context, txbuilder.TxBuilder, []seqsendertypes.Batch, common.Address) (seqsendertypes.Sequence, error)) *CondNewSequence_NewSequenceIfWorthToSend_Call { - _c.Call.Return(run) - return _c -} - -// NewCondNewSequence creates a new instance of CondNewSequence. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. -// The first argument is typically a *testing.T value. 
-func NewCondNewSequence(t interface { - mock.TestingT - Cleanup(func()) -}) *CondNewSequence { - mock := &CondNewSequence{} - mock.Mock.Test(t) - - t.Cleanup(func() { mock.AssertExpectations(t) }) - - return mock -} diff --git a/sequencesender/txbuilder/mocks_txbuilder/global_exit_root_banana_contractor.go b/sequencesender/txbuilder/mocks_txbuilder/global_exit_root_banana_contractor.go deleted file mode 100644 index 86fd4366..00000000 --- a/sequencesender/txbuilder/mocks_txbuilder/global_exit_root_banana_contractor.go +++ /dev/null @@ -1,139 +0,0 @@ -// Code generated by mockery. DO NOT EDIT. - -package mocks_txbuilder - -import ( - bind "github.com/ethereum/go-ethereum/accounts/abi/bind" - mock "github.com/stretchr/testify/mock" -) - -// GlobalExitRootBananaContractor is an autogenerated mock type for the globalExitRootBananaContractor type -type GlobalExitRootBananaContractor struct { - mock.Mock -} - -type GlobalExitRootBananaContractor_Expecter struct { - mock *mock.Mock -} - -func (_m *GlobalExitRootBananaContractor) EXPECT() *GlobalExitRootBananaContractor_Expecter { - return &GlobalExitRootBananaContractor_Expecter{mock: &_m.Mock} -} - -// L1InfoRootMap provides a mock function with given fields: opts, index -func (_m *GlobalExitRootBananaContractor) L1InfoRootMap(opts *bind.CallOpts, index uint32) ([32]byte, error) { - ret := _m.Called(opts, index) - - if len(ret) == 0 { - panic("no return value specified for L1InfoRootMap") - } - - var r0 [32]byte - var r1 error - if rf, ok := ret.Get(0).(func(*bind.CallOpts, uint32) ([32]byte, error)); ok { - return rf(opts, index) - } - if rf, ok := ret.Get(0).(func(*bind.CallOpts, uint32) [32]byte); ok { - r0 = rf(opts, index) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).([32]byte) - } - } - - if rf, ok := ret.Get(1).(func(*bind.CallOpts, uint32) error); ok { - r1 = rf(opts, index) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// GlobalExitRootBananaContractor_L1InfoRootMap_Call is a *mock.Call 
that shadows Run/Return methods with type explicit version for method 'L1InfoRootMap' -type GlobalExitRootBananaContractor_L1InfoRootMap_Call struct { - *mock.Call -} - -// L1InfoRootMap is a helper method to define mock.On call -// - opts *bind.CallOpts -// - index uint32 -func (_e *GlobalExitRootBananaContractor_Expecter) L1InfoRootMap(opts interface{}, index interface{}) *GlobalExitRootBananaContractor_L1InfoRootMap_Call { - return &GlobalExitRootBananaContractor_L1InfoRootMap_Call{Call: _e.mock.On("L1InfoRootMap", opts, index)} -} - -func (_c *GlobalExitRootBananaContractor_L1InfoRootMap_Call) Run(run func(opts *bind.CallOpts, index uint32)) *GlobalExitRootBananaContractor_L1InfoRootMap_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(*bind.CallOpts), args[1].(uint32)) - }) - return _c -} - -func (_c *GlobalExitRootBananaContractor_L1InfoRootMap_Call) Return(_a0 [32]byte, _a1 error) *GlobalExitRootBananaContractor_L1InfoRootMap_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *GlobalExitRootBananaContractor_L1InfoRootMap_Call) RunAndReturn(run func(*bind.CallOpts, uint32) ([32]byte, error)) *GlobalExitRootBananaContractor_L1InfoRootMap_Call { - _c.Call.Return(run) - return _c -} - -// String provides a mock function with no fields -func (_m *GlobalExitRootBananaContractor) String() string { - ret := _m.Called() - - if len(ret) == 0 { - panic("no return value specified for String") - } - - var r0 string - if rf, ok := ret.Get(0).(func() string); ok { - r0 = rf() - } else { - r0 = ret.Get(0).(string) - } - - return r0 -} - -// GlobalExitRootBananaContractor_String_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'String' -type GlobalExitRootBananaContractor_String_Call struct { - *mock.Call -} - -// String is a helper method to define mock.On call -func (_e *GlobalExitRootBananaContractor_Expecter) String() *GlobalExitRootBananaContractor_String_Call { - return 
&GlobalExitRootBananaContractor_String_Call{Call: _e.mock.On("String")} -} - -func (_c *GlobalExitRootBananaContractor_String_Call) Run(run func()) *GlobalExitRootBananaContractor_String_Call { - _c.Call.Run(func(args mock.Arguments) { - run() - }) - return _c -} - -func (_c *GlobalExitRootBananaContractor_String_Call) Return(_a0 string) *GlobalExitRootBananaContractor_String_Call { - _c.Call.Return(_a0) - return _c -} - -func (_c *GlobalExitRootBananaContractor_String_Call) RunAndReturn(run func() string) *GlobalExitRootBananaContractor_String_Call { - _c.Call.Return(run) - return _c -} - -// NewGlobalExitRootBananaContractor creates a new instance of GlobalExitRootBananaContractor. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. -// The first argument is typically a *testing.T value. -func NewGlobalExitRootBananaContractor(t interface { - mock.TestingT - Cleanup(func()) -}) *GlobalExitRootBananaContractor { - mock := &GlobalExitRootBananaContractor{} - mock.Mock.Test(t) - - t.Cleanup(func() { mock.AssertExpectations(t) }) - - return mock -} diff --git a/sequencesender/txbuilder/mocks_txbuilder/global_exit_root_banana_zkevm_contractor.go b/sequencesender/txbuilder/mocks_txbuilder/global_exit_root_banana_zkevm_contractor.go deleted file mode 100644 index 57c6c157..00000000 --- a/sequencesender/txbuilder/mocks_txbuilder/global_exit_root_banana_zkevm_contractor.go +++ /dev/null @@ -1,139 +0,0 @@ -// Code generated by mockery. DO NOT EDIT. 
- -package mocks_txbuilder - -import ( - bind "github.com/ethereum/go-ethereum/accounts/abi/bind" - mock "github.com/stretchr/testify/mock" -) - -// GlobalExitRootBananaZKEVMContractor is an autogenerated mock type for the globalExitRootBananaZKEVMContractor type -type GlobalExitRootBananaZKEVMContractor struct { - mock.Mock -} - -type GlobalExitRootBananaZKEVMContractor_Expecter struct { - mock *mock.Mock -} - -func (_m *GlobalExitRootBananaZKEVMContractor) EXPECT() *GlobalExitRootBananaZKEVMContractor_Expecter { - return &GlobalExitRootBananaZKEVMContractor_Expecter{mock: &_m.Mock} -} - -// L1InfoRootMap provides a mock function with given fields: opts, index -func (_m *GlobalExitRootBananaZKEVMContractor) L1InfoRootMap(opts *bind.CallOpts, index uint32) ([32]byte, error) { - ret := _m.Called(opts, index) - - if len(ret) == 0 { - panic("no return value specified for L1InfoRootMap") - } - - var r0 [32]byte - var r1 error - if rf, ok := ret.Get(0).(func(*bind.CallOpts, uint32) ([32]byte, error)); ok { - return rf(opts, index) - } - if rf, ok := ret.Get(0).(func(*bind.CallOpts, uint32) [32]byte); ok { - r0 = rf(opts, index) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).([32]byte) - } - } - - if rf, ok := ret.Get(1).(func(*bind.CallOpts, uint32) error); ok { - r1 = rf(opts, index) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// GlobalExitRootBananaZKEVMContractor_L1InfoRootMap_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'L1InfoRootMap' -type GlobalExitRootBananaZKEVMContractor_L1InfoRootMap_Call struct { - *mock.Call -} - -// L1InfoRootMap is a helper method to define mock.On call -// - opts *bind.CallOpts -// - index uint32 -func (_e *GlobalExitRootBananaZKEVMContractor_Expecter) L1InfoRootMap(opts interface{}, index interface{}) *GlobalExitRootBananaZKEVMContractor_L1InfoRootMap_Call { - return &GlobalExitRootBananaZKEVMContractor_L1InfoRootMap_Call{Call: _e.mock.On("L1InfoRootMap", opts, index)} 
-} - -func (_c *GlobalExitRootBananaZKEVMContractor_L1InfoRootMap_Call) Run(run func(opts *bind.CallOpts, index uint32)) *GlobalExitRootBananaZKEVMContractor_L1InfoRootMap_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(*bind.CallOpts), args[1].(uint32)) - }) - return _c -} - -func (_c *GlobalExitRootBananaZKEVMContractor_L1InfoRootMap_Call) Return(_a0 [32]byte, _a1 error) *GlobalExitRootBananaZKEVMContractor_L1InfoRootMap_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *GlobalExitRootBananaZKEVMContractor_L1InfoRootMap_Call) RunAndReturn(run func(*bind.CallOpts, uint32) ([32]byte, error)) *GlobalExitRootBananaZKEVMContractor_L1InfoRootMap_Call { - _c.Call.Return(run) - return _c -} - -// String provides a mock function with no fields -func (_m *GlobalExitRootBananaZKEVMContractor) String() string { - ret := _m.Called() - - if len(ret) == 0 { - panic("no return value specified for String") - } - - var r0 string - if rf, ok := ret.Get(0).(func() string); ok { - r0 = rf() - } else { - r0 = ret.Get(0).(string) - } - - return r0 -} - -// GlobalExitRootBananaZKEVMContractor_String_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'String' -type GlobalExitRootBananaZKEVMContractor_String_Call struct { - *mock.Call -} - -// String is a helper method to define mock.On call -func (_e *GlobalExitRootBananaZKEVMContractor_Expecter) String() *GlobalExitRootBananaZKEVMContractor_String_Call { - return &GlobalExitRootBananaZKEVMContractor_String_Call{Call: _e.mock.On("String")} -} - -func (_c *GlobalExitRootBananaZKEVMContractor_String_Call) Run(run func()) *GlobalExitRootBananaZKEVMContractor_String_Call { - _c.Call.Run(func(args mock.Arguments) { - run() - }) - return _c -} - -func (_c *GlobalExitRootBananaZKEVMContractor_String_Call) Return(_a0 string) *GlobalExitRootBananaZKEVMContractor_String_Call { - _c.Call.Return(_a0) - return _c -} - -func (_c *GlobalExitRootBananaZKEVMContractor_String_Call) 
RunAndReturn(run func() string) *GlobalExitRootBananaZKEVMContractor_String_Call { - _c.Call.Return(run) - return _c -} - -// NewGlobalExitRootBananaZKEVMContractor creates a new instance of GlobalExitRootBananaZKEVMContractor. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. -// The first argument is typically a *testing.T value. -func NewGlobalExitRootBananaZKEVMContractor(t interface { - mock.TestingT - Cleanup(func()) -}) *GlobalExitRootBananaZKEVMContractor { - mock := &GlobalExitRootBananaZKEVMContractor{} - mock.Mock.Test(t) - - t.Cleanup(func() { mock.AssertExpectations(t) }) - - return mock -} diff --git a/sequencesender/txbuilder/mocks_txbuilder/l1_client.go b/sequencesender/txbuilder/mocks_txbuilder/l1_client.go deleted file mode 100644 index 853494f9..00000000 --- a/sequencesender/txbuilder/mocks_txbuilder/l1_client.go +++ /dev/null @@ -1,98 +0,0 @@ -// Code generated by mockery. DO NOT EDIT. - -package mocks_txbuilder - -import ( - context "context" - big "math/big" - - mock "github.com/stretchr/testify/mock" - - types "github.com/ethereum/go-ethereum/core/types" -) - -// L1Client is an autogenerated mock type for the l1Client type -type L1Client struct { - mock.Mock -} - -type L1Client_Expecter struct { - mock *mock.Mock -} - -func (_m *L1Client) EXPECT() *L1Client_Expecter { - return &L1Client_Expecter{mock: &_m.Mock} -} - -// HeaderByNumber provides a mock function with given fields: ctx, number -func (_m *L1Client) HeaderByNumber(ctx context.Context, number *big.Int) (*types.Header, error) { - ret := _m.Called(ctx, number) - - if len(ret) == 0 { - panic("no return value specified for HeaderByNumber") - } - - var r0 *types.Header - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, *big.Int) (*types.Header, error)); ok { - return rf(ctx, number) - } - if rf, ok := ret.Get(0).(func(context.Context, *big.Int) *types.Header); ok { - r0 = rf(ctx, number) - } else { - if ret.Get(0) != 
nil { - r0 = ret.Get(0).(*types.Header) - } - } - - if rf, ok := ret.Get(1).(func(context.Context, *big.Int) error); ok { - r1 = rf(ctx, number) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// L1Client_HeaderByNumber_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'HeaderByNumber' -type L1Client_HeaderByNumber_Call struct { - *mock.Call -} - -// HeaderByNumber is a helper method to define mock.On call -// - ctx context.Context -// - number *big.Int -func (_e *L1Client_Expecter) HeaderByNumber(ctx interface{}, number interface{}) *L1Client_HeaderByNumber_Call { - return &L1Client_HeaderByNumber_Call{Call: _e.mock.On("HeaderByNumber", ctx, number)} -} - -func (_c *L1Client_HeaderByNumber_Call) Run(run func(ctx context.Context, number *big.Int)) *L1Client_HeaderByNumber_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(*big.Int)) - }) - return _c -} - -func (_c *L1Client_HeaderByNumber_Call) Return(_a0 *types.Header, _a1 error) *L1Client_HeaderByNumber_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *L1Client_HeaderByNumber_Call) RunAndReturn(run func(context.Context, *big.Int) (*types.Header, error)) *L1Client_HeaderByNumber_Call { - _c.Call.Return(run) - return _c -} - -// NewL1Client creates a new instance of L1Client. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. -// The first argument is typically a *testing.T value. 
-func NewL1Client(t interface { - mock.TestingT - Cleanup(func()) -}) *L1Client { - mock := &L1Client{} - mock.Mock.Test(t) - - t.Cleanup(func() { mock.AssertExpectations(t) }) - - return mock -} diff --git a/sequencesender/txbuilder/mocks_txbuilder/l1_info_syncer.go b/sequencesender/txbuilder/mocks_txbuilder/l1_info_syncer.go deleted file mode 100644 index 1ff380d5..00000000 --- a/sequencesender/txbuilder/mocks_txbuilder/l1_info_syncer.go +++ /dev/null @@ -1,154 +0,0 @@ -// Code generated by mockery. DO NOT EDIT. - -package mocks_txbuilder - -import ( - context "context" - - l1infotreesync "github.com/agglayer/aggkit/l1infotreesync" - mock "github.com/stretchr/testify/mock" -) - -// L1InfoSyncer is an autogenerated mock type for the l1InfoSyncer type -type L1InfoSyncer struct { - mock.Mock -} - -type L1InfoSyncer_Expecter struct { - mock *mock.Mock -} - -func (_m *L1InfoSyncer) EXPECT() *L1InfoSyncer_Expecter { - return &L1InfoSyncer_Expecter{mock: &_m.Mock} -} - -// GetInitL1InfoRootMap provides a mock function with given fields: ctx -func (_m *L1InfoSyncer) GetInitL1InfoRootMap(ctx context.Context) (*l1infotreesync.L1InfoTreeInitial, error) { - ret := _m.Called(ctx) - - if len(ret) == 0 { - panic("no return value specified for GetInitL1InfoRootMap") - } - - var r0 *l1infotreesync.L1InfoTreeInitial - var r1 error - if rf, ok := ret.Get(0).(func(context.Context) (*l1infotreesync.L1InfoTreeInitial, error)); ok { - return rf(ctx) - } - if rf, ok := ret.Get(0).(func(context.Context) *l1infotreesync.L1InfoTreeInitial); ok { - r0 = rf(ctx) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(*l1infotreesync.L1InfoTreeInitial) - } - } - - if rf, ok := ret.Get(1).(func(context.Context) error); ok { - r1 = rf(ctx) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// L1InfoSyncer_GetInitL1InfoRootMap_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetInitL1InfoRootMap' -type L1InfoSyncer_GetInitL1InfoRootMap_Call 
struct { - *mock.Call -} - -// GetInitL1InfoRootMap is a helper method to define mock.On call -// - ctx context.Context -func (_e *L1InfoSyncer_Expecter) GetInitL1InfoRootMap(ctx interface{}) *L1InfoSyncer_GetInitL1InfoRootMap_Call { - return &L1InfoSyncer_GetInitL1InfoRootMap_Call{Call: _e.mock.On("GetInitL1InfoRootMap", ctx)} -} - -func (_c *L1InfoSyncer_GetInitL1InfoRootMap_Call) Run(run func(ctx context.Context)) *L1InfoSyncer_GetInitL1InfoRootMap_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context)) - }) - return _c -} - -func (_c *L1InfoSyncer_GetInitL1InfoRootMap_Call) Return(_a0 *l1infotreesync.L1InfoTreeInitial, _a1 error) *L1InfoSyncer_GetInitL1InfoRootMap_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *L1InfoSyncer_GetInitL1InfoRootMap_Call) RunAndReturn(run func(context.Context) (*l1infotreesync.L1InfoTreeInitial, error)) *L1InfoSyncer_GetInitL1InfoRootMap_Call { - _c.Call.Return(run) - return _c -} - -// GetLatestInfoUntilBlock provides a mock function with given fields: ctx, blockNum -func (_m *L1InfoSyncer) GetLatestInfoUntilBlock(ctx context.Context, blockNum uint64) (*l1infotreesync.L1InfoTreeLeaf, error) { - ret := _m.Called(ctx, blockNum) - - if len(ret) == 0 { - panic("no return value specified for GetLatestInfoUntilBlock") - } - - var r0 *l1infotreesync.L1InfoTreeLeaf - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, uint64) (*l1infotreesync.L1InfoTreeLeaf, error)); ok { - return rf(ctx, blockNum) - } - if rf, ok := ret.Get(0).(func(context.Context, uint64) *l1infotreesync.L1InfoTreeLeaf); ok { - r0 = rf(ctx, blockNum) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(*l1infotreesync.L1InfoTreeLeaf) - } - } - - if rf, ok := ret.Get(1).(func(context.Context, uint64) error); ok { - r1 = rf(ctx, blockNum) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// L1InfoSyncer_GetLatestInfoUntilBlock_Call is a *mock.Call that shadows Run/Return methods with type explicit version for 
method 'GetLatestInfoUntilBlock' -type L1InfoSyncer_GetLatestInfoUntilBlock_Call struct { - *mock.Call -} - -// GetLatestInfoUntilBlock is a helper method to define mock.On call -// - ctx context.Context -// - blockNum uint64 -func (_e *L1InfoSyncer_Expecter) GetLatestInfoUntilBlock(ctx interface{}, blockNum interface{}) *L1InfoSyncer_GetLatestInfoUntilBlock_Call { - return &L1InfoSyncer_GetLatestInfoUntilBlock_Call{Call: _e.mock.On("GetLatestInfoUntilBlock", ctx, blockNum)} -} - -func (_c *L1InfoSyncer_GetLatestInfoUntilBlock_Call) Run(run func(ctx context.Context, blockNum uint64)) *L1InfoSyncer_GetLatestInfoUntilBlock_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(uint64)) - }) - return _c -} - -func (_c *L1InfoSyncer_GetLatestInfoUntilBlock_Call) Return(_a0 *l1infotreesync.L1InfoTreeLeaf, _a1 error) *L1InfoSyncer_GetLatestInfoUntilBlock_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *L1InfoSyncer_GetLatestInfoUntilBlock_Call) RunAndReturn(run func(context.Context, uint64) (*l1infotreesync.L1InfoTreeLeaf, error)) *L1InfoSyncer_GetLatestInfoUntilBlock_Call { - _c.Call.Return(run) - return _c -} - -// NewL1InfoSyncer creates a new instance of L1InfoSyncer. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. -// The first argument is typically a *testing.T value. -func NewL1InfoSyncer(t interface { - mock.TestingT - Cleanup(func()) -}) *L1InfoSyncer { - mock := &L1InfoSyncer{} - mock.Mock.Test(t) - - t.Cleanup(func() { mock.AssertExpectations(t) }) - - return mock -} diff --git a/sequencesender/txbuilder/mocks_txbuilder/rollup_banana_base_contractor.go b/sequencesender/txbuilder/mocks_txbuilder/rollup_banana_base_contractor.go deleted file mode 100644 index acd82a4e..00000000 --- a/sequencesender/txbuilder/mocks_txbuilder/rollup_banana_base_contractor.go +++ /dev/null @@ -1,93 +0,0 @@ -// Code generated by mockery. DO NOT EDIT. 
- -package mocks_txbuilder - -import ( - bind "github.com/ethereum/go-ethereum/accounts/abi/bind" - mock "github.com/stretchr/testify/mock" -) - -// RollupBananaBaseContractor is an autogenerated mock type for the rollupBananaBaseContractor type -type RollupBananaBaseContractor struct { - mock.Mock -} - -type RollupBananaBaseContractor_Expecter struct { - mock *mock.Mock -} - -func (_m *RollupBananaBaseContractor) EXPECT() *RollupBananaBaseContractor_Expecter { - return &RollupBananaBaseContractor_Expecter{mock: &_m.Mock} -} - -// LastAccInputHash provides a mock function with given fields: opts -func (_m *RollupBananaBaseContractor) LastAccInputHash(opts *bind.CallOpts) ([32]byte, error) { - ret := _m.Called(opts) - - if len(ret) == 0 { - panic("no return value specified for LastAccInputHash") - } - - var r0 [32]byte - var r1 error - if rf, ok := ret.Get(0).(func(*bind.CallOpts) ([32]byte, error)); ok { - return rf(opts) - } - if rf, ok := ret.Get(0).(func(*bind.CallOpts) [32]byte); ok { - r0 = rf(opts) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).([32]byte) - } - } - - if rf, ok := ret.Get(1).(func(*bind.CallOpts) error); ok { - r1 = rf(opts) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// RollupBananaBaseContractor_LastAccInputHash_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'LastAccInputHash' -type RollupBananaBaseContractor_LastAccInputHash_Call struct { - *mock.Call -} - -// LastAccInputHash is a helper method to define mock.On call -// - opts *bind.CallOpts -func (_e *RollupBananaBaseContractor_Expecter) LastAccInputHash(opts interface{}) *RollupBananaBaseContractor_LastAccInputHash_Call { - return &RollupBananaBaseContractor_LastAccInputHash_Call{Call: _e.mock.On("LastAccInputHash", opts)} -} - -func (_c *RollupBananaBaseContractor_LastAccInputHash_Call) Run(run func(opts *bind.CallOpts)) *RollupBananaBaseContractor_LastAccInputHash_Call { - _c.Call.Run(func(args mock.Arguments) { - 
run(args[0].(*bind.CallOpts)) - }) - return _c -} - -func (_c *RollupBananaBaseContractor_LastAccInputHash_Call) Return(_a0 [32]byte, _a1 error) *RollupBananaBaseContractor_LastAccInputHash_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *RollupBananaBaseContractor_LastAccInputHash_Call) RunAndReturn(run func(*bind.CallOpts) ([32]byte, error)) *RollupBananaBaseContractor_LastAccInputHash_Call { - _c.Call.Return(run) - return _c -} - -// NewRollupBananaBaseContractor creates a new instance of RollupBananaBaseContractor. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. -// The first argument is typically a *testing.T value. -func NewRollupBananaBaseContractor(t interface { - mock.TestingT - Cleanup(func()) -}) *RollupBananaBaseContractor { - mock := &RollupBananaBaseContractor{} - mock.Mock.Test(t) - - t.Cleanup(func() { mock.AssertExpectations(t) }) - - return mock -} diff --git a/sequencesender/txbuilder/mocks_txbuilder/rollup_banana_validium_contractor.go b/sequencesender/txbuilder/mocks_txbuilder/rollup_banana_validium_contractor.go deleted file mode 100644 index a59b88dd..00000000 --- a/sequencesender/txbuilder/mocks_txbuilder/rollup_banana_validium_contractor.go +++ /dev/null @@ -1,163 +0,0 @@ -// Code generated by mockery. DO NOT EDIT. 
- -package mocks_txbuilder - -import ( - bind "github.com/ethereum/go-ethereum/accounts/abi/bind" - common "github.com/ethereum/go-ethereum/common" - - mock "github.com/stretchr/testify/mock" - - polygonvalidiumetrog "github.com/0xPolygon/cdk-contracts-tooling/contracts/banana/polygonvalidiumetrog" - - types "github.com/ethereum/go-ethereum/core/types" -) - -// RollupBananaValidiumContractor is an autogenerated mock type for the rollupBananaValidiumContractor type -type RollupBananaValidiumContractor struct { - mock.Mock -} - -type RollupBananaValidiumContractor_Expecter struct { - mock *mock.Mock -} - -func (_m *RollupBananaValidiumContractor) EXPECT() *RollupBananaValidiumContractor_Expecter { - return &RollupBananaValidiumContractor_Expecter{mock: &_m.Mock} -} - -// LastAccInputHash provides a mock function with given fields: opts -func (_m *RollupBananaValidiumContractor) LastAccInputHash(opts *bind.CallOpts) ([32]byte, error) { - ret := _m.Called(opts) - - if len(ret) == 0 { - panic("no return value specified for LastAccInputHash") - } - - var r0 [32]byte - var r1 error - if rf, ok := ret.Get(0).(func(*bind.CallOpts) ([32]byte, error)); ok { - return rf(opts) - } - if rf, ok := ret.Get(0).(func(*bind.CallOpts) [32]byte); ok { - r0 = rf(opts) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).([32]byte) - } - } - - if rf, ok := ret.Get(1).(func(*bind.CallOpts) error); ok { - r1 = rf(opts) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// RollupBananaValidiumContractor_LastAccInputHash_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'LastAccInputHash' -type RollupBananaValidiumContractor_LastAccInputHash_Call struct { - *mock.Call -} - -// LastAccInputHash is a helper method to define mock.On call -// - opts *bind.CallOpts -func (_e *RollupBananaValidiumContractor_Expecter) LastAccInputHash(opts interface{}) *RollupBananaValidiumContractor_LastAccInputHash_Call { - return 
&RollupBananaValidiumContractor_LastAccInputHash_Call{Call: _e.mock.On("LastAccInputHash", opts)} -} - -func (_c *RollupBananaValidiumContractor_LastAccInputHash_Call) Run(run func(opts *bind.CallOpts)) *RollupBananaValidiumContractor_LastAccInputHash_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(*bind.CallOpts)) - }) - return _c -} - -func (_c *RollupBananaValidiumContractor_LastAccInputHash_Call) Return(_a0 [32]byte, _a1 error) *RollupBananaValidiumContractor_LastAccInputHash_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *RollupBananaValidiumContractor_LastAccInputHash_Call) RunAndReturn(run func(*bind.CallOpts) ([32]byte, error)) *RollupBananaValidiumContractor_LastAccInputHash_Call { - _c.Call.Return(run) - return _c -} - -// SequenceBatchesValidium provides a mock function with given fields: opts, batches, indexL1InfoRoot, maxSequenceTimestamp, expectedFinalAccInputHash, l2Coinbase, dataAvailabilityMessage -func (_m *RollupBananaValidiumContractor) SequenceBatchesValidium(opts *bind.TransactOpts, batches []polygonvalidiumetrog.PolygonValidiumEtrogValidiumBatchData, indexL1InfoRoot uint32, maxSequenceTimestamp uint64, expectedFinalAccInputHash [32]byte, l2Coinbase common.Address, dataAvailabilityMessage []byte) (*types.Transaction, error) { - ret := _m.Called(opts, batches, indexL1InfoRoot, maxSequenceTimestamp, expectedFinalAccInputHash, l2Coinbase, dataAvailabilityMessage) - - if len(ret) == 0 { - panic("no return value specified for SequenceBatchesValidium") - } - - var r0 *types.Transaction - var r1 error - if rf, ok := ret.Get(0).(func(*bind.TransactOpts, []polygonvalidiumetrog.PolygonValidiumEtrogValidiumBatchData, uint32, uint64, [32]byte, common.Address, []byte) (*types.Transaction, error)); ok { - return rf(opts, batches, indexL1InfoRoot, maxSequenceTimestamp, expectedFinalAccInputHash, l2Coinbase, dataAvailabilityMessage) - } - if rf, ok := ret.Get(0).(func(*bind.TransactOpts, 
[]polygonvalidiumetrog.PolygonValidiumEtrogValidiumBatchData, uint32, uint64, [32]byte, common.Address, []byte) *types.Transaction); ok { - r0 = rf(opts, batches, indexL1InfoRoot, maxSequenceTimestamp, expectedFinalAccInputHash, l2Coinbase, dataAvailabilityMessage) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(*types.Transaction) - } - } - - if rf, ok := ret.Get(1).(func(*bind.TransactOpts, []polygonvalidiumetrog.PolygonValidiumEtrogValidiumBatchData, uint32, uint64, [32]byte, common.Address, []byte) error); ok { - r1 = rf(opts, batches, indexL1InfoRoot, maxSequenceTimestamp, expectedFinalAccInputHash, l2Coinbase, dataAvailabilityMessage) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// RollupBananaValidiumContractor_SequenceBatchesValidium_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'SequenceBatchesValidium' -type RollupBananaValidiumContractor_SequenceBatchesValidium_Call struct { - *mock.Call -} - -// SequenceBatchesValidium is a helper method to define mock.On call -// - opts *bind.TransactOpts -// - batches []polygonvalidiumetrog.PolygonValidiumEtrogValidiumBatchData -// - indexL1InfoRoot uint32 -// - maxSequenceTimestamp uint64 -// - expectedFinalAccInputHash [32]byte -// - l2Coinbase common.Address -// - dataAvailabilityMessage []byte -func (_e *RollupBananaValidiumContractor_Expecter) SequenceBatchesValidium(opts interface{}, batches interface{}, indexL1InfoRoot interface{}, maxSequenceTimestamp interface{}, expectedFinalAccInputHash interface{}, l2Coinbase interface{}, dataAvailabilityMessage interface{}) *RollupBananaValidiumContractor_SequenceBatchesValidium_Call { - return &RollupBananaValidiumContractor_SequenceBatchesValidium_Call{Call: _e.mock.On("SequenceBatchesValidium", opts, batches, indexL1InfoRoot, maxSequenceTimestamp, expectedFinalAccInputHash, l2Coinbase, dataAvailabilityMessage)} -} - -func (_c *RollupBananaValidiumContractor_SequenceBatchesValidium_Call) Run(run func(opts 
*bind.TransactOpts, batches []polygonvalidiumetrog.PolygonValidiumEtrogValidiumBatchData, indexL1InfoRoot uint32, maxSequenceTimestamp uint64, expectedFinalAccInputHash [32]byte, l2Coinbase common.Address, dataAvailabilityMessage []byte)) *RollupBananaValidiumContractor_SequenceBatchesValidium_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(*bind.TransactOpts), args[1].([]polygonvalidiumetrog.PolygonValidiumEtrogValidiumBatchData), args[2].(uint32), args[3].(uint64), args[4].([32]byte), args[5].(common.Address), args[6].([]byte)) - }) - return _c -} - -func (_c *RollupBananaValidiumContractor_SequenceBatchesValidium_Call) Return(_a0 *types.Transaction, _a1 error) *RollupBananaValidiumContractor_SequenceBatchesValidium_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *RollupBananaValidiumContractor_SequenceBatchesValidium_Call) RunAndReturn(run func(*bind.TransactOpts, []polygonvalidiumetrog.PolygonValidiumEtrogValidiumBatchData, uint32, uint64, [32]byte, common.Address, []byte) (*types.Transaction, error)) *RollupBananaValidiumContractor_SequenceBatchesValidium_Call { - _c.Call.Return(run) - return _c -} - -// NewRollupBananaValidiumContractor creates a new instance of RollupBananaValidiumContractor. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. -// The first argument is typically a *testing.T value. 
-func NewRollupBananaValidiumContractor(t interface { - mock.TestingT - Cleanup(func()) -}) *RollupBananaValidiumContractor { - mock := &RollupBananaValidiumContractor{} - mock.Mock.Test(t) - - t.Cleanup(func() { mock.AssertExpectations(t) }) - - return mock -} diff --git a/sequencesender/txbuilder/mocks_txbuilder/rollup_banana_zkevm_contractor.go b/sequencesender/txbuilder/mocks_txbuilder/rollup_banana_zkevm_contractor.go deleted file mode 100644 index e29e3252..00000000 --- a/sequencesender/txbuilder/mocks_txbuilder/rollup_banana_zkevm_contractor.go +++ /dev/null @@ -1,162 +0,0 @@ -// Code generated by mockery. DO NOT EDIT. - -package mocks_txbuilder - -import ( - bind "github.com/ethereum/go-ethereum/accounts/abi/bind" - common "github.com/ethereum/go-ethereum/common" - - mock "github.com/stretchr/testify/mock" - - polygonvalidiumetrog "github.com/0xPolygon/cdk-contracts-tooling/contracts/banana/polygonvalidiumetrog" - - types "github.com/ethereum/go-ethereum/core/types" -) - -// RollupBananaZKEVMContractor is an autogenerated mock type for the rollupBananaZKEVMContractor type -type RollupBananaZKEVMContractor struct { - mock.Mock -} - -type RollupBananaZKEVMContractor_Expecter struct { - mock *mock.Mock -} - -func (_m *RollupBananaZKEVMContractor) EXPECT() *RollupBananaZKEVMContractor_Expecter { - return &RollupBananaZKEVMContractor_Expecter{mock: &_m.Mock} -} - -// LastAccInputHash provides a mock function with given fields: opts -func (_m *RollupBananaZKEVMContractor) LastAccInputHash(opts *bind.CallOpts) ([32]byte, error) { - ret := _m.Called(opts) - - if len(ret) == 0 { - panic("no return value specified for LastAccInputHash") - } - - var r0 [32]byte - var r1 error - if rf, ok := ret.Get(0).(func(*bind.CallOpts) ([32]byte, error)); ok { - return rf(opts) - } - if rf, ok := ret.Get(0).(func(*bind.CallOpts) [32]byte); ok { - r0 = rf(opts) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).([32]byte) - } - } - - if rf, ok := ret.Get(1).(func(*bind.CallOpts) 
error); ok { - r1 = rf(opts) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// RollupBananaZKEVMContractor_LastAccInputHash_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'LastAccInputHash' -type RollupBananaZKEVMContractor_LastAccInputHash_Call struct { - *mock.Call -} - -// LastAccInputHash is a helper method to define mock.On call -// - opts *bind.CallOpts -func (_e *RollupBananaZKEVMContractor_Expecter) LastAccInputHash(opts interface{}) *RollupBananaZKEVMContractor_LastAccInputHash_Call { - return &RollupBananaZKEVMContractor_LastAccInputHash_Call{Call: _e.mock.On("LastAccInputHash", opts)} -} - -func (_c *RollupBananaZKEVMContractor_LastAccInputHash_Call) Run(run func(opts *bind.CallOpts)) *RollupBananaZKEVMContractor_LastAccInputHash_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(*bind.CallOpts)) - }) - return _c -} - -func (_c *RollupBananaZKEVMContractor_LastAccInputHash_Call) Return(_a0 [32]byte, _a1 error) *RollupBananaZKEVMContractor_LastAccInputHash_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *RollupBananaZKEVMContractor_LastAccInputHash_Call) RunAndReturn(run func(*bind.CallOpts) ([32]byte, error)) *RollupBananaZKEVMContractor_LastAccInputHash_Call { - _c.Call.Return(run) - return _c -} - -// SequenceBatches provides a mock function with given fields: opts, batches, indexL1InfoRoot, maxSequenceTimestamp, expectedFinalAccInputHash, l2Coinbase -func (_m *RollupBananaZKEVMContractor) SequenceBatches(opts *bind.TransactOpts, batches []polygonvalidiumetrog.PolygonRollupBaseEtrogBatchData, indexL1InfoRoot uint32, maxSequenceTimestamp uint64, expectedFinalAccInputHash [32]byte, l2Coinbase common.Address) (*types.Transaction, error) { - ret := _m.Called(opts, batches, indexL1InfoRoot, maxSequenceTimestamp, expectedFinalAccInputHash, l2Coinbase) - - if len(ret) == 0 { - panic("no return value specified for SequenceBatches") - } - - var r0 *types.Transaction - var r1 error 
- if rf, ok := ret.Get(0).(func(*bind.TransactOpts, []polygonvalidiumetrog.PolygonRollupBaseEtrogBatchData, uint32, uint64, [32]byte, common.Address) (*types.Transaction, error)); ok { - return rf(opts, batches, indexL1InfoRoot, maxSequenceTimestamp, expectedFinalAccInputHash, l2Coinbase) - } - if rf, ok := ret.Get(0).(func(*bind.TransactOpts, []polygonvalidiumetrog.PolygonRollupBaseEtrogBatchData, uint32, uint64, [32]byte, common.Address) *types.Transaction); ok { - r0 = rf(opts, batches, indexL1InfoRoot, maxSequenceTimestamp, expectedFinalAccInputHash, l2Coinbase) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(*types.Transaction) - } - } - - if rf, ok := ret.Get(1).(func(*bind.TransactOpts, []polygonvalidiumetrog.PolygonRollupBaseEtrogBatchData, uint32, uint64, [32]byte, common.Address) error); ok { - r1 = rf(opts, batches, indexL1InfoRoot, maxSequenceTimestamp, expectedFinalAccInputHash, l2Coinbase) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// RollupBananaZKEVMContractor_SequenceBatches_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'SequenceBatches' -type RollupBananaZKEVMContractor_SequenceBatches_Call struct { - *mock.Call -} - -// SequenceBatches is a helper method to define mock.On call -// - opts *bind.TransactOpts -// - batches []polygonvalidiumetrog.PolygonRollupBaseEtrogBatchData -// - indexL1InfoRoot uint32 -// - maxSequenceTimestamp uint64 -// - expectedFinalAccInputHash [32]byte -// - l2Coinbase common.Address -func (_e *RollupBananaZKEVMContractor_Expecter) SequenceBatches(opts interface{}, batches interface{}, indexL1InfoRoot interface{}, maxSequenceTimestamp interface{}, expectedFinalAccInputHash interface{}, l2Coinbase interface{}) *RollupBananaZKEVMContractor_SequenceBatches_Call { - return &RollupBananaZKEVMContractor_SequenceBatches_Call{Call: _e.mock.On("SequenceBatches", opts, batches, indexL1InfoRoot, maxSequenceTimestamp, expectedFinalAccInputHash, l2Coinbase)} -} - -func 
(_c *RollupBananaZKEVMContractor_SequenceBatches_Call) Run(run func(opts *bind.TransactOpts, batches []polygonvalidiumetrog.PolygonRollupBaseEtrogBatchData, indexL1InfoRoot uint32, maxSequenceTimestamp uint64, expectedFinalAccInputHash [32]byte, l2Coinbase common.Address)) *RollupBananaZKEVMContractor_SequenceBatches_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(*bind.TransactOpts), args[1].([]polygonvalidiumetrog.PolygonRollupBaseEtrogBatchData), args[2].(uint32), args[3].(uint64), args[4].([32]byte), args[5].(common.Address)) - }) - return _c -} - -func (_c *RollupBananaZKEVMContractor_SequenceBatches_Call) Return(_a0 *types.Transaction, _a1 error) *RollupBananaZKEVMContractor_SequenceBatches_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *RollupBananaZKEVMContractor_SequenceBatches_Call) RunAndReturn(run func(*bind.TransactOpts, []polygonvalidiumetrog.PolygonRollupBaseEtrogBatchData, uint32, uint64, [32]byte, common.Address) (*types.Transaction, error)) *RollupBananaZKEVMContractor_SequenceBatches_Call { - _c.Call.Return(run) - return _c -} - -// NewRollupBananaZKEVMContractor creates a new instance of RollupBananaZKEVMContractor. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. -// The first argument is typically a *testing.T value. -func NewRollupBananaZKEVMContractor(t interface { - mock.TestingT - Cleanup(func()) -}) *RollupBananaZKEVMContractor { - mock := &RollupBananaZKEVMContractor{} - mock.Mock.Test(t) - - t.Cleanup(func() { mock.AssertExpectations(t) }) - - return mock -} diff --git a/sequencesender/txbuilder/mocks_txbuilder/rollup_elderberry_validium_contractor.go b/sequencesender/txbuilder/mocks_txbuilder/rollup_elderberry_validium_contractor.go deleted file mode 100644 index 0d94c081..00000000 --- a/sequencesender/txbuilder/mocks_txbuilder/rollup_elderberry_validium_contractor.go +++ /dev/null @@ -1,104 +0,0 @@ -// Code generated by mockery. DO NOT EDIT. 
- -package mocks_txbuilder - -import ( - bind "github.com/ethereum/go-ethereum/accounts/abi/bind" - common "github.com/ethereum/go-ethereum/common" - - mock "github.com/stretchr/testify/mock" - - polygonvalidiumetrog "github.com/0xPolygon/cdk-contracts-tooling/contracts/elderberry/polygonvalidiumetrog" - - types "github.com/ethereum/go-ethereum/core/types" -) - -// RollupElderberryValidiumContractor is an autogenerated mock type for the rollupElderberryValidiumContractor type -type RollupElderberryValidiumContractor struct { - mock.Mock -} - -type RollupElderberryValidiumContractor_Expecter struct { - mock *mock.Mock -} - -func (_m *RollupElderberryValidiumContractor) EXPECT() *RollupElderberryValidiumContractor_Expecter { - return &RollupElderberryValidiumContractor_Expecter{mock: &_m.Mock} -} - -// SequenceBatchesValidium provides a mock function with given fields: opts, batches, maxSequenceTimestamp, initSequencedBatch, l2Coinbase, dataAvailabilityMessage -func (_m *RollupElderberryValidiumContractor) SequenceBatchesValidium(opts *bind.TransactOpts, batches []polygonvalidiumetrog.PolygonValidiumEtrogValidiumBatchData, maxSequenceTimestamp uint64, initSequencedBatch uint64, l2Coinbase common.Address, dataAvailabilityMessage []byte) (*types.Transaction, error) { - ret := _m.Called(opts, batches, maxSequenceTimestamp, initSequencedBatch, l2Coinbase, dataAvailabilityMessage) - - if len(ret) == 0 { - panic("no return value specified for SequenceBatchesValidium") - } - - var r0 *types.Transaction - var r1 error - if rf, ok := ret.Get(0).(func(*bind.TransactOpts, []polygonvalidiumetrog.PolygonValidiumEtrogValidiumBatchData, uint64, uint64, common.Address, []byte) (*types.Transaction, error)); ok { - return rf(opts, batches, maxSequenceTimestamp, initSequencedBatch, l2Coinbase, dataAvailabilityMessage) - } - if rf, ok := ret.Get(0).(func(*bind.TransactOpts, []polygonvalidiumetrog.PolygonValidiumEtrogValidiumBatchData, uint64, uint64, common.Address, []byte) 
*types.Transaction); ok { - r0 = rf(opts, batches, maxSequenceTimestamp, initSequencedBatch, l2Coinbase, dataAvailabilityMessage) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(*types.Transaction) - } - } - - if rf, ok := ret.Get(1).(func(*bind.TransactOpts, []polygonvalidiumetrog.PolygonValidiumEtrogValidiumBatchData, uint64, uint64, common.Address, []byte) error); ok { - r1 = rf(opts, batches, maxSequenceTimestamp, initSequencedBatch, l2Coinbase, dataAvailabilityMessage) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// RollupElderberryValidiumContractor_SequenceBatchesValidium_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'SequenceBatchesValidium' -type RollupElderberryValidiumContractor_SequenceBatchesValidium_Call struct { - *mock.Call -} - -// SequenceBatchesValidium is a helper method to define mock.On call -// - opts *bind.TransactOpts -// - batches []polygonvalidiumetrog.PolygonValidiumEtrogValidiumBatchData -// - maxSequenceTimestamp uint64 -// - initSequencedBatch uint64 -// - l2Coinbase common.Address -// - dataAvailabilityMessage []byte -func (_e *RollupElderberryValidiumContractor_Expecter) SequenceBatchesValidium(opts interface{}, batches interface{}, maxSequenceTimestamp interface{}, initSequencedBatch interface{}, l2Coinbase interface{}, dataAvailabilityMessage interface{}) *RollupElderberryValidiumContractor_SequenceBatchesValidium_Call { - return &RollupElderberryValidiumContractor_SequenceBatchesValidium_Call{Call: _e.mock.On("SequenceBatchesValidium", opts, batches, maxSequenceTimestamp, initSequencedBatch, l2Coinbase, dataAvailabilityMessage)} -} - -func (_c *RollupElderberryValidiumContractor_SequenceBatchesValidium_Call) Run(run func(opts *bind.TransactOpts, batches []polygonvalidiumetrog.PolygonValidiumEtrogValidiumBatchData, maxSequenceTimestamp uint64, initSequencedBatch uint64, l2Coinbase common.Address, dataAvailabilityMessage []byte)) 
*RollupElderberryValidiumContractor_SequenceBatchesValidium_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(*bind.TransactOpts), args[1].([]polygonvalidiumetrog.PolygonValidiumEtrogValidiumBatchData), args[2].(uint64), args[3].(uint64), args[4].(common.Address), args[5].([]byte)) - }) - return _c -} - -func (_c *RollupElderberryValidiumContractor_SequenceBatchesValidium_Call) Return(_a0 *types.Transaction, _a1 error) *RollupElderberryValidiumContractor_SequenceBatchesValidium_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *RollupElderberryValidiumContractor_SequenceBatchesValidium_Call) RunAndReturn(run func(*bind.TransactOpts, []polygonvalidiumetrog.PolygonValidiumEtrogValidiumBatchData, uint64, uint64, common.Address, []byte) (*types.Transaction, error)) *RollupElderberryValidiumContractor_SequenceBatchesValidium_Call { - _c.Call.Return(run) - return _c -} - -// NewRollupElderberryValidiumContractor creates a new instance of RollupElderberryValidiumContractor. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. -// The first argument is typically a *testing.T value. -func NewRollupElderberryValidiumContractor(t interface { - mock.TestingT - Cleanup(func()) -}) *RollupElderberryValidiumContractor { - mock := &RollupElderberryValidiumContractor{} - mock.Mock.Test(t) - - t.Cleanup(func() { mock.AssertExpectations(t) }) - - return mock -} diff --git a/sequencesender/txbuilder/mocks_txbuilder/rollup_elderberry_zkevm_contractor.go b/sequencesender/txbuilder/mocks_txbuilder/rollup_elderberry_zkevm_contractor.go deleted file mode 100644 index 1ed208ab..00000000 --- a/sequencesender/txbuilder/mocks_txbuilder/rollup_elderberry_zkevm_contractor.go +++ /dev/null @@ -1,103 +0,0 @@ -// Code generated by mockery. DO NOT EDIT. 
- -package mocks_txbuilder - -import ( - bind "github.com/ethereum/go-ethereum/accounts/abi/bind" - common "github.com/ethereum/go-ethereum/common" - - mock "github.com/stretchr/testify/mock" - - polygonvalidiumetrog "github.com/0xPolygon/cdk-contracts-tooling/contracts/elderberry/polygonvalidiumetrog" - - types "github.com/ethereum/go-ethereum/core/types" -) - -// RollupElderberryZKEVMContractor is an autogenerated mock type for the rollupElderberryZKEVMContractor type -type RollupElderberryZKEVMContractor struct { - mock.Mock -} - -type RollupElderberryZKEVMContractor_Expecter struct { - mock *mock.Mock -} - -func (_m *RollupElderberryZKEVMContractor) EXPECT() *RollupElderberryZKEVMContractor_Expecter { - return &RollupElderberryZKEVMContractor_Expecter{mock: &_m.Mock} -} - -// SequenceBatches provides a mock function with given fields: opts, batches, maxSequenceTimestamp, initSequencedBatch, l2Coinbase -func (_m *RollupElderberryZKEVMContractor) SequenceBatches(opts *bind.TransactOpts, batches []polygonvalidiumetrog.PolygonRollupBaseEtrogBatchData, maxSequenceTimestamp uint64, initSequencedBatch uint64, l2Coinbase common.Address) (*types.Transaction, error) { - ret := _m.Called(opts, batches, maxSequenceTimestamp, initSequencedBatch, l2Coinbase) - - if len(ret) == 0 { - panic("no return value specified for SequenceBatches") - } - - var r0 *types.Transaction - var r1 error - if rf, ok := ret.Get(0).(func(*bind.TransactOpts, []polygonvalidiumetrog.PolygonRollupBaseEtrogBatchData, uint64, uint64, common.Address) (*types.Transaction, error)); ok { - return rf(opts, batches, maxSequenceTimestamp, initSequencedBatch, l2Coinbase) - } - if rf, ok := ret.Get(0).(func(*bind.TransactOpts, []polygonvalidiumetrog.PolygonRollupBaseEtrogBatchData, uint64, uint64, common.Address) *types.Transaction); ok { - r0 = rf(opts, batches, maxSequenceTimestamp, initSequencedBatch, l2Coinbase) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(*types.Transaction) - } - } - - if rf, ok 
:= ret.Get(1).(func(*bind.TransactOpts, []polygonvalidiumetrog.PolygonRollupBaseEtrogBatchData, uint64, uint64, common.Address) error); ok { - r1 = rf(opts, batches, maxSequenceTimestamp, initSequencedBatch, l2Coinbase) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// RollupElderberryZKEVMContractor_SequenceBatches_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'SequenceBatches' -type RollupElderberryZKEVMContractor_SequenceBatches_Call struct { - *mock.Call -} - -// SequenceBatches is a helper method to define mock.On call -// - opts *bind.TransactOpts -// - batches []polygonvalidiumetrog.PolygonRollupBaseEtrogBatchData -// - maxSequenceTimestamp uint64 -// - initSequencedBatch uint64 -// - l2Coinbase common.Address -func (_e *RollupElderberryZKEVMContractor_Expecter) SequenceBatches(opts interface{}, batches interface{}, maxSequenceTimestamp interface{}, initSequencedBatch interface{}, l2Coinbase interface{}) *RollupElderberryZKEVMContractor_SequenceBatches_Call { - return &RollupElderberryZKEVMContractor_SequenceBatches_Call{Call: _e.mock.On("SequenceBatches", opts, batches, maxSequenceTimestamp, initSequencedBatch, l2Coinbase)} -} - -func (_c *RollupElderberryZKEVMContractor_SequenceBatches_Call) Run(run func(opts *bind.TransactOpts, batches []polygonvalidiumetrog.PolygonRollupBaseEtrogBatchData, maxSequenceTimestamp uint64, initSequencedBatch uint64, l2Coinbase common.Address)) *RollupElderberryZKEVMContractor_SequenceBatches_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(*bind.TransactOpts), args[1].([]polygonvalidiumetrog.PolygonRollupBaseEtrogBatchData), args[2].(uint64), args[3].(uint64), args[4].(common.Address)) - }) - return _c -} - -func (_c *RollupElderberryZKEVMContractor_SequenceBatches_Call) Return(_a0 *types.Transaction, _a1 error) *RollupElderberryZKEVMContractor_SequenceBatches_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c 
*RollupElderberryZKEVMContractor_SequenceBatches_Call) RunAndReturn(run func(*bind.TransactOpts, []polygonvalidiumetrog.PolygonRollupBaseEtrogBatchData, uint64, uint64, common.Address) (*types.Transaction, error)) *RollupElderberryZKEVMContractor_SequenceBatches_Call { - _c.Call.Return(run) - return _c -} - -// NewRollupElderberryZKEVMContractor creates a new instance of RollupElderberryZKEVMContractor. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. -// The first argument is typically a *testing.T value. -func NewRollupElderberryZKEVMContractor(t interface { - mock.TestingT - Cleanup(func()) -}) *RollupElderberryZKEVMContractor { - mock := &RollupElderberryZKEVMContractor{} - mock.Mock.Test(t) - - t.Cleanup(func() { mock.AssertExpectations(t) }) - - return mock -} diff --git a/sequencesender/txbuilder/mocks_txbuilder/tx_builder.go b/sequencesender/txbuilder/mocks_txbuilder/tx_builder.go deleted file mode 100644 index bebb7dd2..00000000 --- a/sequencesender/txbuilder/mocks_txbuilder/tx_builder.go +++ /dev/null @@ -1,367 +0,0 @@ -// Code generated by mockery. DO NOT EDIT. 
- -package mocks_txbuilder - -import ( - context "context" - - common "github.com/ethereum/go-ethereum/common" - - datastream "github.com/agglayer/aggkit/state/datastream" - - mock "github.com/stretchr/testify/mock" - - seqsendertypes "github.com/agglayer/aggkit/sequencesender/seqsendertypes" - - txbuilder "github.com/agglayer/aggkit/sequencesender/txbuilder" - - types "github.com/ethereum/go-ethereum/core/types" -) - -// TxBuilder is an autogenerated mock type for the TxBuilder type -type TxBuilder struct { - mock.Mock -} - -type TxBuilder_Expecter struct { - mock *mock.Mock -} - -func (_m *TxBuilder) EXPECT() *TxBuilder_Expecter { - return &TxBuilder_Expecter{mock: &_m.Mock} -} - -// BuildSequenceBatchesTx provides a mock function with given fields: ctx, sequences -func (_m *TxBuilder) BuildSequenceBatchesTx(ctx context.Context, sequences seqsendertypes.Sequence) (*types.Transaction, error) { - ret := _m.Called(ctx, sequences) - - if len(ret) == 0 { - panic("no return value specified for BuildSequenceBatchesTx") - } - - var r0 *types.Transaction - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, seqsendertypes.Sequence) (*types.Transaction, error)); ok { - return rf(ctx, sequences) - } - if rf, ok := ret.Get(0).(func(context.Context, seqsendertypes.Sequence) *types.Transaction); ok { - r0 = rf(ctx, sequences) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(*types.Transaction) - } - } - - if rf, ok := ret.Get(1).(func(context.Context, seqsendertypes.Sequence) error); ok { - r1 = rf(ctx, sequences) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// TxBuilder_BuildSequenceBatchesTx_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'BuildSequenceBatchesTx' -type TxBuilder_BuildSequenceBatchesTx_Call struct { - *mock.Call -} - -// BuildSequenceBatchesTx is a helper method to define mock.On call -// - ctx context.Context -// - sequences seqsendertypes.Sequence -func (_e *TxBuilder_Expecter) 
BuildSequenceBatchesTx(ctx interface{}, sequences interface{}) *TxBuilder_BuildSequenceBatchesTx_Call { - return &TxBuilder_BuildSequenceBatchesTx_Call{Call: _e.mock.On("BuildSequenceBatchesTx", ctx, sequences)} -} - -func (_c *TxBuilder_BuildSequenceBatchesTx_Call) Run(run func(ctx context.Context, sequences seqsendertypes.Sequence)) *TxBuilder_BuildSequenceBatchesTx_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(seqsendertypes.Sequence)) - }) - return _c -} - -func (_c *TxBuilder_BuildSequenceBatchesTx_Call) Return(_a0 *types.Transaction, _a1 error) *TxBuilder_BuildSequenceBatchesTx_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *TxBuilder_BuildSequenceBatchesTx_Call) RunAndReturn(run func(context.Context, seqsendertypes.Sequence) (*types.Transaction, error)) *TxBuilder_BuildSequenceBatchesTx_Call { - _c.Call.Return(run) - return _c -} - -// NewBatchFromL2Block provides a mock function with given fields: l2Block -func (_m *TxBuilder) NewBatchFromL2Block(l2Block *datastream.L2Block) seqsendertypes.Batch { - ret := _m.Called(l2Block) - - if len(ret) == 0 { - panic("no return value specified for NewBatchFromL2Block") - } - - var r0 seqsendertypes.Batch - if rf, ok := ret.Get(0).(func(*datastream.L2Block) seqsendertypes.Batch); ok { - r0 = rf(l2Block) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(seqsendertypes.Batch) - } - } - - return r0 -} - -// TxBuilder_NewBatchFromL2Block_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'NewBatchFromL2Block' -type TxBuilder_NewBatchFromL2Block_Call struct { - *mock.Call -} - -// NewBatchFromL2Block is a helper method to define mock.On call -// - l2Block *datastream.L2Block -func (_e *TxBuilder_Expecter) NewBatchFromL2Block(l2Block interface{}) *TxBuilder_NewBatchFromL2Block_Call { - return &TxBuilder_NewBatchFromL2Block_Call{Call: _e.mock.On("NewBatchFromL2Block", l2Block)} -} - -func (_c 
*TxBuilder_NewBatchFromL2Block_Call) Run(run func(l2Block *datastream.L2Block)) *TxBuilder_NewBatchFromL2Block_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(*datastream.L2Block)) - }) - return _c -} - -func (_c *TxBuilder_NewBatchFromL2Block_Call) Return(_a0 seqsendertypes.Batch) *TxBuilder_NewBatchFromL2Block_Call { - _c.Call.Return(_a0) - return _c -} - -func (_c *TxBuilder_NewBatchFromL2Block_Call) RunAndReturn(run func(*datastream.L2Block) seqsendertypes.Batch) *TxBuilder_NewBatchFromL2Block_Call { - _c.Call.Return(run) - return _c -} - -// NewSequence provides a mock function with given fields: ctx, batches, coinbase -func (_m *TxBuilder) NewSequence(ctx context.Context, batches []seqsendertypes.Batch, coinbase common.Address) (seqsendertypes.Sequence, error) { - ret := _m.Called(ctx, batches, coinbase) - - if len(ret) == 0 { - panic("no return value specified for NewSequence") - } - - var r0 seqsendertypes.Sequence - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, []seqsendertypes.Batch, common.Address) (seqsendertypes.Sequence, error)); ok { - return rf(ctx, batches, coinbase) - } - if rf, ok := ret.Get(0).(func(context.Context, []seqsendertypes.Batch, common.Address) seqsendertypes.Sequence); ok { - r0 = rf(ctx, batches, coinbase) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(seqsendertypes.Sequence) - } - } - - if rf, ok := ret.Get(1).(func(context.Context, []seqsendertypes.Batch, common.Address) error); ok { - r1 = rf(ctx, batches, coinbase) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// TxBuilder_NewSequence_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'NewSequence' -type TxBuilder_NewSequence_Call struct { - *mock.Call -} - -// NewSequence is a helper method to define mock.On call -// - ctx context.Context -// - batches []seqsendertypes.Batch -// - coinbase common.Address -func (_e *TxBuilder_Expecter) NewSequence(ctx interface{}, batches interface{}, 
coinbase interface{}) *TxBuilder_NewSequence_Call { - return &TxBuilder_NewSequence_Call{Call: _e.mock.On("NewSequence", ctx, batches, coinbase)} -} - -func (_c *TxBuilder_NewSequence_Call) Run(run func(ctx context.Context, batches []seqsendertypes.Batch, coinbase common.Address)) *TxBuilder_NewSequence_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].([]seqsendertypes.Batch), args[2].(common.Address)) - }) - return _c -} - -func (_c *TxBuilder_NewSequence_Call) Return(_a0 seqsendertypes.Sequence, _a1 error) *TxBuilder_NewSequence_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *TxBuilder_NewSequence_Call) RunAndReturn(run func(context.Context, []seqsendertypes.Batch, common.Address) (seqsendertypes.Sequence, error)) *TxBuilder_NewSequence_Call { - _c.Call.Return(run) - return _c -} - -// NewSequenceIfWorthToSend provides a mock function with given fields: ctx, sequenceBatches, l2Coinbase, batchNumber -func (_m *TxBuilder) NewSequenceIfWorthToSend(ctx context.Context, sequenceBatches []seqsendertypes.Batch, l2Coinbase common.Address, batchNumber uint64) (seqsendertypes.Sequence, error) { - ret := _m.Called(ctx, sequenceBatches, l2Coinbase, batchNumber) - - if len(ret) == 0 { - panic("no return value specified for NewSequenceIfWorthToSend") - } - - var r0 seqsendertypes.Sequence - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, []seqsendertypes.Batch, common.Address, uint64) (seqsendertypes.Sequence, error)); ok { - return rf(ctx, sequenceBatches, l2Coinbase, batchNumber) - } - if rf, ok := ret.Get(0).(func(context.Context, []seqsendertypes.Batch, common.Address, uint64) seqsendertypes.Sequence); ok { - r0 = rf(ctx, sequenceBatches, l2Coinbase, batchNumber) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(seqsendertypes.Sequence) - } - } - - if rf, ok := ret.Get(1).(func(context.Context, []seqsendertypes.Batch, common.Address, uint64) error); ok { - r1 = rf(ctx, sequenceBatches, l2Coinbase, 
batchNumber) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// TxBuilder_NewSequenceIfWorthToSend_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'NewSequenceIfWorthToSend' -type TxBuilder_NewSequenceIfWorthToSend_Call struct { - *mock.Call -} - -// NewSequenceIfWorthToSend is a helper method to define mock.On call -// - ctx context.Context -// - sequenceBatches []seqsendertypes.Batch -// - l2Coinbase common.Address -// - batchNumber uint64 -func (_e *TxBuilder_Expecter) NewSequenceIfWorthToSend(ctx interface{}, sequenceBatches interface{}, l2Coinbase interface{}, batchNumber interface{}) *TxBuilder_NewSequenceIfWorthToSend_Call { - return &TxBuilder_NewSequenceIfWorthToSend_Call{Call: _e.mock.On("NewSequenceIfWorthToSend", ctx, sequenceBatches, l2Coinbase, batchNumber)} -} - -func (_c *TxBuilder_NewSequenceIfWorthToSend_Call) Run(run func(ctx context.Context, sequenceBatches []seqsendertypes.Batch, l2Coinbase common.Address, batchNumber uint64)) *TxBuilder_NewSequenceIfWorthToSend_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].([]seqsendertypes.Batch), args[2].(common.Address), args[3].(uint64)) - }) - return _c -} - -func (_c *TxBuilder_NewSequenceIfWorthToSend_Call) Return(_a0 seqsendertypes.Sequence, _a1 error) *TxBuilder_NewSequenceIfWorthToSend_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *TxBuilder_NewSequenceIfWorthToSend_Call) RunAndReturn(run func(context.Context, []seqsendertypes.Batch, common.Address, uint64) (seqsendertypes.Sequence, error)) *TxBuilder_NewSequenceIfWorthToSend_Call { - _c.Call.Return(run) - return _c -} - -// SetCondNewSeq provides a mock function with given fields: cond -func (_m *TxBuilder) SetCondNewSeq(cond txbuilder.CondNewSequence) txbuilder.CondNewSequence { - ret := _m.Called(cond) - - if len(ret) == 0 { - panic("no return value specified for SetCondNewSeq") - } - - var r0 txbuilder.CondNewSequence - if rf, ok := 
ret.Get(0).(func(txbuilder.CondNewSequence) txbuilder.CondNewSequence); ok { - r0 = rf(cond) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(txbuilder.CondNewSequence) - } - } - - return r0 -} - -// TxBuilder_SetCondNewSeq_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'SetCondNewSeq' -type TxBuilder_SetCondNewSeq_Call struct { - *mock.Call -} - -// SetCondNewSeq is a helper method to define mock.On call -// - cond txbuilder.CondNewSequence -func (_e *TxBuilder_Expecter) SetCondNewSeq(cond interface{}) *TxBuilder_SetCondNewSeq_Call { - return &TxBuilder_SetCondNewSeq_Call{Call: _e.mock.On("SetCondNewSeq", cond)} -} - -func (_c *TxBuilder_SetCondNewSeq_Call) Run(run func(cond txbuilder.CondNewSequence)) *TxBuilder_SetCondNewSeq_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(txbuilder.CondNewSequence)) - }) - return _c -} - -func (_c *TxBuilder_SetCondNewSeq_Call) Return(_a0 txbuilder.CondNewSequence) *TxBuilder_SetCondNewSeq_Call { - _c.Call.Return(_a0) - return _c -} - -func (_c *TxBuilder_SetCondNewSeq_Call) RunAndReturn(run func(txbuilder.CondNewSequence) txbuilder.CondNewSequence) *TxBuilder_SetCondNewSeq_Call { - _c.Call.Return(run) - return _c -} - -// String provides a mock function with no fields -func (_m *TxBuilder) String() string { - ret := _m.Called() - - if len(ret) == 0 { - panic("no return value specified for String") - } - - var r0 string - if rf, ok := ret.Get(0).(func() string); ok { - r0 = rf() - } else { - r0 = ret.Get(0).(string) - } - - return r0 -} - -// TxBuilder_String_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'String' -type TxBuilder_String_Call struct { - *mock.Call -} - -// String is a helper method to define mock.On call -func (_e *TxBuilder_Expecter) String() *TxBuilder_String_Call { - return &TxBuilder_String_Call{Call: _e.mock.On("String")} -} - -func (_c *TxBuilder_String_Call) Run(run func()) *TxBuilder_String_Call { 
- _c.Call.Run(func(args mock.Arguments) { - run() - }) - return _c -} - -func (_c *TxBuilder_String_Call) Return(_a0 string) *TxBuilder_String_Call { - _c.Call.Return(_a0) - return _c -} - -func (_c *TxBuilder_String_Call) RunAndReturn(run func() string) *TxBuilder_String_Call { - _c.Call.Return(run) - return _c -} - -// NewTxBuilder creates a new instance of TxBuilder. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. -// The first argument is typically a *testing.T value. -func NewTxBuilder(t interface { - mock.TestingT - Cleanup(func()) -}) *TxBuilder { - mock := &TxBuilder{} - mock.Mock.Test(t) - - t.Cleanup(func() { mock.AssertExpectations(t) }) - - return mock -} diff --git a/sequencesender/txbuilder/validium_cond_num_batches.go b/sequencesender/txbuilder/validium_cond_num_batches.go deleted file mode 100644 index 947551bd..00000000 --- a/sequencesender/txbuilder/validium_cond_num_batches.go +++ /dev/null @@ -1,34 +0,0 @@ -package txbuilder - -import ( - "context" - - "github.com/agglayer/aggkit/log" - "github.com/agglayer/aggkit/sequencesender/seqsendertypes" - "github.com/ethereum/go-ethereum/common" -) - -var MaxBatchesForL1Disabled = uint64(0) - -type ConditionalNewSequenceNumBatches struct { - maxBatchesForL1 uint64 // cfg.MaxBatchesForL1 -} - -func NewConditionalNewSequenceNumBatches(maxBatchesForL1 uint64) *ConditionalNewSequenceNumBatches { - return &ConditionalNewSequenceNumBatches{ - maxBatchesForL1: maxBatchesForL1, - } -} - -func (c *ConditionalNewSequenceNumBatches) NewSequenceIfWorthToSend( - ctx context.Context, txBuilder TxBuilder, sequenceBatches []seqsendertypes.Batch, l2Coinbase common.Address, -) (seqsendertypes.Sequence, error) { - if c.maxBatchesForL1 != MaxBatchesForL1Disabled && uint64(len(sequenceBatches)) >= c.maxBatchesForL1 { - log.Infof( - "sequence should be sent to L1, because MaxBatchesForL1 (%d) has been reached", - c.maxBatchesForL1, - ) - return 
txBuilder.NewSequence(ctx, sequenceBatches, l2Coinbase) - } - return nil, nil -} diff --git a/sequencesender/txbuilder/validium_cond_num_batches_test.go b/sequencesender/txbuilder/validium_cond_num_batches_test.go deleted file mode 100644 index 882dbe77..00000000 --- a/sequencesender/txbuilder/validium_cond_num_batches_test.go +++ /dev/null @@ -1,46 +0,0 @@ -package txbuilder_test - -import ( - "context" - "testing" - - "github.com/agglayer/aggkit/sequencesender/seqsendertypes" - "github.com/agglayer/aggkit/sequencesender/txbuilder" - "github.com/agglayer/aggkit/sequencesender/txbuilder/mocks_txbuilder" - "github.com/ethereum/go-ethereum/common" - "github.com/stretchr/testify/mock" - "github.com/stretchr/testify/require" -) - -func TestConditionalNumBatchesDisabled(t *testing.T) { - mockTxBuilder := mocks_txbuilder.NewTxBuilder(t) - sut := txbuilder.NewConditionalNewSequenceNumBatches(0) - - tx, err := sut.NewSequenceIfWorthToSend(context.TODO(), mockTxBuilder, nil, common.Address{}) - require.NoError(t, err) - require.Nil(t, tx) -} - -// It have 1 batch and minium are 2, so no new sequence -func TestConditionalNumBatchesDontFulfillCondition(t *testing.T) { - mockTxBuilder := mocks_txbuilder.NewTxBuilder(t) - sut := txbuilder.NewConditionalNewSequenceNumBatches(2) - var sequenceBatches []seqsendertypes.Batch - sequenceBatches = append(sequenceBatches, &txbuilder.BananaBatch{}) - tx, err := sut.NewSequenceIfWorthToSend(context.TODO(), mockTxBuilder, sequenceBatches, common.Address{}) - require.NoError(t, err) - require.Nil(t, tx) -} - -// It have 2 batch and minium are 2, so new sequence -func TestConditionalNumBatchesFulfillCondition(t *testing.T) { - mockTxBuilder := mocks_txbuilder.NewTxBuilder(t) - sut := txbuilder.NewConditionalNewSequenceNumBatches(2) - var sequenceBatches []seqsendertypes.Batch - sequenceBatches = append(sequenceBatches, &txbuilder.BananaBatch{}) - sequenceBatches = append(sequenceBatches, &txbuilder.BananaBatch{}) - 
mockTxBuilder.EXPECT().NewSequence(context.TODO(), mock.Anything, mock.Anything).Return(nil, nil) - tx, err := sut.NewSequenceIfWorthToSend(context.TODO(), mockTxBuilder, sequenceBatches, common.Address{}) - require.NoError(t, err) - require.Nil(t, tx) -} diff --git a/sequencesender/txbuilder/zkevm_cond_max_size.go b/sequencesender/txbuilder/zkevm_cond_max_size.go deleted file mode 100644 index d07077d7..00000000 --- a/sequencesender/txbuilder/zkevm_cond_max_size.go +++ /dev/null @@ -1,124 +0,0 @@ -package txbuilder - -import ( - "context" - "errors" - "fmt" - - "github.com/agglayer/aggkit/etherman" - "github.com/agglayer/aggkit/log" - "github.com/agglayer/aggkit/sequencesender/seqsendertypes" - "github.com/ethereum/go-ethereum/common" -) - -var ( - // ErrOversizedData when transaction input data is greater than a limit (DOS protection) - ErrOversizedData = errors.New("oversized data") - MaxTxSizeForL1Disabled = uint64(0) -) - -type ConditionalNewSequenceMaxSize struct { - maxTxSizeForL1 uint64 // cfg.MaxTxSizeForL1 -} - -func NewConditionalNewSequenceMaxSize(maxTxSizeForL1 uint64) *ConditionalNewSequenceMaxSize { - return &ConditionalNewSequenceMaxSize{ - maxTxSizeForL1: maxTxSizeForL1, - } -} - -func (c *ConditionalNewSequenceMaxSize) NewSequenceIfWorthToSend( - ctx context.Context, txBuilder TxBuilder, sequenceBatches []seqsendertypes.Batch, l2Coinbase common.Address, -) (seqsendertypes.Sequence, error) { - if c.maxTxSizeForL1 == MaxTxSizeForL1Disabled { - log.Debugf("maxTxSizeForL1 is %d, so is disabled", MaxTxSizeForL1Disabled) - return nil, nil - } - sequence, err := txBuilder.NewSequence(ctx, sequenceBatches, l2Coinbase) - if err != nil { - return nil, err - } - if sequence == nil { - err = fmt.Errorf("error txBuilder.NewSequence, returns sequence=nil and err==nil, is not expected") - log.Errorf(err.Error()) - return nil, err - } - - // Check if can be sent - tx, err := txBuilder.BuildSequenceBatchesTx(ctx, sequence) - if tx == nil && err == nil { - err = 
fmt.Errorf("error txBuilder.BuildSequenceBatchesTx, returns tx=nil and err==nil, is not expected") - log.Errorf(err.Error()) - return nil, err - } - if err == nil && tx != nil && tx.Size() > c.maxTxSizeForL1 { - log.Infof("Oversized Data on TX oldHash %s (txSize %d > %d)", tx.Hash(), tx.Size(), c.maxTxSizeForL1) - err = ErrOversizedData - } - - if err != nil { - log.Debugf("Handling estimate gas send sequence error: %v", err) - sequenceBatches, err = handleEstimateGasSendSequenceErr(sequence.Batches(), err) - if sequenceBatches != nil { - // Handling the error gracefully, re-processing the sequence as a sanity check - sequence, err = txBuilder.NewSequence(ctx, sequenceBatches, l2Coinbase) - if err != nil { - return nil, err - } - - txReduced, err := txBuilder.BuildSequenceBatchesTx(ctx, sequence) - log.Debugf("After reducing batches: (txSize %d -> %d)", tx.Size(), txReduced.Size()) - if err == nil && txReduced != nil && txReduced.Size() > c.maxTxSizeForL1 { - log.Warnf("After reducing batches: (txSize %d -> %d) is still too big > %d", - tx.Size(), txReduced.Size(), c.maxTxSizeForL1, - ) - } - return sequence, err - } - - return sequence, err - } - log.Debugf( - "Current size:%d < max_size:%d num_batches: %d, no sequence promoted yet", - tx.Size(), c.maxTxSizeForL1, sequence.Len(), - ) - return nil, nil -} - -// handleEstimateGasSendSequenceErr handles an error on the estimate gas. 
-// Results: (nil,nil)=requires waiting, (nil,error)=no handled gracefully, (seq,nil) handled gracefully -func handleEstimateGasSendSequenceErr( - sequenceBatches []seqsendertypes.Batch, err error, -) ([]seqsendertypes.Batch, error) { - // Insufficient allowance - if errors.Is(err, etherman.ErrInsufficientAllowance) { - return nil, err - } - errMsg := fmt.Sprintf("due to unknown error: %v", err) - if isDataForEthTxTooBig(err) { - errMsg = fmt.Sprintf("caused the L1 tx to be too big: %v", err) - } - var adjustMsg string - if len(sequenceBatches) > 1 { - lastPrevious := sequenceBatches[len(sequenceBatches)-1].BatchNumber() - sequenceBatches = sequenceBatches[:len(sequenceBatches)-1] - lastCurrent := sequenceBatches[len(sequenceBatches)-1].BatchNumber() - adjustMsg = fmt.Sprintf( - "removing last batch: old BatchNumber:%d -> %d, new length: %d", - lastPrevious, lastCurrent, len(sequenceBatches), - ) - } else { - sequenceBatches = nil - adjustMsg = "removing all batches" - log.Warnf("No more batches to remove, sequence is empty... 
it could be a deadlock situation") - } - log.Infof("Adjusted sequence, %s, because %s", adjustMsg, errMsg) - return sequenceBatches, nil -} - -// isDataForEthTxTooBig checks if tx oversize error -func isDataForEthTxTooBig(err error) bool { - return errors.Is(err, etherman.ErrGasRequiredExceedsAllowance) || - errors.Is(err, ErrOversizedData) || - errors.Is(err, etherman.ErrContentLengthTooLarge) -} diff --git a/sequencesender/txbuilder/zkevm_cond_max_size_test.go b/sequencesender/txbuilder/zkevm_cond_max_size_test.go deleted file mode 100644 index 541ef661..00000000 --- a/sequencesender/txbuilder/zkevm_cond_max_size_test.go +++ /dev/null @@ -1,95 +0,0 @@ -package txbuilder_test - -import ( - "context" - "testing" - - "github.com/agglayer/aggkit/etherman" - "github.com/agglayer/aggkit/sequencesender/seqsendertypes" - "github.com/agglayer/aggkit/sequencesender/txbuilder" - "github.com/agglayer/aggkit/sequencesender/txbuilder/mocks_txbuilder" - "github.com/ethereum/go-ethereum/common" - ethtypes "github.com/ethereum/go-ethereum/core/types" - "github.com/stretchr/testify/mock" - "github.com/stretchr/testify/require" -) - -func TestConditionalMaxSizeDisabled(t *testing.T) { - mockTxBuilder := mocks_txbuilder.NewTxBuilder(t) - sut := txbuilder.NewConditionalNewSequenceMaxSize(txbuilder.MaxTxSizeForL1Disabled) - - tx, err := sut.NewSequenceIfWorthToSend(context.TODO(), mockTxBuilder, nil, common.Address{}) - require.NoError(t, err) - require.Nil(t, tx) -} - -func TestConditionalMaxSizeTxBuilderNewSequenceReturnsNil(t *testing.T) { - mockTxBuilder := mocks_txbuilder.NewTxBuilder(t) - sut := txbuilder.NewConditionalNewSequenceMaxSize(1024) - var sequenceBatches []seqsendertypes.Batch - sequenceBatches = append(sequenceBatches, &txbuilder.BananaBatch{}) - mockTxBuilder.EXPECT().NewSequence(context.TODO(), sequenceBatches, common.Address{}).Return(nil, nil) - _, err := sut.NewSequenceIfWorthToSend(context.TODO(), mockTxBuilder, sequenceBatches, common.Address{}) - 
require.Error(t, err) -} - -func TestConditionalMaxSizeTxBuilderBuildSequenceBatchesTxReturnsNil(t *testing.T) { - mockTxBuilder := mocks_txbuilder.NewTxBuilder(t) - sut := txbuilder.NewConditionalNewSequenceMaxSize(1024) - var sequenceBatches []seqsendertypes.Batch - sequenceBatches = append(sequenceBatches, &txbuilder.BananaBatch{}) - seq := &txbuilder.ElderberrySequence{} - mockTxBuilder.EXPECT().NewSequence(context.TODO(), sequenceBatches, common.Address{}).Return(seq, nil) - mockTxBuilder.EXPECT().BuildSequenceBatchesTx(mock.Anything, mock.Anything).Return(nil, nil) - _, err := sut.NewSequenceIfWorthToSend(context.TODO(), mockTxBuilder, sequenceBatches, common.Address{}) - require.Error(t, err) -} - -func TestConditionalMaxSizeTxBuilderDontFulFill(t *testing.T) { - mockTxBuilder := mocks_txbuilder.NewTxBuilder(t) - sut := txbuilder.NewConditionalNewSequenceMaxSize(1024) - var sequenceBatches []seqsendertypes.Batch - sequenceBatches = append(sequenceBatches, &txbuilder.BananaBatch{}) - seq := &txbuilder.ElderberrySequence{} - mockTxBuilder.EXPECT().NewSequence(context.TODO(), sequenceBatches, common.Address{}).Return(seq, nil) - inner := ðtypes.LegacyTx{} - tx := ethtypes.NewTx(inner) - mockTxBuilder.EXPECT().BuildSequenceBatchesTx(mock.Anything, mock.Anything).Return(tx, nil) - - res, err := sut.NewSequenceIfWorthToSend(context.TODO(), mockTxBuilder, sequenceBatches, common.Address{}) - - require.NoError(t, err) - require.Nil(t, res) -} - -func TestConditionalMaxSizeTxBuilderFulFill(t *testing.T) { - mockTxBuilder := mocks_txbuilder.NewTxBuilder(t) - sut := txbuilder.NewConditionalNewSequenceMaxSize(10) - l2coinbase := common.Address{} - ctx := context.TODO() - - newSeq := newTestSeq(3, 100, l2coinbase) - mockTxBuilder.EXPECT().NewSequence(context.TODO(), newSeq.Batches(), l2coinbase).Return(newSeq, nil) - inner := ðtypes.LegacyTx{ - Data: []byte{0x01, 0x02, 0x03, 0x04}, - } - tx := ethtypes.NewTx(inner) - mockTxBuilder.EXPECT().BuildSequenceBatchesTx(ctx, 
newSeq).Return(tx, nil) - // The size of result Tx is 14 that is > 10, so it reduce 1 batch - newSeqReduced := newTestSeq(2, 100, l2coinbase) - mockTxBuilder.EXPECT().NewSequence(context.TODO(), newSeqReduced.Batches(), l2coinbase).Return(newSeqReduced, nil) - mockTxBuilder.EXPECT().BuildSequenceBatchesTx(ctx, newSeqReduced).Return(tx, nil) - - res, err := sut.NewSequenceIfWorthToSend(ctx, mockTxBuilder, newSeq.Batches(), l2coinbase) - - require.NoError(t, err) - require.NotNil(t, res) -} - -func newTestSeq(numBatches int, firstBatch uint64, l2coinbase common.Address) *txbuilder.ElderberrySequence { - var sequenceBatches []seqsendertypes.Batch - for i := 0; i < numBatches; i++ { - sequenceBatches = append(sequenceBatches, txbuilder.NewBananaBatch(ðerman.Batch{BatchNumber: firstBatch + uint64(i)})) - } - return txbuilder.NewElderberrySequence(sequenceBatches, l2coinbase) -} diff --git a/sync/mock_downloader_test.go b/sync/mock_downloader_test.go index 662f49f7..45a53b84 100644 --- a/sync/mock_downloader_test.go +++ b/sync/mock_downloader_test.go @@ -53,7 +53,7 @@ func (_c *EVMDownloaderMock_Download_Call) Return() *EVMDownloaderMock_Download_ } func (_c *EVMDownloaderMock_Download_Call) RunAndReturn(run func(context.Context, uint64, chan EVMBlock)) *EVMDownloaderMock_Download_Call { - _c.Run(run) + _c.Call.Return(run) return _c } diff --git a/test/Makefile b/test/Makefile index 46492f87..b99b372e 100644 --- a/test/Makefile +++ b/test/Makefile @@ -1,10 +1,12 @@ .PHONY: generate-mocks generate-mocks: generate-mocks-bridgesync generate-mocks-reorgdetector \ - generate-mocks-sequencesender generate-mocks-da \ + generate-mocks-da \ generate-mocks-l1infotreesync generate-mocks-helpers \ - generate-mocks-sync generate-mocks-aggregator \ + generate-mocks-sync \ generate-mocks-aggsender generate-mocks-agglayer +COMMON_MOCKERY_PARAMS=--disable-version-string --with-expecter --exported + .PHONY: generate-mocks-bridgesync generate-mocks-bridgesync: ## Generates mocks for 
bridgesync, using mockery tool export "GOROOT=$$(go env GOROOT)" && $$(go env GOPATH)/bin/mockery --all --case snake --dir ../bridgesync --output ../bridgesync/mocks --outpkg mocks_bridgesync ${COMMON_MOCKERY_PARAMS} @@ -13,18 +15,6 @@ generate-mocks-bridgesync: ## Generates mocks for bridgesync, using mockery tool generate-mocks-reorgdetector: ## Generates mocks for reorgdetector, using mockery tool export "GOROOT=$$(go env GOROOT)" && $$(go env GOPATH)/bin/mockery --name=EthClient --dir=../reorgdetector --output=../reorgdetector --outpkg=reorgdetector --inpackage --structname=EthClientMock --filename=mock_eth_client.go ${COMMON_MOCKERY_PARAMS} -COMMON_MOCKERY_PARAMS=--disable-version-string --with-expecter --exported -.PHONY: generate-mocks-sequencesender -generate-mocks-sequencesender: ## Generates mocks for sequencesender, using mockery tool - export "GOROOT=$$(go env GOROOT)" && $$(go env GOPATH)/bin/mockery --all --case snake --dir ../sequencesender/txbuilder --output ../sequencesender/txbuilder/mocks_txbuilder --outpkg mocks_txbuilder ${COMMON_MOCKERY_PARAMS} - export "GOROOT=$$(go env GOROOT)" && $$(go env GOPATH)/bin/mockery --name=EthTxManager --dir=../sequencesender --output=../sequencesender/mocks --outpkg=mocks --structname=EthTxManagerMock --filename=mock_ethtxmanager.go ${COMMON_MOCKERY_PARAMS} - export "GOROOT=$$(go env GOROOT)" && $$(go env GOPATH)/bin/mockery --name=Etherman --dir=../sequencesender --output=../sequencesender/mocks --outpkg=mocks --structname=EthermanMock --filename=mock_etherman.go ${COMMON_MOCKERY_PARAMS} - export "GOROOT=$$(go env GOROOT)" && $$(go env GOPATH)/bin/mockery --name=RPCInterface --dir=../sequencesender --output=../sequencesender/mocks --outpkg=mocks --structname=RPCInterfaceMock --filename=mock_rpc.go ${COMMON_MOCKERY_PARAMS} - -.PHONY: generate-mocks-da -generate-mocks-da: ## Generates mocks for dataavailability, using mockery tool - export "GOROOT=$$(go env GOROOT)" && $$(go env GOPATH)/bin/mockery --all --case 
snake --dir ../dataavailability --output ../dataavailability/mocks_da --outpkg mocks_da ${COMMON_MOCKERY_PARAMS} - .PHONY: generate-mocks-rpc generate-mocks-rpc: ## Generates mocks for rpc, using mockery tool export "GOROOT=$$(go env GOROOT)" && $$(go env GOPATH)/bin/mockery --all --case snake --dir ../rpc --output ../rpc/mocks --outpkg mocks ${COMMON_MOCKERY_PARAMS} @@ -50,19 +40,6 @@ generate-mocks-sync: ## Generates mocks for sync, using mockery tool export "GOROOT=$$(go env GOROOT)" && $$(go env GOPATH)/bin/mockery --name=processorInterface --dir=../sync --output=../sync --outpkg=sync --inpackage --structname=ProcessorMock --filename=mock_processor_test.go ${COMMON_MOCKERY_PARAMS} export "GOROOT=$$(go env GOROOT)" && $$(go env GOPATH)/bin/mockery --name=ReorgDetector --dir=../sync --output=../sync --outpkg=sync --inpackage --structname=ReorgDetectorMock --filename=mock_reorgdetector_test.go ${COMMON_MOCKERY_PARAMS} - -.PHONY: generate-mocks-aggregator -generate-mocks-aggregator: ## Generates mocks for aggregator, using mockery tool - export "GOROOT=$$(go env GOROOT)" && $$(go env GOPATH)/bin/mockery --name=ProverInterface --dir=../aggregator --output=../aggregator/mocks --outpkg=mocks --structname=ProverInterfaceMock --filename=mock_prover.go ${COMMON_MOCKERY_PARAMS} - export "GOROOT=$$(go env GOROOT)" && $$(go env GOPATH)/bin/mockery --name=Etherman --dir=../aggregator --output=../aggregator/mocks --outpkg=mocks --structname=EthermanMock --filename=mock_etherman.go ${COMMON_MOCKERY_PARAMS} - export "GOROOT=$$(go env GOROOT)" && $$(go env GOPATH)/bin/mockery --name=StorageInterface --dir=../aggregator --output=../aggregator/mocks --outpkg=mocks --structname=StorageInterfaceMock --filename=mock_storage.go ${COMMON_MOCKERY_PARAMS} - export "GOROOT=$$(go env GOROOT)" && $$(go env GOPATH)/bin/mockery --name=Synchronizer --srcpkg=github.com/0xPolygonHermez/zkevm-synchronizer-l1/synchronizer --output=../aggregator/mocks --outpkg=mocks 
--structname=SynchronizerInterfaceMock --filename=mock_synchronizer.go ${COMMON_MOCKERY_PARAMS} - export "GOROOT=$$(go env GOROOT)" && $$(go env GOPATH)/bin/mockery --name=EthTxManagerClient --dir=../aggregator --output=../aggregator/mocks --outpkg=mocks --structname=EthTxManagerClientMock --filename=mock_eth_tx_manager.go ${COMMON_MOCKERY_PARAMS} - export "GOROOT=$$(go env GOROOT)" && $$(go env GOPATH)/bin/mockery --name=Txer --dir=../db --output=../aggregator/mocks --outpkg=mocks --structname=TxerMock --filename=mock_txer.go ${COMMON_MOCKERY_PARAMS} - export "GOROOT=$$(go env GOROOT)" && $$(go env GOPATH)/bin/mockery --name=RPCInterface --dir=../aggregator --output=../aggregator/mocks --outpkg=mocks --structname=RPCInterfaceMock --filename=mock_rpc.go ${COMMON_MOCKERY_PARAMS} - export "GOROOT=$$(go env GOROOT)" && $$(go env GOPATH)/bin/mockery --name=AggregatorService_ChannelServer --dir=../aggregator/prover --output=../aggregator/prover/mocks --outpkg=mocks --structname=ChannelMock --filename=mock_channel.go ${COMMON_MOCKERY_PARAMS} - - .PHONY: generate-mocks-aggsender generate-mocks-aggsender: ## Generates mocks for aggsender, using mockery tool export "GOROOT=$$(go env GOROOT)" && $$(go env GOPATH)/bin/mockery --all --case snake --dir ../aggsender --output ../aggsender/mocks --outpkg mocks ${COMMON_MOCKERY_PARAMS} @@ -71,30 +48,6 @@ generate-mocks-aggsender: ## Generates mocks for aggsender, using mockery tool generate-mocks-agglayer: ## Generates mocks for agglayer, using mockery tool export "GOROOT=$$(go env GOROOT)" && $$(go env GOPATH)/bin/mockery --name=AgglayerClientInterface --dir=../agglayer --output=../agglayer --outpkg=agglayer --inpackage --structname=AgglayerClientMock --filename=mock_agglayer_client.go ${COMMON_MOCKERY_PARAMS} -.PHONY: test-e2e-fork9-validium -test-e2e-fork9-validium: stop - ./run-e2e.sh fork9 cdk-validium - export BATS_TEST_TIMEOUT=1800 - bats bats/fep/ - -.PHONY: test-e2e-fork11-rollup -test-e2e-fork11-rollup: stop - 
./run-e2e.sh fork11 rollup - export BATS_TEST_TIMEOUT=1800 - bats bats/fep/ - -.PHONY: test-e2e-fork12-validium -test-e2e-fork12-validium: stop - ./run-e2e.sh fork12 cdk-validium - export BATS_TEST_TIMEOUT=1800 - bats bats/fep/ - -.PHONY: test-e2e-fork12-rollup -test-e2e-fork12-rollup: stop - ./run-e2e.sh fork12 rollup - export BATS_TEST_TIMEOUT=1800 - bats bats/fep/ - .PHONY: test-e2e-fork12-pessimistic test-e2e-fork12-pessimistic: stop ./run-e2e.sh fork12 pessimistic diff --git a/test/bats/fep/access-list-e2e.bats b/test/bats/fep/access-list-e2e.bats deleted file mode 100644 index cc621c10..00000000 --- a/test/bats/fep/access-list-e2e.bats +++ /dev/null @@ -1,119 +0,0 @@ -setup() { - load '../helpers/common-setup' - load '../helpers/common' - - _common_setup - - readonly erigon_sequencer_node=${KURTOSIS_ERIGON_SEQUENCER:-cdk-erigon-sequencer-001} - readonly kurtosis_sequencer_wrapper=${KURTOSIS_SEQUENCER_WRAPPER:-"kurtosis service exec $enclave $erigon_sequencer_node"} - readonly key=${SENDER_PRIVATE_KEY:-"12d7de8621a77640c9241b2595ba78ce443d05e94090365ab3bb5e19df82c625"} - readonly receiver=${RECEIVER:-"0x85dA99c8a7C2C95964c8EfD687E95E632Fc533D6"} - readonly data_dir=${ACL_DATA_DIR:-"/home/erigon/data/dynamic-kurtosis-sequencer/txpool/acls"} -} - -teardown() { - run set_acl_mode "disabled" -} - -# Helper function to add address to acl dynamically -add_to_access_list() { - local acl_type="$1" - local policy="$2" - local sender=$(cast wallet address "$key") - - run $kurtosis_sequencer_wrapper "acl add --datadir $data_dir --address $sender --type $acl_type --policy $policy" -} - -# Helper function to set the acl mode command dynamically -set_acl_mode() { - local mode="$1" - - run $kurtosis_sequencer_wrapper "acl mode --datadir $data_dir --mode $mode" -} - -@test "Test Block List - Sending regular transaction when address not in block list" { - local value="10ether" - run set_acl_mode "blocklist" - run send_tx $l2_rpc_url $key $receiver $value - - assert_success - 
assert_output --regexp "Transaction successful \(transaction hash: 0x[a-fA-F0-9]{64}\)" -} - -@test "Test Block List - Sending contracts deploy transaction when address not in block list" { - local contract_artifact="./contracts/erc20mock/ERC20Mock.json" - run set_acl_mode "blocklist" - run deploy_contract $l2_rpc_url $key $contract_artifact - - assert_success - - contract_addr=$(echo "$output" | tail -n 1) - assert_output --regexp "0x[a-fA-F0-9]{40}" -} - -@test "Test Block List - Sending regular transaction when address is in block list" { - local value="10ether" - - run set_acl_mode "blocklist" - run add_to_access_list "blocklist" "sendTx" - - run send_tx $l2_rpc_url $key $receiver $value - - assert_failure - assert_output --partial "sender disallowed to send tx by ACL policy" -} - -@test "Test Block List - Sending contracts deploy transaction when address is in block list" { - local contract_artifact="./contracts/erc20mock/ERC20Mock.json" - - run set_acl_mode "blocklist" - run add_to_access_list "blocklist" "deploy" - run deploy_contract $l2_rpc_url $key $contract_artifact - - assert_failure - assert_output --partial "sender disallowed to deploy contract by ACL policy" -} - -@test "Test Allow List - Sending regular transaction when address not in allow list" { - local value="10ether" - - run set_acl_mode "allowlist" - run send_tx $l2_rpc_url $key $receiver $value - - assert_failure - assert_output --partial "sender disallowed to send tx by ACL policy" -} - -@test "Test Allow List - Sending contracts deploy transaction when address not in allow list" { - local contract_artifact="./contracts/erc20mock/ERC20Mock.json" - - run set_acl_mode "allowlist" - run deploy_contract $l2_rpc_url $key $contract_artifact - - assert_failure - assert_output --partial "sender disallowed to deploy contract by ACL policy" -} - -@test "Test Allow List - Sending regular transaction when address is in allow list" { - local value="10ether" - - run set_acl_mode "allowlist" - run 
add_to_access_list "allowlist" "sendTx" - run send_tx $l2_rpc_url $key $receiver $value - - assert_success - assert_output --regexp "Transaction successful \(transaction hash: 0x[a-fA-F0-9]{64}\)" -} - -@test "Test Allow List - Sending contracts deploy transaction when address is in allow list" { - local contract_artifact="./contracts/erc20mock/ERC20Mock.json" - - run set_acl_mode "allowlist" - run add_to_access_list "allowlist" "deploy" - run deploy_contract $l2_rpc_url $key $contract_artifact - - assert_success - - contract_addr=$(echo "$output" | tail -n 1) - assert_output --regexp "0x[a-fA-F0-9]{40}" -} diff --git a/test/bats/fep/basic-e2e.bats b/test/bats/fep/basic-e2e.bats deleted file mode 100644 index d977f4bc..00000000 --- a/test/bats/fep/basic-e2e.bats +++ /dev/null @@ -1,196 +0,0 @@ -setup() { - load '../helpers/common-setup' - load '../helpers/common' - - _common_setup - - readonly sender_private_key=${SENDER_PRIVATE_KEY:-"12d7de8621a77640c9241b2595ba78ce443d05e94090365ab3bb5e19df82c625"} - readonly receiver=${RECEIVER:-"0x85dA99c8a7C2C95964c8EfD687E95E632Fc533D6"} -} - -@test "Send EOA transaction" { - local sender_addr=$(cast wallet address --private-key "$sender_private_key") - local initial_nonce=$(cast nonce "$sender_addr" --rpc-url "$l2_rpc_url") || { - echo "Failed to retrieve nonce for sender: $sender_addr using RPC URL: $l2_rpc_url" - return 1 - } - local value="10ether" - - # case 1: Transaction successful sender has sufficient balance - run send_tx "$l2_rpc_url" "$sender_private_key" "$receiver" "$value" - assert_success - assert_output --regexp "Transaction successful \(transaction hash: 0x[a-fA-F0-9]{64}\)" - - # case 2: Transaction rejected as sender attempts to transfer more than it has in its wallet. 
- # Transaction will fail pre-validation check on the node and will be dropped subsequently from the pool - # without recording it on the chain and hence nonce will not change - local sender_balance=$(cast balance "$sender_addr" --ether --rpc-url "$l2_rpc_url") || { - echo "Failed to retrieve balance for sender: $sender_addr using RPC URL: $l2_rpc_url" - return 1 - } - local excessive_value=$(echo "$sender_balance + 1" | bc)"ether" - run send_tx "$l2_rpc_url" "$sender_private_key" "$receiver" "$excessive_value" - assert_failure - - # Check whether the sender's nonce was updated correctly - local final_nonce=$(cast nonce "$sender_addr" --rpc-url "$l2_rpc_url") || { - echo "Failed to retrieve nonce for sender: $sender_addr using RPC URL: $l2_rpc_url" - return 1 - } - assert_equal "$final_nonce" "$(echo "$initial_nonce + 1" | bc)" -} - -@test "Test ERC20Mock contract" { - local contract_artifact="./contracts/erc20mock/ERC20Mock.json" - wallet_A_output=$(cast wallet new) - address_A=$(echo "$wallet_A_output" | grep "Address" | awk '{print $2}') - address_A_private_key=$(echo "$wallet_A_output" | grep "Private key" | awk '{print $3}') - address_B=$(cast wallet new | grep "Address" | awk '{print $2}') - - # Deploy ERC20Mock - run deploy_contract "$l2_rpc_url" "$sender_private_key" "$contract_artifact" - assert_success - contract_addr=$(echo "$output" | tail -n 1) - - # Mint ERC20 tokens - local amount="5" - - run send_tx "$l2_rpc_url" "$sender_private_key" "$contract_addr" "$mint_fn_sig" "$address_A" "$amount" - assert_success - assert_output --regexp "Transaction successful \(transaction hash: 0x[a-fA-F0-9]{64}\)" - - ## Case 2: Insufficient gas scenario => Transactions fails - # nonce would not increase since transaction fails at the node's pre-validation check - # Get bytecode from the contract artifact - local bytecode=$(jq -r .bytecode "$contract_artifact") - if [[ -z "$bytecode" || "$bytecode" == "null" ]]; then - echo "Error: Failed to read bytecode from 
$contract_artifact" - return 1 - fi - - # Estimate gas, gas price and gas cost - local gas_units=$(cast estimate --rpc-url "$l2_rpc_url" --create "$bytecode") - gas_units=$(echo "scale=0; $gas_units / 2" | bc) - local gas_price=$(cast gas-price --rpc-url "$l2_rpc_url") - local value=$(echo "$gas_units * $gas_price" | bc) - local value_ether=$(cast to-unit "$value" ether)"ether" - - # Transfer only half amount of tokens needed for contract deployment fees - cast_output=$(cast send --rpc-url "$l2_rpc_url" --private-key "$sender_private_key" "$address_A" --value "$value_ether" --legacy 2>&1) - if [[ $? -ne 0 ]]; then - echo "Error: Failed to send transaction. Output:" - echo "$cast_output" - return 1 - fi - - # Fetch initial nonce for address_A - local address_A_initial_nonce=$(cast nonce "$address_A" --rpc-url "$l2_rpc_url") || return 1 - # Attempt to deploy contract with insufficient gas - run deploy_contract "$l2_rpc_url" "$address_A_private_key" "$contract_artifact" - assert_failure - - ## Case 3: Transaction should fail as address_A tries to transfer more tokens than it has - # nonce would not increase - # Transfer funds for gas fees to address_A - value_ether="4ether" - cast_output=$(cast send --rpc-url "$l2_rpc_url" --private-key "$sender_private_key" "$address_A" --value "$value_ether" --legacy 2>&1) - if [[ $? -ne 0 ]]; then - echo "Error: Failed to send transaction. 
Output:" - echo "$cast_output" - return 1 - fi - - # Fetch balance of address_A to simulate excessive transfer - run query_contract "$l2_rpc_url" "$contract_addr" "$balance_of_fn_sig" "$address_A" - assert_success - local address_A_Balance=$(echo "$output" | tail -n 1) - address_A_Balance=$(echo "$address_A_Balance" | xargs) - - # Set excessive amount for transfer - local excessive_amount=$(echo "$address_A_Balance + 1" | bc) - - # Attempt transfer of excessive amount from address_A to address_B - local tranferFnSig="transfer(address,uint256)" - run send_tx "$l2_rpc_url" "$address_A_private_key" "$contract_addr" "$tranferFnSig" "$address_B" "$excessive_amount" - assert_failure - - # Verify balance of address_A after failed transaction - run query_contract "$l2_rpc_url" "$contract_addr" "$balance_of_fn_sig" "$address_A" - assert_success - address_A_BalanceAfterFailedTx=$(echo "$output" | tail -n 1) - address_A_BalanceAfterFailedTx=$(echo "$address_A_BalanceAfterFailedTx" | xargs) - - # Ensure balance is unchanged - assert_equal "$address_A_BalanceAfterFailedTx" "$address_A_Balance" - - # Verify balance of address_B is still zero - run query_contract "$l2_rpc_url" "$contract_addr" "$balance_of_fn_sig" "$address_B" - assert_success - local address_B_Balance=$(echo "$output" | tail -n 1) - address_B_Balance=$(echo "$address_B_Balance" | xargs) - - assert_equal "$address_B_Balance" "0" - - # Nonce should not increase - local address_A_final_nonce=$(cast nonce "$address_A" --rpc-url "$l2_rpc_url") || { - echo "Failed to retrieve nonce for sender: $address_A using RPC URL: $l2_rpc_url" - return 1 - } - assert_equal "$address_A_final_nonce" "$address_A_initial_nonce" -} - - -@test "Deploy and test UniswapV3 contract" { - # Generate new key pair - wallet_A_output=$(cast wallet new) - address_A=$(echo "$wallet_A_output" | grep "Address" | awk '{print $2}') - address_A_private_key=$(echo "$wallet_A_output" | grep "Private key" | awk '{print $3}') - - # Transfer funds for gas 
- local value_ether="50ether" - cast_output=$(cast send --rpc-url "$l2_rpc_url" --private-key "$sender_private_key" "$address_A" --value "$value_ether" --legacy 2>&1) - if [[ $? -ne 0 ]]; then - echo "Error: Failed to send transaction. Output:" - echo "$cast_output" - return 1 - fi - - run polycli loadtest uniswapv3 --legacy -v 600 --rpc-url $l2_rpc_url --private-key $address_A_private_key - assert_success - - # Remove ANSI escape codes from the output - output=$(echo "$output" | sed -r "s/\x1B\[[0-9;]*[mGKH]//g") - - # Check if all required Uniswap contracts were deployed - assert_output --regexp "Contract deployed address=0x[a-fA-F0-9]{40} name=WETH9" - assert_output --regexp "Contract deployed address=0x[a-fA-F0-9]{40} name=UniswapV3Factory" - assert_output --regexp "Contract deployed address=0x[a-fA-F0-9]{40} name=UniswapInterfaceMulticall" - assert_output --regexp "Contract deployed address=0x[a-fA-F0-9]{40} name=ProxyAdmin" - assert_output --regexp "Contract deployed address=0x[a-fA-F0-9]{40} name=TickLens" - assert_output --regexp "Contract deployed address=0x[a-fA-F0-9]{40} name=NFTDescriptor" - assert_output --regexp "Contract deployed address=0x[a-fA-F0-9]{40} name=NonfungibleTokenPositionDescriptor" - assert_output --regexp "Contract deployed address=0x[a-fA-F0-9]{40} name=TransparentUpgradeableProxy" - assert_output --regexp "Contract deployed address=0x[a-fA-F0-9]{40} name=NonfungiblePositionManager" - assert_output --regexp "Contract deployed address=0x[a-fA-F0-9]{40} name=V3Migrator" - assert_output --regexp "Contract deployed address=0x[a-fA-F0-9]{40} name=UniswapV3Staker" - assert_output --regexp "Contract deployed address=0x[a-fA-F0-9]{40} name=QuoterV2" - assert_output --regexp "Contract deployed address=0x[a-fA-F0-9]{40} name=SwapRouter02" - - # Check if ERC20 tokens were minted - assert_output --regexp "Minted tokens amount=[0-9]+ recipient=0x[a-fA-F0-9]{40} token=SwapperA" - assert_output --regexp "Minted tokens amount=[0-9]+ 
recipient=0x[a-fA-F0-9]{40} token=SwapperB" - - # Check if liquidity pool was created and initialized - assert_output --regexp "Pool created and initialized fees=[0-9]+" - - # Check if liquidity was provided to the pool - assert_output --regexp "Liquidity provided to the pool liquidity=[0-9]+" - - # Check if transaction got executed successfully - assert_output --regexp "Starting main load test loop currentNonce=[0-9]+" - assert_output --regexp "Finished main load test loop lastNonce=[0-9]+ startNonce=[0-9]+" - assert_output --regexp "Got final block number currentNonce=[0-9]+ final block number=[0-9]+" - assert_output --regexp "Num errors numErrors=0" - assert_output --regexp "Finished" -} - diff --git a/test/bats/fep/bridge-e2e.bats b/test/bats/fep/bridge-e2e.bats deleted file mode 100644 index d9203297..00000000 --- a/test/bats/fep/bridge-e2e.bats +++ /dev/null @@ -1,193 +0,0 @@ -setup() { - load '../helpers/common-setup' - load '../helpers/common' - load '../helpers/lxly-bridge' - - _common_setup - - if [ -z "$BRIDGE_ADDRESS" ]; then - local combined_json_file="/opt/zkevm/combined.json" - echo "BRIDGE_ADDRESS env variable is not provided, resolving the bridge address from the Kurtosis CDK '$combined_json_file'" >&3 - - # Fetching the combined JSON output and filtering to get polygonZkEVMBridgeAddress - combined_json_output=$($contracts_service_wrapper "cat $combined_json_file" | tail -n +2) - bridge_default_address=$(echo "$combined_json_output" | jq -r .polygonZkEVMBridgeAddress) - BRIDGE_ADDRESS=$bridge_default_address - fi - echo "Bridge address=$BRIDGE_ADDRESS" >&3 - - readonly sender_private_key=${SENDER_PRIVATE_KEY:-"12d7de8621a77640c9241b2595ba78ce443d05e94090365ab3bb5e19df82c625"} - readonly sender_addr="$(cast wallet address --private-key $sender_private_key)" - destination_net=${DESTINATION_NET:-"1"} - destination_addr=${DESTINATION_ADDRESS:-"0x0bb7AA0b4FdC2D2862c088424260e99ed6299148"} - ether_value=${ETHER_VALUE:-"0.0200000054"} - amount=$(cast 
to-wei $ether_value ether) - readonly native_token_addr=${NATIVE_TOKEN_ADDRESS:-"0x0000000000000000000000000000000000000000"} - if [[ -n "$GAS_TOKEN_ADDR" ]]; then - echo "Using provided GAS_TOKEN_ADDR: $GAS_TOKEN_ADDR" >&3 - gas_token_addr="$GAS_TOKEN_ADDR" - else - echo "GAS_TOKEN_ADDR not provided, retrieving from rollup parameters file." >&3 - readonly rollup_params_file=/opt/zkevm/create_rollup_parameters.json - run bash -c "$contracts_service_wrapper 'cat $rollup_params_file' | tail -n +2 | jq -r '.gasTokenAddress'" - assert_success - assert_output --regexp "0x[a-fA-F0-9]{40}" - gas_token_addr=$output - fi - readonly is_forced=${IS_FORCED:-"true"} - readonly bridge_addr=$BRIDGE_ADDRESS - readonly meta_bytes=${META_BYTES:-"0x"} - - readonly l1_rpc_url=${L1_ETH_RPC_URL:-"$(kurtosis port print $enclave el-1-geth-lighthouse rpc)"} - readonly bridge_api_url=${BRIDGE_API_URL:-"$(kurtosis port print $enclave zkevm-bridge-service-001 rpc)"} - - readonly dry_run=${DRY_RUN:-"false"} - readonly l1_rpc_network_id=$(cast call --rpc-url $l1_rpc_url $bridge_addr 'networkID() (uint32)') - readonly l2_rpc_network_id=$(cast call --rpc-url $l2_rpc_url $bridge_addr 'networkID() (uint32)') - gas_price=$(cast gas-price --rpc-url "$l2_rpc_url") - readonly weth_token_addr=$(cast call --rpc-url $l2_rpc_url $bridge_addr 'WETHToken()' | cast parse-bytes32-address) -} - -# Helper function to run native gas token deposit to WETH -native_gas_token_deposit_to_WETH() { - local bridge_type="$1" - - echo "Bridge_type: $bridge_type" >&3 - - destination_addr=$sender_addr - local initial_receiver_balance=$(cast call --rpc-url "$l2_rpc_url" "$weth_token_addr" "$balance_of_fn_sig" "$destination_addr" | awk '{print $1}') - echo "Initial receiver balance of native token on L2 $initial_receiver_balance" >&3 - - echo "=== Running LxLy deposit $bridge_type on L1 to network: $l2_rpc_network_id native_token: $native_token_addr" >&3 - - destination_net=$l2_rpc_network_id - - if [[ $bridge_type == 
"bridgeMessage" ]]; then - run bridge_message "$native_token_addr" "$l1_rpc_url" - else - run bridge_asset "$native_token_addr" "$l1_rpc_url" - fi - assert_success - - echo "=== Claiming on L2..." >&3 - timeout="120" - claim_frequency="10" - run wait_for_claim "$timeout" "$claim_frequency" "$l2_rpc_url" "$bridge_type" - assert_success - - run verify_balance "$l2_rpc_url" "$weth_token_addr" "$destination_addr" "$initial_receiver_balance" "$ether_value" - assert_success - - echo "=== $bridge_type L2 WETH: $weth_token_addr to L1 ETH" >&3 - destination_addr=$sender_addr - destination_net=0 - - if [[ $bridge_type == "bridgeMessage" ]]; then - run bridge_message "$weth_token_addr" "$l2_rpc_url" - else - run bridge_asset "$weth_token_addr" "$l2_rpc_url" - fi - assert_success - - echo "=== Claiming on L1..." >&3 - timeout="400" - claim_frequency="60" - run wait_for_claim "$timeout" "$claim_frequency" "$l1_rpc_url" "$bridge_type" - assert_success -} - -@test "Native gas token deposit to WETH - BridgeAsset" { - run native_gas_token_deposit_to_WETH "bridgeAsset" -} - -@test "Native gas token deposit to WETH - BridgeMessage" { - run native_gas_token_deposit_to_WETH "bridgeMessage" -} - -@test "Custom gas token deposit" { - echo "Gas token addr $gas_token_addr, L1 RPC: $l1_rpc_url" >&3 - - # Set receiver address and query for its initial native token balance on the L2 - receiver=${RECEIVER:-"0x85dA99c8a7C2C95964c8EfD687E95E632Fc533D6"} - local initial_receiver_balance=$(cast balance "$receiver" --rpc-url "$l2_rpc_url") - echo "Initial receiver balance of native token on L2 $initial_receiver_balance" >&3 - - local l1_minter_balance=$(cast balance "0x8943545177806ED17B9F23F0a21ee5948eCaa776" --rpc-url "$l1_rpc_url") - echo "Initial minter balance on L1 $l1_minter_balance" >&3 - - # Query for initial sender balance - run query_contract "$l1_rpc_url" "$gas_token_addr" "$balance_of_fn_sig" "$sender_addr" - assert_success - local gas_token_init_sender_balance=$(echo "$output" | tail 
-n 1 | awk '{print $1}') - echo "Initial sender balance $gas_token_init_sender_balance" of gas token on L1 >&3 - - # Mint gas token on L1 - local tokens_amount="0.1ether" - local wei_amount=$(cast --to-unit $tokens_amount wei) - local minter_key=${MINTER_KEY:-"bcdf20249abf0ed6d944c0288fad489e33f66b3960d9e6229c1cd214ed3bbe31"} - run mint_erc20_tokens "$l1_rpc_url" "$gas_token_addr" "$minter_key" "$sender_addr" "$tokens_amount" - assert_success - - # Assert that balance of gas token (on the L1) is correct - run query_contract "$l1_rpc_url" "$gas_token_addr" "$balance_of_fn_sig" "$sender_addr" - assert_success - local gas_token_final_sender_balance=$(echo "$output" | - tail -n 1 | - awk '{print $1}') - local expected_balance=$(echo "$gas_token_init_sender_balance + $wei_amount" | - bc | - awk '{print $1}') - - echo "Sender balance ($sender_addr) (gas token L1): $gas_token_final_sender_balance" >&3 - assert_equal "$gas_token_final_sender_balance" "$expected_balance" - - # Send approve transaction to the gas token on L1 - deposit_ether_value="0.1ether" - run send_tx "$l1_rpc_url" "$sender_private_key" "$gas_token_addr" "$approve_fn_sig" "$bridge_addr" "$deposit_ether_value" - assert_success - assert_output --regexp "Transaction successful \(transaction hash: 0x[a-fA-F0-9]{64}\)" - - # Deposit - destination_addr=$receiver - destination_net=$l2_rpc_network_id - amount=$wei_amount - run bridge_asset "$gas_token_addr" "$l1_rpc_url" - assert_success - - # Claim deposits (settle them on the L2) - timeout="120" - claim_frequency="10" - run wait_for_claim "$timeout" "$claim_frequency" "$l2_rpc_url" "bridgeAsset" - assert_success - - # Validate that the native token of receiver on L2 has increased by the bridge tokens amount - run verify_balance "$l2_rpc_url" "$native_token_addr" "$receiver" "$initial_receiver_balance" "$tokens_amount" - assert_success -} - -@test "Custom gas token withdrawal" { - echo "Running LxLy withdrawal" >&3 - echo "Gas token addr $gas_token_addr, L1 RPC: 
$l1_rpc_url" >&3 - - local initial_receiver_balance=$(cast call --rpc-url "$l1_rpc_url" "$gas_token_addr" "$balance_of_fn_sig" "$destination_addr" | awk '{print $1}') - assert_success - echo "Receiver balance of gas token on L1 $initial_receiver_balance" >&3 - - destination_net=$l1_rpc_network_id - run bridge_asset "$native_token_addr" "$l2_rpc_url" - assert_success - - # Claim withdrawals (settle them on the L1) - timeout="360" - claim_frequency="10" - destination_net=$l1_rpc_network_id - run wait_for_claim "$timeout" "$claim_frequency" "$l1_rpc_url" "bridgeAsset" - assert_success - - # Validate that the token of receiver on L1 has increased by the bridge tokens amount - run verify_balance "$l1_rpc_url" "$gas_token_addr" "$destination_addr" "$initial_receiver_balance" "$ether_value" - if [ $status -eq 0 ]; then - break - fi - assert_success -} diff --git a/test/bats/fep/e2e.bats b/test/bats/fep/e2e.bats deleted file mode 100644 index a468e7aa..00000000 --- a/test/bats/fep/e2e.bats +++ /dev/null @@ -1,11 +0,0 @@ -setup() { - load '../helpers/common-setup' - - _common_setup -} - -@test "Verify batches" { - echo "Waiting 10 minutes to get some verified batch...." - run $PROJECT_ROOT/../scripts/batch_verification_monitor.sh 0 600 - assert_success -} diff --git a/test/bats/helpers/common-multi_cdk-setup.bash b/test/bats/helpers/common-multi_cdk-setup.bash index 2758c9f7..c5e055ee 100644 --- a/test/bats/helpers/common-multi_cdk-setup.bash +++ b/test/bats/helpers/common-multi_cdk-setup.bash @@ -3,13 +3,13 @@ _common_multi_setup() { load '../helpers/common-setup' _common_setup - # generated with cast wallet new + # generated with cast wallet new readonly target_address=0xbecE3a31343c6019CDE0D5a4dF2AF8Df17ebcB0f readonly target_private_key=0x51caa196504216b1730280feb63ddd8c5ae194d13e57e58d559f1f1dc3eda7c9 - kurtosis service exec $enclave contracts-001 "cat /opt/zkevm/combined-001.json" | tail -n +2 | jq '.' 
> combined-001.json - kurtosis service exec $enclave contracts-002 "cat /opt/zkevm/combined-002.json" | tail -n +2 | jq '.' > combined-002.json - kurtosis service exec $enclave contracts-002 "cat /opt/zkevm-contracts/deployment/v2/create_rollup_parameters.json" | tail -n +2 | jq -r '.gasTokenAddress' > gas-token-address.json + kurtosis service exec $enclave contracts-001 "cat /opt/zkevm/combined-001.json" | tail -n +2 | jq '.' >combined-001.json + kurtosis service exec $enclave contracts-002 "cat /opt/zkevm/combined-002.json" | tail -n +2 | jq '.' >combined-002.json + kurtosis service exec $enclave contracts-002 "cat /opt/zkevm-contracts/deployment/v2/create_rollup_parameters.json" | tail -n +2 | jq -r '.gasTokenAddress' >gas-token-address.json readonly private_key="0x12d7de8621a77640c9241b2595ba78ce443d05e94090365ab3bb5e19df82c625" readonly eth_address=$(cast wallet address --private-key $private_key) @@ -27,7 +27,7 @@ _common_multi_setup() { readonly l1_rpc_network_id=$(cast call --rpc-url $l1_rpc_url $bridge_address 'networkID() (uint32)') readonly l2_pp1b_network_id=$(cast call --rpc-url $l2_pp1_url $bridge_address 'networkID() (uint32)') readonly l2_pp2b_network_id=$(cast call --rpc-url $l2_pp2_url $bridge_address 'networkID() (uint32)') - + readonly aggsender_find_imported_bridge="../target/aggsender_find_imported_bridge" echo "=== Bridge address=$bridge_address ===" >&3 echo "=== POL address=$pol_address ===" >&3 @@ -40,10 +40,10 @@ _common_multi_setup() { echo "=== L2 PP2 URL=$l2_pp2_url ===" >&3 echo "=== L2 PP1B URL=$l2_pp1b_url ===" >&3 echo "=== L2 PP2B URL=$l2_pp2b_url ===" >&3 - + } -add_cdk_network2_to_agglayer(){ +add_cdk_network2_to_agglayer() { echo "=== Checking if network 2 is in agglayer ===" >&3 local _prev=$(kurtosis service exec $enclave agglayer "grep \"2 = \" /etc/zkevm/agglayer-config.toml || true" | tail -n +2) if [ ! 
-z "$_prev" ]; then @@ -56,26 +56,25 @@ add_cdk_network2_to_agglayer(){ kurtosis service start $enclave agglayer } -fund_claim_tx_manager(){ +fund_claim_tx_manager() { echo "=== Funding bridge auto-claim ===" >&3 cast send --legacy --value 100ether --rpc-url $l2_pp1_url --private-key $private_key 0x5f5dB0D4D58310F53713eF4Df80ba6717868A9f8 cast send --legacy --value 100ether --rpc-url $l2_pp2_url --private-key $private_key 0x93F63c24735f45Cd0266E87353071B64dd86bc05 } - -mint_pol_token(){ - echo "=== Minting POL ===" >&3 +mint_pol_token() { + echo "=== Minting POL ===" >&3 cast send \ - --rpc-url $l1_rpc_url \ - --private-key $private_key \ - $pol_address \ - "$mint_fn_sig" \ - $eth_address 10000000000000000000000 + --rpc-url $l1_rpc_url \ + --private-key $private_key \ + $pol_address \ + "$mint_fn_sig" \ + $eth_address 10000000000000000000000 # Allow bridge to spend it cast send \ - --rpc-url $l1_rpc_url \ - --private-key $private_key \ - $pol_address \ - "$approve_fn_sig" \ - $bridge_address 10000000000000000000000 + --rpc-url $l1_rpc_url \ + --private-key $private_key \ + $pol_address \ + "$approve_fn_sig" \ + $bridge_address 10000000000000000000000 } diff --git a/test/bats/helpers/common-setup.bash b/test/bats/helpers/common-setup.bash index 485d2337..5f53cbf8 100644 --- a/test/bats/helpers/common-setup.bash +++ b/test/bats/helpers/common-setup.bash @@ -18,7 +18,7 @@ _common_setup() { # Kurtosis enclave and service identifiers - readonly enclave=${KURTOSIS_ENCLAVE:-aggkit} + readonly enclave=${KURTOSIS_ENCLAVE:-cdk} readonly contracts_container=${KURTOSIS_CONTRACTS:-contracts-001} readonly contracts_service_wrapper=${KURTOSIS_CONTRACTS_WRAPPER:-"kurtosis service exec $enclave $contracts_container"} readonly erigon_rpc_node=${KURTOSIS_ERIGON_RPC:-cdk-erigon-rpc-001} diff --git a/test/bats/helpers/lxly-bridge.bash b/test/bats/helpers/lxly-bridge.bash index a8180f9d..6bc937f4 100644 --- a/test/bats/helpers/lxly-bridge.bash +++ b/test/bats/helpers/lxly-bridge.bash @@ 
-53,15 +53,15 @@ function bridge_asset() { else local tmp_response_file=$(mktemp) if [[ $token_addr == "0x0000000000000000000000000000000000000000" ]]; then - echo "cast send --legacy --private-key $sender_private_key --value $amount --rpc-url $rpc_url $bridge_addr $bridge_sig $destination_net $destination_addr $amount $token_addr $is_forced $meta_bytes" - cast send --legacy --private-key $sender_private_key --value $amount --rpc-url $rpc_url $bridge_addr $bridge_sig $destination_net $destination_addr $amount $token_addr $is_forced $meta_bytes > $tmp_response_file + echo "cast send --legacy --private-key $sender_private_key --value $amount --rpc-url $rpc_url $bridge_addr $bridge_sig $destination_net $destination_addr $amount $token_addr $is_forced $meta_bytes" + cast send --legacy --private-key $sender_private_key --value $amount --rpc-url $rpc_url $bridge_addr $bridge_sig $destination_net $destination_addr $amount $token_addr $is_forced $meta_bytes >$tmp_response_file else echo "cast send --legacy --private-key $sender_private_key --rpc-url $rpc_url $bridge_addr $bridge_sig $destination_net $destination_addr $amount $token_addr $is_forced $meta_bytes" - - cast send --legacy --private-key $sender_private_key --rpc-url $rpc_url $bridge_addr $bridge_sig $destination_net $destination_addr $amount $token_addr $is_forced $meta_bytes > $tmp_response_file + + cast send --legacy --private-key $sender_private_key --rpc-url $rpc_url $bridge_addr $bridge_sig $destination_net $destination_addr $amount $token_addr $is_forced $meta_bytes >$tmp_response_file fi export bridge_tx_hash=$(grep "^transactionHash" $tmp_response_file | cut -f 2- -d ' ' | sed 's/ //g') - echo "bridge_tx_hash=$bridge_tx_hash" + echo "bridge_tx_hash=$bridge_tx_hash" fi } @@ -72,7 +72,7 @@ function claim() { if [[ $bridge_type == "bridgeMessage" ]]; then claim_sig="claimMessage(bytes32[32],bytes32[32],uint256,bytes32,bytes32,uint32,address,uint32,address,uint256,bytes)" fi - + readonly 
bridge_deposit_file=$(mktemp) readonly claimable_deposit_file=$(mktemp) echo "Getting full list of deposits" >&3 @@ -117,12 +117,12 @@ function claim() { if [[ $dry_run == "true" ]]; then cast calldata $claim_sig "$in_merkle_proof" "$in_rollup_merkle_proof" $in_global_index $in_main_exit_root $in_rollup_exit_root $in_orig_net $in_orig_addr $in_dest_net $in_dest_addr $in_amount $in_metadata else - local comp_gas_price=$(bc -l <<< "$gas_price * 1.5" | sed 's/\..*//') + local comp_gas_price=$(bc -l <<<"$gas_price * 1.5" | sed 's/\..*//') if [[ $? -ne 0 ]]; then echo "Failed to calculate gas price" >&3 exit 1 fi - + echo "cast send --legacy --gas-price $comp_gas_price --rpc-url $destination_rpc_url --private-key $sender_private_key $bridge_addr \"$claim_sig\" \"$in_merkle_proof\" \"$in_rollup_merkle_proof\" $in_global_index $in_main_exit_root $in_rollup_exit_root $in_orig_net $in_orig_addr $in_dest_net $in_dest_addr $in_amount $in_metadata" >&3 cast send --legacy --gas-price $comp_gas_price --rpc-url $destination_rpc_url --private-key $sender_private_key $bridge_addr "$claim_sig" "$in_merkle_proof" "$in_rollup_merkle_proof" $in_global_index $in_main_exit_root $in_rollup_exit_root $in_orig_net $in_orig_addr $in_dest_net $in_dest_addr $in_amount $in_metadata fi @@ -137,12 +137,12 @@ function claim() { # - global_index function claim_tx_hash() { - local timeout="$1" + local timeout="$1" tx_hash="$2" local destination_addr="$3" local destination_rpc_url="$4" local bridge_merkle_proof_url="$5" - + readonly bridge_deposit_file=$(mktemp) local ready_for_claim="false" local start_time=$(date +%s) @@ -156,10 +156,10 @@ function claim_tx_hash() { echo " $current_time > $end_time" >&3 exit 1 fi - curl -s "$bridge_merkle_proof_url/bridges/$destination_addr?limit=100&offset=0" | jq "[.deposits[] | select(.tx_hash == \"$tx_hash\" )]" > $bridge_deposit_file + curl -s "$bridge_merkle_proof_url/bridges/$destination_addr?limit=100&offset=0" | jq "[.deposits[] | select(.tx_hash == 
\"$tx_hash\" )]" >$bridge_deposit_file deposit_count=$(jq '. | length' $bridge_deposit_file) if [[ $deposit_count == 0 ]]; then - echo "...[$(date '+%Y-%m-%d %H:%M:%S')] ❌ the tx_hash [$tx_hash] not found (elapsed: $elpased_time / timeout:$timeout)" >&3 + echo "...[$(date '+%Y-%m-%d %H:%M:%S')] ❌ the tx_hash [$tx_hash] not found (elapsed: $elpased_time / timeout:$timeout)" >&3 sleep "$claim_frequency" continue fi @@ -180,17 +180,17 @@ function claim_tx_hash() { exit 0 fi local curr_deposit_cnt=$(jq '.[0].deposit_cnt' $bridge_deposit_file) - local curr_network_id=$(jq '.[0].network_id' $bridge_deposit_file) + local curr_network_id=$(jq '.[0].network_id' $bridge_deposit_file) readonly current_deposit=$(mktemp) jq '.[(0|tonumber)]' $bridge_deposit_file | tee $current_deposit readonly current_proof=$(mktemp) echo ".... requesting merkel proof for $tx_hash deposit_cnt=$curr_deposit_cnt network_id: $curr_network_id" >&3 request_merkle_proof "$curr_deposit_cnt" "$curr_network_id" "$bridge_merkle_proof_url" "$current_proof" - echo "FILE current_deposit=$current_deposit" - echo "FILE bridge_deposit_file=$bridge_deposit_file" - echo "FILE current_proof=$current_proof" + echo "FILE current_deposit=$current_deposit" + echo "FILE bridge_deposit_file=$bridge_deposit_file" + echo "FILE current_proof=$current_proof" - while true; do + while true; do echo ".... requesting claim for $tx_hash" >&3 run request_claim $current_deposit $current_proof $destination_rpc_url request_result=$status @@ -223,14 +223,14 @@ function claim_tx_hash() { rm $current_deposit rm $current_proof rm $bridge_deposit_file - + } -function request_merkle_proof(){ +function request_merkle_proof() { local curr_deposit_cnt="$1" local curr_network_id="$2" local bridge_merkle_proof_url="$3" local result_proof_file="$4" - curl -s "$bridge_merkle_proof_url/merkle-proof?deposit_cnt=$curr_deposit_cnt&net_id=$curr_network_id" | jq '.' 
> $result_proof_file + curl -s "$bridge_merkle_proof_url/merkle-proof?deposit_cnt=$curr_deposit_cnt&net_id=$curr_network_id" | jq '.' >$result_proof_file echo "request_merkle_proof: $result_proof_file" } @@ -240,16 +240,16 @@ function request_merkle_proof(){ # -gas_price # -sender_private_key # -bridge_addr -function request_claim(){ +function request_claim() { local deposit_file="$1" local proof_file="$2" local destination_rpc_url="$3" - + local leaf_type=$(jq -r '.leaf_type' $deposit_file) local claim_sig="claimAsset(bytes32[32],bytes32[32],uint256,bytes32,bytes32,uint32,address,uint32,address,uint256,bytes)" - + if [[ $leaf_type != "0" ]]; then - claim_sig="claimMessage(bytes32[32],bytes32[32],uint256,bytes32,bytes32,uint32,address,uint32,address,uint256,bytes)" + claim_sig="claimMessage(bytes32[32],bytes32[32],uint256,bytes32,bytes32,uint32,address,uint32,address,uint256,bytes)" fi local in_merkle_proof="$(jq -r -c '.proof.merkle_proof' $proof_file | tr -d '"')" @@ -264,36 +264,36 @@ function request_claim(){ local in_amount=$(jq -r '.amount' $deposit_file) local in_metadata=$(jq -r '.metadata' $deposit_file) if [[ $dry_run == "true" ]]; then - echo "... Not real cleaim (dry_run mode)" >&3 - cast calldata $claim_sig "$in_merkle_proof" "$in_rollup_merkle_proof" $in_global_index $in_main_exit_root $in_rollup_exit_root $in_orig_net $in_orig_addr $in_dest_net $in_dest_addr $in_amount $in_metadata - else - local comp_gas_price=$(bc -l <<< "$gas_price * 1.5" | sed 's/\..*//') - if [[ $? -ne 0 ]]; then - echo "Failed to calculate gas price" >&3 - exit 1 - fi - echo "... 
Claiming deposit: global_index: $in_global_index orig_net: $in_orig_net dest_net: $in_dest_net amount:$in_amount" >&3 - echo "claim: mainnetExitRoot=$in_main_exit_root rollupExitRoot=$in_rollup_exit_root" - echo "cast send --legacy --gas-price $comp_gas_price --rpc-url $destination_rpc_url --private-key $sender_private_key $bridge_addr \"$claim_sig\" \"$in_merkle_proof\" \"$in_rollup_merkle_proof\" $in_global_index $in_main_exit_root $in_rollup_exit_root $in_orig_net $in_orig_addr $in_dest_net $in_dest_addr $in_amount $in_metadata" - local tmp_response=$(mktemp) - cast send --legacy --gas-price $comp_gas_price \ - --rpc-url $destination_rpc_url \ - --private-key $sender_private_key \ - $bridge_addr "$claim_sig" "$in_merkle_proof" "$in_rollup_merkle_proof" $in_global_index $in_main_exit_root $in_rollup_exit_root $in_orig_net $in_orig_addr $in_dest_net $in_dest_addr $in_amount $in_metadata 2> $tmp_response || check_claim_revert_code $tmp_response + echo "... Not real cleaim (dry_run mode)" >&3 + cast calldata $claim_sig "$in_merkle_proof" "$in_rollup_merkle_proof" $in_global_index $in_main_exit_root $in_rollup_exit_root $in_orig_net $in_orig_addr $in_dest_net $in_dest_addr $in_amount $in_metadata + else + local comp_gas_price=$(bc -l <<<"$gas_price * 1.5" | sed 's/\..*//') + if [[ $? -ne 0 ]]; then + echo "Failed to calculate gas price" >&3 + exit 1 fi + echo "... 
Claiming deposit: global_index: $in_global_index orig_net: $in_orig_net dest_net: $in_dest_net amount:$in_amount" >&3 + echo "claim: mainnetExitRoot=$in_main_exit_root rollupExitRoot=$in_rollup_exit_root" + echo "cast send --legacy --gas-price $comp_gas_price --rpc-url $destination_rpc_url --private-key $sender_private_key $bridge_addr \"$claim_sig\" \"$in_merkle_proof\" \"$in_rollup_merkle_proof\" $in_global_index $in_main_exit_root $in_rollup_exit_root $in_orig_net $in_orig_addr $in_dest_net $in_dest_addr $in_amount $in_metadata" + local tmp_response=$(mktemp) + cast send --legacy --gas-price $comp_gas_price \ + --rpc-url $destination_rpc_url \ + --private-key $sender_private_key \ + $bridge_addr "$claim_sig" "$in_merkle_proof" "$in_rollup_merkle_proof" $in_global_index $in_main_exit_root $in_rollup_exit_root $in_orig_net $in_orig_addr $in_dest_net $in_dest_addr $in_amount $in_metadata 2>$tmp_response || check_claim_revert_code $tmp_response + fi } -function check_claim_revert_code(){ +function check_claim_revert_code() { local file_curl_reponse="$1" # 0x646cf558 -> AlreadyClaimed() - echo "check revert " + echo "check revert " cat $file_curl_reponse - cat $file_curl_reponse | grep "0x646cf558" > /dev/null + cat $file_curl_reponse | grep "0x646cf558" >/dev/null if [ $? -eq 0 ]; then echo "....[$(date '+%Y-%m-%d %H:%M:%S')] 🎉 deposit is already claimed (revert code 0x646cf558)" >&3 return 0 fi - cat $file_curl_reponse | grep "0x002f6fad" > /dev/null + cat $file_curl_reponse | grep "0x002f6fad" >/dev/null if [ $? 
-eq 0 ]; then echo "....[$(date '+%Y-%m-%d %H:%M:%S')] 🎉 GlobalExitRootInvalid()(revert code 0x002f6fad)" >&3 return 2 @@ -304,10 +304,10 @@ function check_claim_revert_code(){ } function wait_for_claim() { - local timeout="$1" # timeout (in seconds) - local claim_frequency="$2" # claim frequency (in seconds) + local timeout="$1" # timeout (in seconds) + local claim_frequency="$2" # claim frequency (in seconds) local destination_rpc_url="$3" # destination rpc url - local bridge_type="$4" # bridgeAsset or bridgeMessage + local bridge_type="$4" # bridgeAsset or bridgeMessage local start_time=$(date +%s) local end_time=$((start_time + timeout)) diff --git a/test/combinations/fork11-rollup.yml b/test/combinations/fork11-rollup.yml deleted file mode 100644 index a1fd027e..00000000 --- a/test/combinations/fork11-rollup.yml +++ /dev/null @@ -1,9 +0,0 @@ -args: - zkevm_contracts_image: leovct/zkevm-contracts:v7.0.0-rc.2-fork.11-patch.1 - zkevm_prover_image: hermeznetwork/zkevm-prover:v7.0.2-fork.11 - cdk_erigon_node_image: hermeznetwork/cdk-erigon:v2.1.2 - zkevm_node_image: hermeznetwork/zkevm-node:v0.7.0-fork11-RC1 - cdk_node_image: aggkit:latest - gas_token_enabled: true - data_availability_mode: rollup - sequencer_type: erigon diff --git a/test/combinations/fork12-cdk-validium.yml b/test/combinations/fork12-cdk-validium.yml deleted file mode 100644 index 7982192e..00000000 --- a/test/combinations/fork12-cdk-validium.yml +++ /dev/null @@ -1,8 +0,0 @@ -args: - zkevm_contracts_image: leovct/zkevm-contracts:v8.0.0-rc.4-fork.12-patch.1 - zkevm_prover_image: hermeznetwork/zkevm-prover:v8.0.0-RC12-fork.12 - cdk_erigon_node_image: hermeznetwork/cdk-erigon:v2.1.2 - cdk_node_image: aggkit:latest - gas_token_enabled: true - data_availability_mode: cdk-validium - sequencer_type: erigon diff --git a/test/combinations/fork12-pessimistic-multi-attach-second-cdk.yml b/test/combinations/fork12-pessimistic-multi-attach-second-cdk.yml index 3bb16646..47aa6d78 100644 --- 
a/test/combinations/fork12-pessimistic-multi-attach-second-cdk.yml +++ b/test/combinations/fork12-pessimistic-multi-attach-second-cdk.yml @@ -26,7 +26,7 @@ args: zkevm_l2_proofsigner_private_key: "0xc7fe3a006d75ba9326d9792523385abb49057c66aee0b8b4248821a89713f975" - cdk_node_image: aggkit:latest + cdk_node_image: cdk:latest cdk_erigon_node_image: hermeznetwork/cdk-erigon:v2.60.0 zkevm_contracts_image: leovct/zkevm-contracts:v9.0.0-rc.3-pp-fork.12-patch.1 additional_services: [] diff --git a/test/combinations/fork12-pessimistic-multi.yml b/test/combinations/fork12-pessimistic-multi.yml index d8ffead6..46845991 100644 --- a/test/combinations/fork12-pessimistic-multi.yml +++ b/test/combinations/fork12-pessimistic-multi.yml @@ -1,5 +1,5 @@ args: - cdk_node_image: aggkit:latest + cdk_node_image: cdk:latest agglayer_image: ghcr.io/agglayer/agglayer:0.2.0-rc.20 cdk_erigon_node_image: hermeznetwork/cdk-erigon:v2.60.0 zkevm_contracts_image: leovct/zkevm-contracts:v9.0.0-rc.3-pp-fork.12-patch.1 @@ -11,5 +11,5 @@ args: zkevm_use_real_verifier: true enable_normalcy: true verifier_program_vkey: 0x00766aa16a6efe4ac05c0fe21d4b50f9631dbd1a2663a982da861427085ea2ea - agglayer_prover_sp1_key: {{.agglayer_prover_sp1_key}} + agglayer_prover_sp1_key: {{.AGGLAYER_PROVER_SP1_KEY}} diff --git a/test/combinations/fork12-pessimistic.yml b/test/combinations/fork12-pessimistic.yml index f4b17eb4..f4f229d3 100644 --- a/test/combinations/fork12-pessimistic.yml +++ b/test/combinations/fork12-pessimistic.yml @@ -1,7 +1,7 @@ args: agglayer_image: ghcr.io/agglayer/agglayer:0.2.0-rc.20 cdk_erigon_node_image: hermeznetwork/cdk-erigon:v2.60.0-beta8 - cdk_node_image: aggkit:latest + cdk_node_image: cdk zkevm_bridge_proxy_image: haproxy:3.0-bookworm zkevm_bridge_service_image: hermeznetwork/zkevm-bridge-service:v0.6.0-RC1 zkevm_bridge_ui_image: leovct/zkevm-bridge-ui:multi-network @@ -11,5 +11,5 @@ args: sequencer_type: erigon erigon_strict_mode: false gas_token_enabled: true - agglayer_prover_sp1_key: 
{{.agglayer_prover_sp1_key}} + agglayer_prover_sp1_key: {{.AGGLAYER_PROVER_SP1_KEY}} enable_normalcy: true diff --git a/test/combinations/fork12-rollup.yml b/test/combinations/fork12-rollup.yml deleted file mode 100644 index a16f0311..00000000 --- a/test/combinations/fork12-rollup.yml +++ /dev/null @@ -1,8 +0,0 @@ -args: - zkevm_contracts_image: leovct/zkevm-contracts:v8.0.0-rc.4-fork.12-patch.1 - zkevm_prover_image: hermeznetwork/zkevm-prover:v8.0.0-RC12-fork.12 - cdk_erigon_node_image: hermeznetwork/cdk-erigon:v2.1.2 - cdk_node_image: aggkit:latest - gas_token_enabled: true - data_availability_mode: rollup - sequencer_type: erigon diff --git a/test/combinations/fork9-cdk-validium.yml b/test/combinations/fork9-cdk-validium.yml deleted file mode 100644 index dda3d6ed..00000000 --- a/test/combinations/fork9-cdk-validium.yml +++ /dev/null @@ -1,12 +0,0 @@ -args: - zkevm_contracts_image: leovct/zkevm-contracts:v6.0.0-rc.1-fork.9-patch.1 - zkevm_prover_image: hermeznetwork/zkevm-prover:v6.0.6 - cdk_erigon_node_image: hermeznetwork/cdk-erigon:v2.1.2 - zkevm_node_image: hermeznetwork/zkevm-node:v0.7.3-RC1 - cdk_validium_node_image: 0xpolygon/cdk-validium-node:0.7.0-cdk - cdk_node_image: aggkit:latest - gas_token_enabled: true - additional_services: - - pless_zkevm_node - data_availability_mode: cdk-validium - sequencer_type: erigon diff --git a/test/config/kurtosis-cdk-node-config.toml.template b/test/config/kurtosis-cdk-node-config.toml.template index 45b938a2..7a2624c1 100644 --- a/test/config/kurtosis-cdk-node-config.toml.template +++ b/test/config/kurtosis-cdk-node-config.toml.template @@ -20,8 +20,6 @@ L2Coinbase = "{{.zkevm_l2_sequencer_address}}" SequencerPrivateKeyPath = "{{or .zkevm_l2_sequencer_keystore_file "/etc/cdk/sequencer.keystore"}}" SequencerPrivateKeyPassword = "{{.zkevm_l2_keystore_password}}" -AggregatorPrivateKeyPath = "{{or .zkevm_l2_aggregator_keystore_file "/etc/cdk/aggregator.keystore"}}" -AggregatorPrivateKeyPassword = 
"{{.zkevm_l2_keystore_password}}" SenderProofToL1Addr = "{{.zkevm_l2_agglayer_address}}" polygonBridgeAddr = "{{.zkevm_bridge_address}}" @@ -46,13 +44,6 @@ genesisBlockNumber = "{{.zkevm_rollup_manager_block_number}}" Environment = "development" # "production" or "development" Level = "{{.global_log_level}}" Outputs = ["stderr"] - -[Aggregator] - Port = "{{.zkevm_aggregator_port}}" - RetryTime = "30s" - VerifyProofInterval = "10s" - GasOffset = 150000 - SettlementBackend = "agglayer" [AggSender] SaveCertificatesToFilesPath = "{{.zkevm_path_rw_data}}/" diff --git a/test/config/test.config.toml b/test/config/test.config.toml index 7ed0e7b0..f21ae965 100644 --- a/test/config/test.config.toml +++ b/test/config/test.config.toml @@ -1,105 +1,3 @@ [Common] IsValidiumMode = false ContractVersions = "banana" - -[SequenceSender] -WaitPeriodSendSequence = "15s" -LastBatchVirtualizationTimeMaxWaitPeriod = "10s" -L1BlockTimestampMargin = "30s" -MaxTxSizeForL1 = 131072 -L2Coinbase = "0xf39fd6e51aad88f6f4ce6ab8827279cfffb92266" -PrivateKey = {Path = "./test/sequencer.keystore", Password = "testonly"} -SequencesTxFileName = "sequencesender.json" -GasOffset = 80000 -WaitPeriodPurgeTxFile = "60m" -MaxPendingTx = 1 -RPCURL = "http://127.0.0.1:8123" -GetBatchWaitInterval = "10s" - [SequenceSender.EthTxManager] - FrequencyToMonitorTxs = "1s" - WaitTxToBeMined = "2m" - GetReceiptMaxTime = "250ms" - GetReceiptWaitInterval = "1s" - PrivateKeys = [ - {Path = "./test/sequencer.keystore", Password = "testonly"}, - ] - ForcedGas = 0 - GasPriceMarginFactor = 1 - MaxGasPriceLimit = 0 - StoragePath = "ethtxmanager.db" - ReadPendingL1Txs = false - SafeStatusL1NumberOfBlocks = 5 - FinalizedStatusL1NumberOfBlocks = 10 - [SequenceSender.EthTxManager.Etherman] - URL = "http://127.0.0.1:8545" - MultiGasProvider = false - L1ChainID = 1337 - HTTPHeaders = [] - -[Aggregator] -Host = "0.0.0.0" -Port = 50081 -RetryTime = "5s" -VerifyProofInterval = "10s" -TxProfitabilityCheckerType = "acceptall" 
-TxProfitabilityMinReward = "1.1" -ProofStatePollingInterval = "5s" -SenderAddress = "0x3f2963d678442c4af27a797453b64ef6ce9443e9" -CleanupLockedProofsInterval = "2m" -GeneratingProofCleanupThreshold = "10m" -BatchProofSanityCheckEnabled = true -ForkId = 9 -GasOffset = 0 -RPCURL = "http://127.0.0.1:8123" -WitnessURL = "http://127.0.0.1:8123" -SettlementBackend = "l1" -AggLayerTxTimeout = "5m" -AggLayerURL = "" -SyncModeOnlyEnabled = false -UseFullWitness = false -SequencerPrivateKey = {} - [Aggregator.Log] - Environment = "development" # "production" or "development" - Level = "info" - Outputs = ["stderr"] - [Aggregator.EthTxManager] - FrequencyToMonitorTxs = "1s" - WaitTxToBeMined = "2m" - GetReceiptMaxTime = "250ms" - GetReceiptWaitInterval = "1s" - PrivateKeys = [ - {Path = "/pk/aggregator.keystore", Password = "testonly"}, - ] - ForcedGas = 0 - GasPriceMarginFactor = 1 - MaxGasPriceLimit = 0 - StoragePath = "" - ReadPendingL1Txs = false - SafeStatusL1NumberOfBlocks = 0 - FinalizedStatusL1NumberOfBlocks = 0 - [Aggregator.EthTxManager.Etherman] - URL = "" - L1ChainID = 11155111 - HTTPHeaders = [] - [Aggregator.Synchronizer] - [Aggregator.Synchronizer.DB] - Name = "sync_db" - User = "sync_user" - Password = "sync_password" - Host = "aggkit-l1-sync-db" - Port = "5432" - EnableLog = false - MaxConns = 10 - [Aggregator.Synchronizer.Synchronizer] - SyncInterval = "10s" - SyncChunkSize = 1000 - GenesisBlockNumber = 5511080 - SyncUpToBlock = "finalized" - BlockFinality = "finalized" - [Aggregator.Synchronizer.Etherman] - [Aggregator.Synchronizer.Etherman.Validium] - Enabled = false - TrustedSequencerURL = "" - DataSourcePriority = ["trusted", "external"] - [Aggregator.Synchronizer.Etherman.Validium.Translator] - FullMatchRules = [] diff --git a/test/docker-compose.yml b/test/docker-compose.yml index a0208440..8241d119 100644 --- a/test/docker-compose.yml +++ b/test/docker-compose.yml @@ -1,12 +1,12 @@ networks: default: - name: aggkit + name: cdk services: 
cdk-sequence-sender: container_name: cdk-sequence-sender restart: no - image: aggkit + image: cdk build: . volumes: - ./config/test.config.toml:/app/config.toml @@ -27,51 +27,8 @@ services: command: > zkProver -c /usr/src/app/config.json - cdk-aggregator: - container_name: cdk-aggregator - image: aggkit - ports: - - 50081:50081 - - 9093:9091 # needed if metrics enabled - environment: - - CDK_AGGREGATOR_DB_HOST=cdk-aggregator-db - - CDK_AGGREGATOR_SENDER_ADDRESS=0xf39fd6e51aad88f6f4ce6ab8827279cfffb92266 - volumes: - - ./config/test.config.toml:/app/config.toml - - ./config/test.genesis.json:/app/genesis.json - - ./aggregator.keystore:/pk/aggregator.keystore - command: - - "/bin/sh" - - "-c" - - "/app/aggkit run --cfg /app/config.toml --network custom --custom-network-file /app/genesis.json --components aggregator" - depends_on: - cdk-aggregator-db: - condition: service_started - aggkit-l1-sync-db: - condition: service_started - - cdk-aggregator-db: - container_name: cdk-aggregator-db - image: postgres:15 - deploy: - resources: - limits: - memory: 2G - reservations: - memory: 1G - ports: - - 5434:5432 - environment: - - POSTGRES_USER=aggregator_user - - POSTGRES_PASSWORD=aggregator_password - - POSTGRES_DB=aggregator_db - command: - - "postgres" - - "-N" - - "500" - - aggkit-l1-sync-db: - container_name: aggkit-l1-sync-db + cdk-l1-sync-db: + container_name: cdk-l1-sync-db image: postgres:15 deploy: resources: diff --git a/test/helpers/e2e.go b/test/helpers/e2e.go index e9a52885..2c402784 100644 --- a/test/helpers/e2e.go +++ b/test/helpers/e2e.go @@ -126,10 +126,10 @@ func L1Setup(t *testing.T) *L1Environment { go l1InfoTreeSync.Start(ctx) const ( - waitForNewBlocksPeriod = 10 * time.Millisecond + waitForNewBlocksPeriod = time.Millisecond * 10 originNetwork = 1 initialBlock = 0 - retryPeriod = 50 * time.Millisecond + retryPeriod = time.Millisecond * 30 retriesCount = 10 ) @@ -192,7 +192,7 @@ func L2Setup(t *testing.T) *L2Environment { originNetwork = 1 initialBlock = 
0 retryPeriod = 50 * time.Millisecond - retriesCount = 10 + retriesCount = 100 ) bridgeL2Sync, err := bridgesync.NewL2( diff --git a/test/run-e2e-multi_pp.sh b/test/run-e2e-multi_pp.sh index 8d8f4928..f6301356 100755 --- a/test/run-e2e-multi_pp.sh +++ b/test/run-e2e-multi_pp.sh @@ -17,15 +17,15 @@ function ok_or_fatal(){ } function build_docker_if_required(){ - docker images -q aggkit:latest > /dev/null + docker images -q cdk:latest > /dev/null if [ $? -ne 0 ] ; then - echo "Building aggkit:latest" + echo "Building cdk:latest" pushd $BASE_FOLDER/.. make build-docker ok_or_fatal "Failed to build docker image" popd else - echo "docker aggkit:latest already exists" + echo "docker cdk:latest already exists" fi } @@ -49,7 +49,7 @@ function resolve_template(){ BASE_FOLDER=$(dirname $0) PP1_ORIGIN_CONFIG_FILE=combinations/fork12-pessimistic-multi.yml PP2_ORIGIN_CONFIG_FILE=combinations/fork12-pessimistic-multi-attach-second-cdk.yml -KURTOSIS_ENCLAVE=aggkit +KURTOSIS_ENCLAVE=cdk [ -z $KURTOSIS_FOLDER ] && echo "KURTOSIS_FOLDER is not set" && exit 1 [ ! -d $KURTOSIS_FOLDER ] && echo "KURTOSIS_FOLDER is not a directory ($KURTOSIS_FOLDER)" && exit 1 diff --git a/test/run-e2e.sh b/test/run-e2e.sh index e1f4d117..300c5413 100755 --- a/test/run-e2e.sh +++ b/test/run-e2e.sh @@ -3,7 +3,7 @@ source $(dirname $0)/scripts/env.sh FORK=$1 if [ -z $FORK ]; then - echo "Missing FORK: ['fork9', 'fork12']" + echo "Missing FORK: [valid values: 'fork12']" exit 1 fi @@ -14,22 +14,22 @@ if [ -z $DATA_AVAILABILITY_MODE ]; then fi BASE_FOLDER=$(dirname $0) -docker images -q aggkit:latest > /dev/null +docker images -q cdk:latest > /dev/null if [ $? -ne 0 ] ; then - echo "Building aggkit:latest" + echo "Building cdk:latest" pushd $BASE_FOLDER/.. 
make build-docker popd else - echo "docker aggkit:latest already exists" + echo "docker cdk:latest already exists" fi kurtosis clean --all -echo "Override aggkit config file" +echo "Override cdk config file" cp $BASE_FOLDER/config/kurtosis-cdk-node-config.toml.template $KURTOSIS_FOLDER/templates/trusted-node/cdk-node-config.toml KURTOSIS_CONFIG_FILE="combinations/$FORK-$DATA_AVAILABILITY_MODE.yml" TEMP_CONFIG_FILE=$(mktemp --suffix ".yml") echo "rendering $KURTOSIS_CONFIG_FILE to temp file $TEMP_CONFIG_FILE" go run ../scripts/run_template.go $KURTOSIS_CONFIG_FILE > $TEMP_CONFIG_FILE -kurtosis run --enclave aggkit --args-file "$TEMP_CONFIG_FILE" --image-download always $KURTOSIS_FOLDER +kurtosis run --enclave cdk --args-file "$TEMP_CONFIG_FILE" --image-download always $KURTOSIS_FOLDER rm $TEMP_CONFIG_FILE \ No newline at end of file diff --git a/test/scripts/agglayer_certificates_monitor.sh b/test/scripts/agglayer_certificates_monitor.sh index 1206c8cb..c530548f 100755 --- a/test/scripts/agglayer_certificates_monitor.sh +++ b/test/scripts/agglayer_certificates_monitor.sh @@ -28,7 +28,7 @@ function check_timeout(){ } function check_num_certificates(){ - readonly agglayer_rpc_url="$(kurtosis port print aggkit agglayer agglayer)" + readonly agglayer_rpc_url="$(kurtosis port print cdk agglayer agglayer)" cast_output=$(cast rpc --rpc-url "$agglayer_rpc_url" "interop_getLatestKnownCertificateHeader" "$l2_rpc_network_id" 2>&1) diff --git a/test/scripts/batch_verification_monitor.sh b/test/scripts/batch_verification_monitor.sh index 4b3e9857..a0bfaefd 100755 --- a/test/scripts/batch_verification_monitor.sh +++ b/test/scripts/batch_verification_monitor.sh @@ -17,7 +17,7 @@ timeout="$2" start_time=$(date +%s) end_time=$((start_time + timeout)) -rpc_url="$(kurtosis port print aggkit cdk-erigon-rpc-001 rpc)" +rpc_url="$(kurtosis port print cdk cdk-erigon-rpc-001 rpc)" while true; do verified_batches="$(cast to-dec "$(cast rpc --rpc-url "$rpc_url" zkevm_verifiedBatchNumber | sed 
's/"//g')")" diff --git a/test/scripts/env.sh b/test/scripts/env.sh index af9ff936..298d4f73 100644 --- a/test/scripts/env.sh +++ b/test/scripts/env.sh @@ -1,7 +1,7 @@ #!/bin/bash ### Common variables -KURTOSIS_ENCLAVE=aggkit -TMP_AGGKIT_FOLDER=tmp/aggkit -DEST_KURTOSIS_PARAMS_YML=../$TMP_AGGKIT_FOLDER/e2e-params.yml +KURTOSIS_ENCLAVE=cdk +TMP_CDK_FOLDER=tmp/cdk +DEST_KURTOSIS_PARAMS_YML=../$TMP_CDK_FOLDER/e2e-params.yml KURTOSIS_FOLDER=${KURTOSIS_FOLDER:=../kurtosis-cdk} USE_L1_GAS_TOKEN_CONTRACT=true From 49d6392bf8833335bb391c8ed5a458537b3a7883 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stefan=20Negovanovi=C4=87?= <93934272+Stefan-Ethernal@users.noreply.github.com> Date: Mon, 20 Jan 2025 10:16:11 +0100 Subject: [PATCH 2/3] chore: merge `develop` into `main` branch (#133) Signed-off-by: Arpit Temani Signed-off-by: dependabot[bot] Co-authored-by: Goran Rojovic <100121253+goran-ethernal@users.noreply.github.com> Co-authored-by: Arpit Temani Co-authored-by: Goran Rojovic Co-authored-by: Rachit Sonthalia <54906134+rachit77@users.noreply.github.com> Co-authored-by: Arnau Bennassar Co-authored-by: Joan Esteban <129153821+joanestebanr@users.noreply.github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Victor Castell <0x@vcastellm.xyz> --- .github/dependabot.yml | 7 + .github/workflows/test-e2e.yml | 20 +- .github/workflows/test-unit.yml | 14 +- .mockery.yaml | 92 ++++++ Cargo.lock | 52 ++-- README.md | 21 +- aggoracle/mocks/mock_l2germanager.go | 2 +- ..._storage.go => mock_agg_sender_storage.go} | 0 ...terface.go => mock_aggsender_interface.go} | 4 +- ...der_storer.go => mock_aggsender_storer.go} | 4 +- ...ock_notifier.go => mock_block_notifier.go} | 0 ...och_notifier.go => mock_epoch_notifier.go} | 0 .../{eth_client.go => mock_eth_client.go} | 0 ...bscriber.go => mock_generic_subscriber.go} | 10 +- ..._syncer.go => mock_l1_info_tree_syncer.go} | 0 ...dge_syncer.go => mock_l2_bridge_syncer.go} | 0 
aggsender/mocks/{logger.go => mock_logger.go} | 0 aggsender/rpc/aggsender_rpc.go | 12 +- .../{eth_clienter.go => mock_eth_clienter.go} | 2 +- ...org_detector.go => mock_reorg_detector.go} | 2 +- dataavailability/mocks_da/da_backender.go | 263 ------------------ docs/assets/gitflow.png | Bin 0 -> 101351 bytes docs/release_lifecycle.md | 63 +++++ go.mod | 10 +- go.sum | 20 +- .../{eth_clienter.go => mock_eth_clienter.go} | 2 +- ...eorgdetector.go => mock_reorg_detector.go} | 2 +- reorgdetector/reorgdetector.go | 9 +- reorgdetector/reorgdetector_db.go | 2 +- reorgdetector/reorgdetector_test.go | 99 +++++++ ...ace.go => mock_bridge_client_interface.go} | 0 rpc/mocks/{bridger.go => mock_bridger.go} | 0 ...m_sponsorer.go => mock_claim_sponsorer.go} | 0 ...ce.go => mock_client_factory_interface.go} | 0 ..._interface.go => mock_client_interface.go} | 0 ...l1_info_treer.go => mock_l1_info_treer.go} | 4 +- .../{last_ge_rer.go => mock_last_ge_rer.go} | 0 scripts/local_config | 4 +- sync/driver.go | 19 -- sync/evmdriver.go | 7 + .../{mock_l2_test.go => mock_eth_clienter.go} | 0 ...er_test.go => mock_evm_downloader_full.go} | 0 ...or_test.go => mock_processor_interface.go} | 0 ...etector_test.go => mock_reorg_detector.go} | 0 test/Makefile | 50 +--- ...12-pessimistic-multi-attach-second-cdk.yml | 3 +- .../combinations/fork12-pessimistic-multi.yml | 3 +- test/combinations/fork12-pessimistic.yml | 2 +- test/scripts/env.sh | 4 +- 49 files changed, 361 insertions(+), 447 deletions(-) create mode 100644 .github/dependabot.yml create mode 100644 .mockery.yaml rename aggsender/mocks/{agg_sender_storage.go => mock_agg_sender_storage.go} (100%) rename aggsender/mocks/{aggsender_interface.go => mock_aggsender_interface.go} (94%) rename aggsender/mocks/{aggsender_storer.go => mock_aggsender_storer.go} (97%) rename aggsender/mocks/{block_notifier.go => mock_block_notifier.go} (100%) rename aggsender/mocks/{epoch_notifier.go => mock_epoch_notifier.go} (100%) rename 
aggsender/mocks/{eth_client.go => mock_eth_client.go} (100%) rename aggsender/mocks/{generic_subscriber.go => mock_generic_subscriber.go} (91%) rename aggsender/mocks/{l1_info_tree_syncer.go => mock_l1_info_tree_syncer.go} (100%) rename aggsender/mocks/{l2_bridge_syncer.go => mock_l2_bridge_syncer.go} (100%) rename aggsender/mocks/{logger.go => mock_logger.go} (100%) rename bridgesync/mocks/{eth_clienter.go => mock_eth_clienter.go} (99%) rename bridgesync/mocks/{reorg_detector.go => mock_reorg_detector.go} (99%) delete mode 100644 dataavailability/mocks_da/da_backender.go create mode 100644 docs/assets/gitflow.png create mode 100644 docs/release_lifecycle.md rename l1infotreesync/mocks/{eth_clienter.go => mock_eth_clienter.go} (99%) rename l1infotreesync/mocks/{mock_reorgdetector.go => mock_reorg_detector.go} (99%) rename rpc/mocks/{bridge_client_interface.go => mock_bridge_client_interface.go} (100%) rename rpc/mocks/{bridger.go => mock_bridger.go} (100%) rename rpc/mocks/{claim_sponsorer.go => mock_claim_sponsorer.go} (100%) rename rpc/mocks/{client_factory_interface.go => mock_client_factory_interface.go} (100%) rename rpc/mocks/{client_interface.go => mock_client_interface.go} (100%) rename rpc/mocks/{l1_info_treer.go => mock_l1_info_treer.go} (99%) rename rpc/mocks/{last_ge_rer.go => mock_last_ge_rer.go} (100%) delete mode 100644 sync/driver.go rename sync/{mock_l2_test.go => mock_eth_clienter.go} (100%) rename sync/{mock_downloader_test.go => mock_evm_downloader_full.go} (100%) rename sync/{mock_processor_test.go => mock_processor_interface.go} (100%) rename sync/{mock_reorgdetector_test.go => mock_reorg_detector.go} (100%) diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 00000000..ef4a8cc5 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,7 @@ +version: 2 +updates: + - package-ecosystem: gomod + directory: / + schedule: + interval: weekly + target-branch: develop diff --git a/.github/workflows/test-e2e.yml 
b/.github/workflows/test-e2e.yml index 8df39db4..6522d256 100644 --- a/.github/workflows/test-e2e.yml +++ b/.github/workflows/test-e2e.yml @@ -55,16 +55,6 @@ jobs: - name: Install Kurtosis CDK tools uses: ./kurtosis-cdk/.github/actions/setup-kurtosis-cdk - - name: Install polycli - run: | - POLYCLI_VERSION="${{ vars.POLYCLI_VERSION }}" - tmp_dir=$(mktemp -d) - curl -L "https://github.com/0xPolygon/polygon-cli/releases/download/${POLYCLI_VERSION}/polycli_${POLYCLI_VERSION}_linux_amd64.tar.gz" | tar -xz -C "$tmp_dir" - mv "$tmp_dir"/* /usr/local/bin/polycli - rm -rf "$tmp_dir" - sudo chmod +x /usr/local/bin/polycli - /usr/local/bin/polycli version - - name: Setup Bats and bats libs uses: bats-core/bats-action@2.0.0 @@ -138,18 +128,10 @@ jobs: with: repository: 0xPolygon/kurtosis-cdk path: kurtosis-cdk - ref: jhilliard/multi-pp-testing + ref: main - name: Install Kurtosis CDK tools uses: ./kurtosis-cdk/.github/actions/setup-kurtosis-cdk - - - name: Install polycli - run: | - git clone https://github.com/0xPolygon/polygon-cli -b jhilliard/alonso - cd polygon-cli - make install - cp ~/go/bin/polycli /usr/local/bin/polycli - /usr/local/bin/polycli version - name: Setup Bats and bats libs uses: bats-core/bats-action@2.0.0 diff --git a/.github/workflows/test-unit.yml b/.github/workflows/test-unit.yml index 7db18445..6df8d1ba 100644 --- a/.github/workflows/test-unit.yml +++ b/.github/workflows/test-unit.yml @@ -32,11 +32,9 @@ jobs: - name: Test run: make test-unit - - # TODO: Uncomment the following lines to enable SonarCloud analysis, once the project is set up in SonarCloud - # - # - name: Analyze with SonarCloud - # uses: sonarsource/sonarcloud-github-action@master - # env: - # GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - # SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }} \ No newline at end of file + + - name: Analyze with SonarCloud + uses: sonarsource/sonarcloud-github-action@master + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }} 
diff --git a/.mockery.yaml b/.mockery.yaml new file mode 100644 index 00000000..13b7e49b --- /dev/null +++ b/.mockery.yaml @@ -0,0 +1,92 @@ +issue-845-fix: true +with-expecter: true +resolve-type-alias: false +dir: "{{ .InterfaceDir }}/../mocks" +outpkg: "mocks" +filename: "mock_{{ .InterfaceName | snakecase | lower }}.go" +mockname: "{{ .InterfaceName }}" +packages: + github.com/agglayer/aggkit/agglayer: + config: + inpackage: true + dir: "{{ .InterfaceDir }}" + outpkg: "{{ .PackageName }}" + interfaces: + AgglayerClientInterface: + config: + mockname: AgglayerClientMock + filename: mock_agglayer_client.go + github.com/agglayer/aggkit/aggoracle/chaingersender: + config: + interfaces: + EthTxManager: + configs: + - mockname: EthTxManagerMock + filename: mock_ethtxmanager.go + - mockname: EthTxManagerMock + filename: mock_ethtxmanager.go + dir: "{{ .InterfaceDir }}/../../test/helpers" + outpkg: "helpers" + L2GERManagerContract: + config: + mockname: L2GERManagerMock + filename: mock_l2germanager.go + github.com/agglayer/aggkit/aggsender/db: + config: + all: true + github.com/agglayer/aggkit/aggsender/rpc: + config: + all: true + github.com/agglayer/aggkit/aggsender/types: + config: + all: true + github.com/agglayer/aggkit/bridgesync: + config: + dir: "{{ .InterfaceDir }}/mocks" + interfaces: + ReorgDetector: + EthClienter: + github.com/agglayer/aggkit/l1infotreesync: + config: + dir: "{{ .InterfaceDir }}/mocks" + interfaces: + EthClienter: + github.com/agglayer/aggkit/reorgdetector: + config: + dir: "{{ .InterfaceDir }}" + outpkg: "{{ .PackageName }}" + mockname: "{{ .InterfaceName }}Mock" + interfaces: + EthClient: + github.com/agglayer/aggkit/rpc/client: + config: + all: true + github.com/agglayer/aggkit/rpc: + config: + dir: "{{ .InterfaceDir }}/mocks" + all: true + github.com/agglayer/aggkit/sync: + config: + dir: "{{ .InterfaceDir }}" + outpkg: "{{ .PackageName }}" + mockname: "{{ .InterfaceName }}Mock" + inpackage: true + interfaces: + ReorgDetector: + 
configs: + - dir: "{{ .InterfaceDir }}/../l1infotreesync/mocks" + outpkg: "mocks" + mockname: "{{ .InterfaceName }}Mock" + inpackage: false + - dir: "{{ .InterfaceDir }}" + outpkg: "{{ .PackageName }}" + mockname: "{{ .InterfaceName }}Mock" + processorInterface: + config: + mockname: "ProcessorMock" + evmDownloaderFull: + config: + mockname: "EVMDownloaderMock" + EthClienter: + config: + mockname: "L2Mock" diff --git a/Cargo.lock b/Cargo.lock index 81547bae..65b948cf 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1,6 +1,6 @@ # This file is automatically @generated by Cargo. # It is not intended for manual editing. -version = 3 +version = 4 [[package]] name = "Inflector" @@ -38,31 +38,6 @@ dependencies = [ "cpufeatures", ] -[[package]] -name = "aggkit" -version = "0.1.0" -dependencies = [ - "aggkit-config", - "alloy-json-rpc", - "alloy-rpc-client", - "alloy-transport-http", - "anyhow", - "clap", - "colored", - "dotenvy", - "execute", - "regex", - "reqwest 0.12.8", - "serde", - "serde_json", - "tempfile", - "tokio", - "toml", - "tracing", - "tracing-subscriber", - "url", -] - [[package]] name = "aggkit-config" version = "0.1.0" @@ -722,6 +697,31 @@ dependencies = [ "once_cell", ] +[[package]] +name = "cdk" +version = "0.1.0" +dependencies = [ + "aggkit-config", + "alloy-json-rpc", + "alloy-rpc-client", + "alloy-transport-http", + "anyhow", + "clap", + "colored", + "dotenvy", + "execute", + "regex", + "reqwest 0.12.8", + "serde", + "serde_json", + "tempfile", + "tokio", + "toml", + "tracing", + "tracing-subscriber", + "url", +] + [[package]] name = "cesu8" version = "1.1.0" diff --git a/README.md b/README.md index 76e396dc..a93bd6f1 100644 --- a/README.md +++ b/README.md @@ -6,9 +6,9 @@ Logo Logo -## Polygon AggKit +## AggKit -**Polygon AggKit** is a modular framework that developers can use to build and deploy Pessimistic Proofs enabled chains (TBD). 
+**AggKit** is a modular framework that developers can use to connect networks to the AggLayer @@ -59,18 +59,13 @@ Feel free to [open an issue](https://github.com/agglayer/aggkit/issues/new) if y ## License -Polygon AggKit Copyright (c) 2024 PT Services DMCC -This program is free software: you can redistribute it and/or modify -it under the terms of the GNU Affero General Public License as published -by the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. +Licensed under either of -This program is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Affero General Public License for more details. +* Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or http://www.apache.org/licenses/LICENSE-2.0) +* MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT) -You should have received a copy of the GNU Affero General Public License -along with this program. If not, see . +at your option. + +The SPDX license identifier for this project is `MIT OR Apache-2.0`. 
diff --git a/aggoracle/mocks/mock_l2germanager.go b/aggoracle/mocks/mock_l2germanager.go index a7ec0296..8c1dd89c 100644 --- a/aggoracle/mocks/mock_l2germanager.go +++ b/aggoracle/mocks/mock_l2germanager.go @@ -10,7 +10,7 @@ import ( mock "github.com/stretchr/testify/mock" ) -// L2GERManagerMock is an autogenerated mock type for the L2GERManager type +// L2GERManagerMock is an autogenerated mock type for the L2GERManagerContract type type L2GERManagerMock struct { mock.Mock } diff --git a/aggsender/mocks/agg_sender_storage.go b/aggsender/mocks/mock_agg_sender_storage.go similarity index 100% rename from aggsender/mocks/agg_sender_storage.go rename to aggsender/mocks/mock_agg_sender_storage.go diff --git a/aggsender/mocks/aggsender_interface.go b/aggsender/mocks/mock_aggsender_interface.go similarity index 94% rename from aggsender/mocks/aggsender_interface.go rename to aggsender/mocks/mock_aggsender_interface.go index bf7b6876..df1d129d 100644 --- a/aggsender/mocks/aggsender_interface.go +++ b/aggsender/mocks/mock_aggsender_interface.go @@ -7,7 +7,7 @@ import ( mock "github.com/stretchr/testify/mock" ) -// AggsenderInterface is an autogenerated mock type for the aggsenderInterface type +// AggsenderInterface is an autogenerated mock type for the AggsenderInterface type type AggsenderInterface struct { mock.Mock } @@ -20,7 +20,7 @@ func (_m *AggsenderInterface) EXPECT() *AggsenderInterface_Expecter { return &AggsenderInterface_Expecter{mock: &_m.Mock} } -// Info provides a mock function with given fields: +// Info provides a mock function with no fields func (_m *AggsenderInterface) Info() types.AggsenderInfo { ret := _m.Called() diff --git a/aggsender/mocks/aggsender_storer.go b/aggsender/mocks/mock_aggsender_storer.go similarity index 97% rename from aggsender/mocks/aggsender_storer.go rename to aggsender/mocks/mock_aggsender_storer.go index cc2aa309..3f832a69 100644 --- a/aggsender/mocks/aggsender_storer.go +++ b/aggsender/mocks/mock_aggsender_storer.go @@ -7,7 
+7,7 @@ import ( mock "github.com/stretchr/testify/mock" ) -// AggsenderStorer is an autogenerated mock type for the aggsenderStorer type +// AggsenderStorer is an autogenerated mock type for the AggsenderStorer type type AggsenderStorer struct { mock.Mock } @@ -78,7 +78,7 @@ func (_c *AggsenderStorer_GetCertificateByHeight_Call) RunAndReturn(run func(uin return _c } -// GetLastSentCertificate provides a mock function with given fields: +// GetLastSentCertificate provides a mock function with no fields func (_m *AggsenderStorer) GetLastSentCertificate() (*types.CertificateInfo, error) { ret := _m.Called() diff --git a/aggsender/mocks/block_notifier.go b/aggsender/mocks/mock_block_notifier.go similarity index 100% rename from aggsender/mocks/block_notifier.go rename to aggsender/mocks/mock_block_notifier.go diff --git a/aggsender/mocks/epoch_notifier.go b/aggsender/mocks/mock_epoch_notifier.go similarity index 100% rename from aggsender/mocks/epoch_notifier.go rename to aggsender/mocks/mock_epoch_notifier.go diff --git a/aggsender/mocks/eth_client.go b/aggsender/mocks/mock_eth_client.go similarity index 100% rename from aggsender/mocks/eth_client.go rename to aggsender/mocks/mock_eth_client.go diff --git a/aggsender/mocks/generic_subscriber.go b/aggsender/mocks/mock_generic_subscriber.go similarity index 91% rename from aggsender/mocks/generic_subscriber.go rename to aggsender/mocks/mock_generic_subscriber.go index b4bee4b4..a713d033 100644 --- a/aggsender/mocks/generic_subscriber.go +++ b/aggsender/mocks/mock_generic_subscriber.go @@ -5,11 +5,11 @@ package mocks import mock "github.com/stretchr/testify/mock" // GenericSubscriber is an autogenerated mock type for the GenericSubscriber type -type GenericSubscriber[T interface{}] struct { +type GenericSubscriber[T any] struct { mock.Mock } -type GenericSubscriber_Expecter[T interface{}] struct { +type GenericSubscriber_Expecter[T any] struct { mock *mock.Mock } @@ -23,7 +23,7 @@ func (_m *GenericSubscriber[T]) 
Publish(data T) { } // GenericSubscriber_Publish_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Publish' -type GenericSubscriber_Publish_Call[T interface{}] struct { +type GenericSubscriber_Publish_Call[T any] struct { *mock.Call } @@ -71,7 +71,7 @@ func (_m *GenericSubscriber[T]) Subscribe(subscriberName string) <-chan T { } // GenericSubscriber_Subscribe_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Subscribe' -type GenericSubscriber_Subscribe_Call[T interface{}] struct { +type GenericSubscriber_Subscribe_Call[T any] struct { *mock.Call } @@ -100,7 +100,7 @@ func (_c *GenericSubscriber_Subscribe_Call[T]) RunAndReturn(run func(string) <-c // NewGenericSubscriber creates a new instance of GenericSubscriber. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. // The first argument is typically a *testing.T value. -func NewGenericSubscriber[T interface{}](t interface { +func NewGenericSubscriber[T any](t interface { mock.TestingT Cleanup(func()) }) *GenericSubscriber[T] { diff --git a/aggsender/mocks/l1_info_tree_syncer.go b/aggsender/mocks/mock_l1_info_tree_syncer.go similarity index 100% rename from aggsender/mocks/l1_info_tree_syncer.go rename to aggsender/mocks/mock_l1_info_tree_syncer.go diff --git a/aggsender/mocks/l2_bridge_syncer.go b/aggsender/mocks/mock_l2_bridge_syncer.go similarity index 100% rename from aggsender/mocks/l2_bridge_syncer.go rename to aggsender/mocks/mock_l2_bridge_syncer.go diff --git a/aggsender/mocks/logger.go b/aggsender/mocks/mock_logger.go similarity index 100% rename from aggsender/mocks/logger.go rename to aggsender/mocks/mock_logger.go diff --git a/aggsender/rpc/aggsender_rpc.go b/aggsender/rpc/aggsender_rpc.go index 8b08a509..a9d1950a 100644 --- a/aggsender/rpc/aggsender_rpc.go +++ b/aggsender/rpc/aggsender_rpc.go @@ -12,26 +12,26 @@ const ( base10 = 10 ) -type aggsenderStorer interface 
{ +type AggsenderStorer interface { GetCertificateByHeight(height uint64) (*types.CertificateInfo, error) GetLastSentCertificate() (*types.CertificateInfo, error) } -type aggsenderInterface interface { +type AggsenderInterface interface { Info() types.AggsenderInfo } // AggsenderRPC is the RPC interface for the aggsender type AggsenderRPC struct { logger *log.Logger - storage aggsenderStorer - aggsender aggsenderInterface + storage AggsenderStorer + aggsender AggsenderInterface } func NewAggsenderRPC( logger *log.Logger, - storage aggsenderStorer, - aggsender aggsenderInterface, + storage AggsenderStorer, + aggsender AggsenderInterface, ) *AggsenderRPC { return &AggsenderRPC{ logger: logger, diff --git a/bridgesync/mocks/eth_clienter.go b/bridgesync/mocks/mock_eth_clienter.go similarity index 99% rename from bridgesync/mocks/eth_clienter.go rename to bridgesync/mocks/mock_eth_clienter.go index 3d208e45..673c1e4d 100644 --- a/bridgesync/mocks/eth_clienter.go +++ b/bridgesync/mocks/mock_eth_clienter.go @@ -1,6 +1,6 @@ // Code generated by mockery. DO NOT EDIT. -package mocks_bridgesync +package mocks import ( big "math/big" diff --git a/bridgesync/mocks/reorg_detector.go b/bridgesync/mocks/mock_reorg_detector.go similarity index 99% rename from bridgesync/mocks/reorg_detector.go rename to bridgesync/mocks/mock_reorg_detector.go index d9838db4..8804f75a 100644 --- a/bridgesync/mocks/reorg_detector.go +++ b/bridgesync/mocks/mock_reorg_detector.go @@ -1,6 +1,6 @@ // Code generated by mockery. DO NOT EDIT. -package mocks_bridgesync +package mocks import ( context "context" diff --git a/dataavailability/mocks_da/da_backender.go b/dataavailability/mocks_da/da_backender.go deleted file mode 100644 index 73c732c4..00000000 --- a/dataavailability/mocks_da/da_backender.go +++ /dev/null @@ -1,263 +0,0 @@ -// Code generated by mockery. DO NOT EDIT. 
- -package mocks_da - -import ( - context "context" - - common "github.com/ethereum/go-ethereum/common" - - etherman "github.com/agglayer/aggkit/etherman" - - mock "github.com/stretchr/testify/mock" -) - -// DABackender is an autogenerated mock type for the DABackender type -type DABackender struct { - mock.Mock -} - -type DABackender_Expecter struct { - mock *mock.Mock -} - -func (_m *DABackender) EXPECT() *DABackender_Expecter { - return &DABackender_Expecter{mock: &_m.Mock} -} - -// GetSequence provides a mock function with given fields: ctx, batchHashes, dataAvailabilityMessage -func (_m *DABackender) GetSequence(ctx context.Context, batchHashes []common.Hash, dataAvailabilityMessage []byte) ([][]byte, error) { - ret := _m.Called(ctx, batchHashes, dataAvailabilityMessage) - - if len(ret) == 0 { - panic("no return value specified for GetSequence") - } - - var r0 [][]byte - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, []common.Hash, []byte) ([][]byte, error)); ok { - return rf(ctx, batchHashes, dataAvailabilityMessage) - } - if rf, ok := ret.Get(0).(func(context.Context, []common.Hash, []byte) [][]byte); ok { - r0 = rf(ctx, batchHashes, dataAvailabilityMessage) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).([][]byte) - } - } - - if rf, ok := ret.Get(1).(func(context.Context, []common.Hash, []byte) error); ok { - r1 = rf(ctx, batchHashes, dataAvailabilityMessage) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// DABackender_GetSequence_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetSequence' -type DABackender_GetSequence_Call struct { - *mock.Call -} - -// GetSequence is a helper method to define mock.On call -// - ctx context.Context -// - batchHashes []common.Hash -// - dataAvailabilityMessage []byte -func (_e *DABackender_Expecter) GetSequence(ctx interface{}, batchHashes interface{}, dataAvailabilityMessage interface{}) *DABackender_GetSequence_Call { - return 
&DABackender_GetSequence_Call{Call: _e.mock.On("GetSequence", ctx, batchHashes, dataAvailabilityMessage)} -} - -func (_c *DABackender_GetSequence_Call) Run(run func(ctx context.Context, batchHashes []common.Hash, dataAvailabilityMessage []byte)) *DABackender_GetSequence_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].([]common.Hash), args[2].([]byte)) - }) - return _c -} - -func (_c *DABackender_GetSequence_Call) Return(_a0 [][]byte, _a1 error) *DABackender_GetSequence_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *DABackender_GetSequence_Call) RunAndReturn(run func(context.Context, []common.Hash, []byte) ([][]byte, error)) *DABackender_GetSequence_Call { - _c.Call.Return(run) - return _c -} - -// Init provides a mock function with given fields: -func (_m *DABackender) Init() error { - ret := _m.Called() - - if len(ret) == 0 { - panic("no return value specified for Init") - } - - var r0 error - if rf, ok := ret.Get(0).(func() error); ok { - r0 = rf() - } else { - r0 = ret.Error(0) - } - - return r0 -} - -// DABackender_Init_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Init' -type DABackender_Init_Call struct { - *mock.Call -} - -// Init is a helper method to define mock.On call -func (_e *DABackender_Expecter) Init() *DABackender_Init_Call { - return &DABackender_Init_Call{Call: _e.mock.On("Init")} -} - -func (_c *DABackender_Init_Call) Run(run func()) *DABackender_Init_Call { - _c.Call.Run(func(args mock.Arguments) { - run() - }) - return _c -} - -func (_c *DABackender_Init_Call) Return(_a0 error) *DABackender_Init_Call { - _c.Call.Return(_a0) - return _c -} - -func (_c *DABackender_Init_Call) RunAndReturn(run func() error) *DABackender_Init_Call { - _c.Call.Return(run) - return _c -} - -// PostSequenceBanana provides a mock function with given fields: ctx, sequence -func (_m *DABackender) PostSequenceBanana(ctx context.Context, sequence etherman.SequenceBanana) 
([]byte, error) { - ret := _m.Called(ctx, sequence) - - if len(ret) == 0 { - panic("no return value specified for PostSequenceBanana") - } - - var r0 []byte - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, etherman.SequenceBanana) ([]byte, error)); ok { - return rf(ctx, sequence) - } - if rf, ok := ret.Get(0).(func(context.Context, etherman.SequenceBanana) []byte); ok { - r0 = rf(ctx, sequence) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).([]byte) - } - } - - if rf, ok := ret.Get(1).(func(context.Context, etherman.SequenceBanana) error); ok { - r1 = rf(ctx, sequence) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// DABackender_PostSequenceBanana_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'PostSequenceBanana' -type DABackender_PostSequenceBanana_Call struct { - *mock.Call -} - -// PostSequenceBanana is a helper method to define mock.On call -// - ctx context.Context -// - sequence etherman.SequenceBanana -func (_e *DABackender_Expecter) PostSequenceBanana(ctx interface{}, sequence interface{}) *DABackender_PostSequenceBanana_Call { - return &DABackender_PostSequenceBanana_Call{Call: _e.mock.On("PostSequenceBanana", ctx, sequence)} -} - -func (_c *DABackender_PostSequenceBanana_Call) Run(run func(ctx context.Context, sequence etherman.SequenceBanana)) *DABackender_PostSequenceBanana_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(etherman.SequenceBanana)) - }) - return _c -} - -func (_c *DABackender_PostSequenceBanana_Call) Return(_a0 []byte, _a1 error) *DABackender_PostSequenceBanana_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *DABackender_PostSequenceBanana_Call) RunAndReturn(run func(context.Context, etherman.SequenceBanana) ([]byte, error)) *DABackender_PostSequenceBanana_Call { - _c.Call.Return(run) - return _c -} - -// PostSequenceElderberry provides a mock function with given fields: ctx, batchesData -func (_m 
*DABackender) PostSequenceElderberry(ctx context.Context, batchesData [][]byte) ([]byte, error) { - ret := _m.Called(ctx, batchesData) - - if len(ret) == 0 { - panic("no return value specified for PostSequenceElderberry") - } - - var r0 []byte - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, [][]byte) ([]byte, error)); ok { - return rf(ctx, batchesData) - } - if rf, ok := ret.Get(0).(func(context.Context, [][]byte) []byte); ok { - r0 = rf(ctx, batchesData) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).([]byte) - } - } - - if rf, ok := ret.Get(1).(func(context.Context, [][]byte) error); ok { - r1 = rf(ctx, batchesData) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// DABackender_PostSequenceElderberry_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'PostSequenceElderberry' -type DABackender_PostSequenceElderberry_Call struct { - *mock.Call -} - -// PostSequenceElderberry is a helper method to define mock.On call -// - ctx context.Context -// - batchesData [][]byte -func (_e *DABackender_Expecter) PostSequenceElderberry(ctx interface{}, batchesData interface{}) *DABackender_PostSequenceElderberry_Call { - return &DABackender_PostSequenceElderberry_Call{Call: _e.mock.On("PostSequenceElderberry", ctx, batchesData)} -} - -func (_c *DABackender_PostSequenceElderberry_Call) Run(run func(ctx context.Context, batchesData [][]byte)) *DABackender_PostSequenceElderberry_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].([][]byte)) - }) - return _c -} - -func (_c *DABackender_PostSequenceElderberry_Call) Return(_a0 []byte, _a1 error) *DABackender_PostSequenceElderberry_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *DABackender_PostSequenceElderberry_Call) RunAndReturn(run func(context.Context, [][]byte) ([]byte, error)) *DABackender_PostSequenceElderberry_Call { - _c.Call.Return(run) - return _c -} - -// NewDABackender creates a new instance of 
DABackender. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. -// The first argument is typically a *testing.T value. -func NewDABackender(t interface { - mock.TestingT - Cleanup(func()) -}) *DABackender { - mock := &DABackender{} - mock.Mock.Test(t) - - t.Cleanup(func() { mock.AssertExpectations(t) }) - - return mock -} diff --git a/docs/assets/gitflow.png b/docs/assets/gitflow.png new file mode 100644 index 0000000000000000000000000000000000000000..52bcf7128d72a2ba65514335ef4fc5693ef68301 GIT binary patch literal 101351 zcmeFZcU)6h`Zx?RC{3j(MT$WKsC1OxqKG8Y6p-E&l-{J5P)0#enxLQ{MM03>dmTg& z3m_0$ga{}I5fCAiQ1YG|XLo0JoqfN*|Ht_-T+Th^IZr!JJ2(20uI7HmBaAdOH2X2= z^ZGP2d(kvBd!p#~f|hg>tQh!3>!q)Gj;5rCcMklKeBA_dLq~_^6nLhmfzuwLp@X`B zA5~hOe?DKJJwdaF+76?kx#LI!|D%sC_znF_06)-}pTFUWku>|jUu@v#b{6d)Bfj(2wS1Kl zcd*v5qoIkniH^3St-GuERXg`<*Twx^J)n7Ll>HUKqw96=s|bHr7dJ0Of0bj@9*W=@ zYL+;LpmybCQ3|bN;%Q?M+7yZ%21G z1T^o}YwkYYD#wmN3;pxoPdKmpJN|1WH?Kdo1vV%Fy^)X-mz4PD++e6O)T(&N(f_)O z>3K(2fHSa$s)FPhWorNbdh@Rp|1{F%Un8YtCFTD-^iQw;bLbVX>o?WiUBQ~(s{f+b zAA|q=@{fVa642KFgo>YdrnUk^t1>D}{F7*^jN42XAYS5iJg;X6eglq!{?T@T|DE{x z8+wkbbixI((9oc0Fz3%2`qO?JXV?}sJ}XQ$!H$$fAtcwg z(~`_<8wwg5z<={k+_9=p8w+*Q6=#EVeGEvAJ^~G_4-ISFn zi`N0(Tf{W4r)pXrHC;BX;W58df-l)~(9y3Tf;bC4^;kR<4d00KfU2cz_ z-7C)0>cd}}E(WLnd>n!3wihMcZo2|=yKSj4*$soi66wkgy*ER2dHvq{s5kIUNwJs0 z1ihDdk!ypUWx1CCaS~Tg2YJ!DP$3 z&sC@|1t3rYLt^1d2uo|n~n4en@qm zK`(W__3#%43(p?&_J^FI4h7Q+R@=4dOa~Re%o*&|K2r|&v)WFX?C-Uf$1NxJL^xA^ zsN|8|H3DG9PgZ&-(^7(+3Z|bwVKP=*7Me_5Z`v`W6ra$`%JF*I^Z{oJy92cN9Z}`u z39%@=&SYl58&z72Y<5$y=I(N}{T;5v!4zJ1MzCD(=F>C=^gMf3+pxsWYtD1bn0YNg z3owSLN{JvX23cmc9d-}!s+)Qur$=gfz;m^HI%sK!^A8Y1zDm4oJP?T_JG@ z2Cg@YU;0Gcw3kIG7^2(mRO!e4w9#R9Uoa0abTBqrjF3UV z7dLt|94VsrO#2i|f}M0i!&#fSurRv=3Gd;ZClCnk=@+gg??K=0nH%`BcqEvRIVjSt z#OHrEaj$JS@P)Sk++#5XciO_h17JH^Sq%UO_ou3l7n9&fSYoku6D_~t|86qB>URmo z!^x81^5i2eLyqA}#^21~r;Wz^rg2Zu8O)FR)mvp7N6exMxo9x9}du|%Py*H|swx99*~Nj+uDgW_NxG4?ejwa+0^ 
z1=^4JJ3+v7o;DxG>qCn*pLS8MW&#aHW*2P^1ALqcToaVM1RhZPZXn)6i|&;^Tsy%G zbtWcxAojET&!+C7JE%SMF6RM-fR+>}GX*otAIu?<(m)*cf!Ju@tzjO5)kkEye}@ti9j@R?I|g2 zhGuS<85)^~aMUSlKa8`2&}Ql7>_TD$4cErBY!m=OZ@R4=lXL(NS$FJ-vFxC8jVi;w zej2DV!oZXG9@^JmH+7@-o;yunWD00>!v@Db5(z||XMS2C7a~Z;Xv0J@l_2R?r}H5x z;_h{^bl3V7OC^%X1?g>!}76fZxQeEd-?-t`$IZ{L`k zR#!L|zXfiWtChJNzBQ(Ysz?t*PQ+x^@_P2`yu|4>bvIqBSw|ueqq?Xmi?5Ebb(2`Y z71~!=xyk@0+HPe#!jliPB#4R_cZR?cukJ-I-`JtE-Zt~^cptAS8up_1&FNmsdioXU zEXK(dlYErjnANA)@EcJ}YC?v4A}Imu=0&6p;)|xSU8kL`wFRa|zon9rBReb2LFQv|ToWdv(O9M)SAV0(dbS=KSsl#?%p4{)jFrwy!I4*DLo#c} zqe}U|08_}%Lzkf zSYrK4g%OzRY&$7)eTB!}tKnquXO%IE+V*{5R9(P7bi3=O_-NEY*q&D#RWS}}P1~Dp z{sw^(FOJG~QQ{4kvWMXp*?`JCGe$OxL5b6LJ7?^O7{`v@$_O@c{?4i%>r_^LvF?5x zcYF#F(&dJ;zKja}wdV@({VRvwoOu{6@KV=7e{;0BiqLnI4w*)YF7`g&vim-c%fTXG z`PJlIuZgD9u2Z$DMw^bJwB6V2t&gOTtsrZ0{Lq9qN?*-to1?Sq;O=_KJ<3gcu1>Z@ zdi1>gdw(ln-ru^qpsA5O^m zemuEwh{sx@A}7!&-}e4A${}dfpFbzc8BM9)dyqanRKXx&~u5MWY~ZS9EV0EYUDm^}Eh>gAx7xw*9-UKl*DIZcCj+ zM3ZE2m(_auteDV04xsx9()m4&x!b82U={I1K&VGNu2yj^tSy;yPkzUQ?q3u4c`iMN7n}15c2V@TAPf%GRFsPW zPbm3id#b5Vmn&96XCb!h_3~6GGcAT0e(gI;O75rJIiV0{7G0H%TCIY!=L&>NoEgpZ zRY;jD7oT?N?rB+3BS|}&pTct_2^~LlttX z;^1(Ak0^P6;{5xqLuQ8V2jq6n=#zx(Kjsw&S6ft%lfFMMexL73`cYnD==X7r(n^j2THBC2DQOOjH$vgamj}?-JS3f?2^&#bG70D} z1A12%wsjJ2g`!o&bhbLmuFbYegt*R1VF!>qv}<+l%q-#?J!>0^52Jer*oaYD%_{j~ zI0TYT80V9p;i}AtjwH#Ur+A|T&Rfi1JxAIb@-aENrqwhNmXM3cH8cJg(h)7ZV3;yr za8$`ndlGH1ncJH3nvFjcMXmw9! 
z{Y#e)^7rrG=HlQDeRt48#$wJrWpK~Kn!tjt7mEXil;_rUk~!|4gSV0oT^q!66ZL#v z%}Uu|w49s=RP&q5L+2iqXF6t@XjV7p_q6aAx|R%?_Sau(Hno;9nO*U&JTqQae6*0f z5bJ%bHe`=das1Yh`|FzV7dmpPX(1P!hid#|24@fZRm;+W4ddDKBomyMKN*#K9*rH=Ws?32A z2$66!VfTf&6mPW8P%Eu8QJz#$-eMAJ;9;OL)R)18j^s=5PM08|o&{F6x|;9s@GYA- zwwgZGU{snXb?)G6uC$7=wccm5?#j9`0tvy_?h1$kzwZdyd*Rm}pCjOF;a;G~M^56TE4n{hq*>2Y5D5llCTBuypk!wyHJF>nIgmKLuHh)uJ zc5=4CJ=K^*n)C5l%-I|~qd8FHv?+gQs<-u3=Hq)?hAD<2MwUAGYkc|d?>%91%yil+ zSir25I&S6mM%Wp+S_fxL*I)XNQ$CIsLl#0atM|((^fz#Y_To`Br*zvIa5SEi4 zJg^*DrHhjO7M8QS5KsQ-NR~y5CjEA|hUS=S=^fsaNC$kWnID!AfY%={*K;MDiS}`E z#w-*bwMOZ4&Mo=iV>kS7bDgnR$ygr?DB_ojpd+;jJalAK?pGy!KUVBsD1tS};lKEf zQSO>j16(-28P_$qHoxl_y%SRw9wdPJRcHG1WpfFFVy#OT}^I zDROSxah&CTdwst0;G)Xi{ST|KQSrd$-iKVqbLgmDe>kIW-uIzKbs;5WqQEMYkzt56 zp~E(Bx^uQNj%w!7;~E#o<;d& z)`>+NJ{jY{bVOhNa%t&qgIQ?vqMHH(t~QO;<}P51wB%}uJ`Zn- zSJ5%~#J5;W08LX-Id*ADbH@TrdtMPx#6FMutc(IzQo+4IUl?& ze%JL&iYZ|znYTCMdhj=O1QdV}Kc0USehNDL%Y$&=pYfaJ43ol;#d^wfc;PNW;IQtC zV+f4&K6k}$cuwD?39N(0-d=B=6jaW9EipNZK>k z{JkvsJ@cMZA&%H!)xwf_y{lK&qe-ux@A_$_@)~5Zt?Ox3hvEVoYXorv>$bQ$Gru@a z3NY*&-0F-~rdPz}HVWnD3KuRuJ-6*xFi$v|e{_1#oSz(Y)YDvr1w$HceO|7;uj)D zCyuiw2#>cF6VAtN~$>hRX8{h?3k8KD!*51 zutV~?YGheibA?z`tML9Qx%|cgd+jShzDh>|Ci?x3yOY*V6bNUNR0DOgkboSR?Bdc_ z^dMro`#A4g-&du)Gn6knfz$%@Oy9Y8NtVhC_u!BH%=NwEj3QbHvG+@bqzAy?s8GbbN*yrFH{K zeEsW+50Hd|;{9^s^Tg?LgM_{+F;~6cHJqIBs*YIE;lcH~W-DVSubsS-esWENzWJ2x zDf1>bd-i+AZ#U@Z%KeTxc1Ue*=wRsjRk+n~zISQ@=GRNLEdQn|F}Iyrd^u*5t#r}TRw zauqH^=gXPW2B7X$Wg_Suw+mKIzb_qG4BVNk!~Mt)os$RW!Q&dN0Z-~!%36NyalX^l z)vml#yRHiupH>;M6kNs=s`mG~>Ul_Bjl>mu0DGL&C3R`NQhIcLpf7u6%yc4j(Ct7k5s1 zVlDJ-Sc$QuGY<=gtPdjeJuVR>aOUoM1%>%X^Gilb$e#KrY3J5Jo@**tF&yAo(K$fR ztEQx{TboU3&q>qId6JhqiLAooXFAeeEJw~C-bdjI3N9if6esJ2zEBx&iuI8jzwJ7f zHx^gb+~8i_6UZ~w;+SbF=WFn|1Y1~>-#d$IF(it8v3L>ZTumW{w0x~kcw;7pT2GydJG}iU&m2p0(R` zFXwvT>w!%vg`^zvpb85s5re-*Urvl19@{h*T8?k%CGVNTWzOEEy=cL|ywmkfIhTSV z4Ygv~YHMA!h=otJ=WTr~6+315fy+%8A#`x%@GR?uw-uEk==jTH=Vo6__1aj1ypM0o 
zRsDlX`kT4aIj)w(iwhUO6n4cDr}X!u=SjwIhwu9msLT@oY$)ke{?skur?dD$3?J%-PYmxo=hbP%B(hbl5Odr4RijZC(pM z@AJGxmW~hjW?Sd)Y#3b2k9UN~INFk20H^@6OQv$7g+f@>rJI#iflXwc@%ftEv~pZp zHLhYg(ygd4&{od1bV56hm^!bspu5%4WoU1(>789YC*)mV`=RCe4mrf-w8$3=hg5bf zj~p&##&CG0OjWPYrziCm?&fkpYORh}Wk^h5Gif1z(a^p*_~XR$Omzj${(v;{Lw+yi z|MWFor3pgEQx0>4CtH>kqb6uFW2-?OMQPT{p_or#vn(v1fEC z@G*>+>6qO`Z++k8xbQ^yq_T-1@I%jxuf~UCm<6Y72lU7D=6Lu|e*1bPA*W^VR)Q1P z>C#M{g>ONY)dXCF7c&!xM;N<*Gu}T$QXY@WeGtF3T7RZmZipHHs4weDTf~(Y?4`kxmIIjK zx++|OgD;Z;>vPjKEZ20xHAH###MWHDPc}#gPFqpv{f29spV^7`e2i1Xu|>(&GXVxc zwkjYHVCnu&d>UV)*Tj7^B{wvEqDIdmbFJ}*&1=t&-^-3;({g}wBb%)CR%R(+D?JbL z{D*d?_K(%6h`ah2<0VUb3~RL13`WJVct@LF3VS?-O;+-;*lq zCla3goolsXwSRK-t;(J>iB*#_|fN=dXuqL1i+(Pbz9QS~#2rfr)A2Ck%-Mof=+k)%` zsiAR0-yx~A8ku6PE^Yi2G1vC_%{2c*3EDtZZf_OYcGw&jle*+!cI~lW4rc{nV{aQ$ZjE!+Z@_?-5eya7n7ybH6N$r`QW0As$x604c3F3y4x zAgub1FQ%Poa8II+7ZlM?%kWZH$$(M~gZ1@uz6@wtu|kOF`cf!E*i6-bZ|{fVsL~2z$iGLTRwTC%Ph61QDju?oZR{ zkW8M3jZMI!C*AsMy-iU4Fk6}McR*P_-v45eB?<;(Td_$FgWz@!c~GSTb#!d$orf~A z)Ry;pE}PNCi8EV8El~QTaz+z_|EXz{MS;y8uu4)f2R2KMI$o;_%6(V~>F<3A?TA}@ z+NuHC!K(Cq=@;k0#N9SW*8Az8xME85gR>wg%e?n8C%rewM1cV3?Y0pBlr2yB`x+>I zKl&|(XYm6x;bc$G`x@>4-Sm==N9=fC9>~+^>G-g6MFCtR_DJ>JfZ*7Zp338%47O9T zjFIcx18w53o9w1PA3)oPfIxR~nbL`H5}@=DQE6=qO4XG2PPU9mKpT=t>9mT65VWT7 z(U_W*Y?`fEgWFMi|JdW5GYMUQn)F_UtJz3{vcbX0@PHo4ik<69BVMI^rs&JLz6RjVmsOADT{FsidH{&yJ&(* zkd-H!z5O8cKNr@!p~Q%m=D9R3&F}ysr?GZ>k+VVG(YN3pkDMgg0ixukY`EHO@ z3niQXN7FQya51v10AS|I*)4XiEbtI^OFByk${FoRjWc(>3&DY7kk7gW?dq?a>$cgrSmv6k?oUOqC+J#QCxsbry_)GsUEyfl_>;USJM&ObVy*h zW)~F4L=mQU3KSWb^?;~1Nqsm569EMVMndlzU=SLJ3@xZxXlr!2T0g-e4@*JCQJIG{MUOB?665@-ki|GD6QW@#(<%=nIbfQ;^E$paGfp9O%L zu;m@bF@c@@BijmalU7M|f|Aq~CG|2jfmF_ZKk8FW&6*8=JYwDqSUvX98k>nONUS|c zYo=#51GxD*IT|<(VWL>CJ^T!`4er;59|KSS?e@WoOk<*WhiLUt|qu=5rEO`hh;Y%0JDbcS26*$Lw`egyM{2%F7z|g7k+>1~d zhn;+J$~>eUD+W8dJ%k}jzI1FhOaV0q<)hJUUjd@Cd8GFXNrI9KX~K)oY>*Uoue6T& zfv!1{XF}xEa4ea-n_E&3gjL}WP7=L|rf_i-dV@%XXG98{?FmY!N-Jc~lTUvW|Mk3V8 
zN$H6w*2%kkbfjK#esW-g9~5Y*Tq|%IgsQ!=`B`Fx%RsrDod&9ro$DEDoBN^vp0;x~ ztl^-_Nr{u=G3^^6uqd5e#K6};i}O^-+K-(^jUqo;pxOAf0ALSP_pGWhc4R(}8)o8w z!GC~CiO;8#1J8Yu3?I-{S|}T~VtFL!(aCLpTP0}i-6`oY*Pe(E2gZ$#WDx5Sq95OQ zA0eGh2au@7E?sV`?>nAba6#;o1sBMS2gk$eGniD^^6&nOyZS5QsfoHss=7(NK|dir8=Xp|c!_=IVf z@*#%7X5b18M1%>5au!4asC7e@E)VkjRrz<;t+241J&DKuyKenw3(5+p%cf2P_kXK( z{8J@t_yN%20Q&A<){qAPHT&a;UbDbom9Hy;+^{0zbPQ4u6XSkb=LF z@HZ0v!-@F&bojT^L4%Q3-XrUE2)J3+_&S7mh8Bir{~pq)5~K;;P7YsY+Ti7*aLv;~ zSo|yu{5E%OQWWYk%svzAJJK2&dKp{dyB2f0hdyZSTYkxpC%~n1W*uqtb3Z}qj9VY8 z);{@=4S~wy3g6h|R@2n7430|%!AKo2wI}crJEe5ZFkz@$s5#Cf(RPFu?C-Uh#7IpT zd`RF@AQzv`^p;C-Yc*Al!!_uDdtRmC*Im-&oct})@D8+(Sl|t(Jso6)ePO7(D#z`` z$@AqsRz;}Hkyg%^8jz3E1WP@E2gi|l-#e-jnZQ17f^z)Nvd_rsSqKnIvMZxQ(&+?R z3YH0iF&{@p`v<*Dz-D8t+G;0+)xV}a%FcYF2;ss7L_9|i6vwf?hC)W%wzQaCFUx}r zF6K}qY;=v+I~W>v1$w7I+ihZB=F5&iAwoeAaM!lz4!q*90~1=enZiAJJIT*}cqu;g zJIa5Sm`g!4hanrE4hg_+u|tI}(3anOMlV$_tJF5sl^fu`Lr4V|`l{%V^;VVVQ;gM| z$p`zlco!6H9~_4O(*e;HYN~;LRS1NUw%-}7UFS(SW9b6&0BQQCj-j$NvcwS4OSKB( z%gsip#Oc@b>|jP&2}uz|{q&wSmzJ|U@>5>?symu}9>(F!gqVkdy2LK}iY-+oJOD$=(PE?k`fyi3n=4cvCxEP3! 
z32nd<#dOcz5xN8-@*rmY+HhD0%)Tm)Ks-)hLq{g3LPNfUjS(Aa^P;tN|zNBLn#XZY(tLa3#SQ}fmSMmCCcbX;@A)#51~;vadzKt zLlolx5r6GE5Si8CiC_RcY;y|)R`y?0Iz$WGfiRN+OAMKPI7ZL%2*gI|(Yf{!GSKvW z8i0c?vn6;x7JCD6ew!>oDj6s8KdH zVEyp@Q8Qo%&~2@9u!GFg7ugVH;DZHV+{bt2oY4GWH7^vnKS4$9Q-Fs^Md>k(ur>>T z)Z1v3v|qsqu;;rFjNNw)UM~WaoG^l9Dv%aao);DeWXk3{SnBtK_xG|P`aqog@oReY z*o`RCPoALWW(L&(Liz#Tl2|zt9>}hcQUJ))0DCd^MO6=I!T~4Q!N5fu9cBpzmahO@ zaO(xDpPvAFML-bl5ks7`YbOc;fG+@!;hxNLl%|D&+f>~hXHioz0EBTWfDN;c$bhs7 zP-aiM0+wiDUl+m(-fjZ`m*b!E{|D$JATAG3QQkd+0^5464Q=ZZE#?NhxgVs=?gM3( z{rliy*mDLK5ya-kzJs?n2+uh!S)e-=d}~UoHofPzDH~&zFBycFCN@inam$ZNLxv{u zr(lU7giFuz)zoHy78BeWG)r7tT0Pd%D^BKboZs>eE{*judk2`9}eZ&$6Z7HDQ>8(;RPhVJ$N$!2yzow-93z6it9qo*{~4bZGF z};7TwtRNp#b)AGqjY^7Xgu1yKE_`;CzmQTAI~E-ca8R!DUyWrLrG;5;ifry`qt z?~8%uy{1`4^vp@TE)*8Dq^n^~$mwmrUUGSU$L#aU=><@R%EnI>6J~%sshC7rK;08% zv0=!a%$@nzh7wK$i6``)l}pD?T01u_W_qF_jHZ$(_)fhe1MiQw;2T$p{i!C_E>z;`p6-ZcY*P-K6&fii?Q%crHqb=3SozS{XQdDeKix7j=W=FHn3yn-${b9MA%bw-tSfkbm$ zysBp5Pdc_G01j6_vfotzWg`&Y0!#E4_^=vE9-x%#aWc;RcD%VnR@SwU@Srdm(>oB` zAbx#7by;e`*pf4IYcIyK!x;uAbAb8@$DnwfrL;K}nep3>)H-@yT#y-WoO$l6Z-8!-&HvPJsM}OVh{f^ru%Tx%X1Y9JK$0 zO%@TofL=<+r76zs6a}8n&9|7>JVp4TBD-LK)n595lQpaz&djSZ9B%y6F$q?v!XC)& zEu8n!b}}BeIgY(xq?TmBw=5OGr_=Q!xlmPa%W!y;x7f*_%8Kehkuf`-sfwTpkARaw z4YtNvQm3hzaWe~=qS@LYE+hYN>E?>K<)x;$+23MLgG-RFJy>v2SMSAq4toZilw-dd9u@+f|2 zV)e0dVMd)h>2zapNtJ4~%>SgZx98>NuqaAiOUV5Mw?{ITyzd&zUigagm0E>XYY(hc zS!2_}a>S=(Xkl2UBDRE%nj9X%xgD>eAQzb;-5vB&Ic|Nfs(3*P)E`QaL((Re3Vr?! 
zdqA9eb5|7}tIqb@J2$H=SJu3py-|?2P+76dZKb9=K&;a9N$dSk5IGASR+6Atu{+^{ zx;d~H&{?HnNdBt8-&t=(ZAiooywY?stk!NCc_F?cE?RKipznh(tG<{yHe;=@iq-H~X2>L{UbT9vf^t!Ag6JUFqcsM_#n zIs-_Z!w&_X*|ELll-95pNeV3V>6ktF&4KCK{FlNl!`i>1F*CY=b8kNDvXG?@=6b4I z-*curxjaEQ#i%5%;;*{6FXe@)kELkt-_AJ*c`S~p-er9ka zru{g?6wILXM!Jgi13wI|$h69x&~Xch$41k_wd0WDTdQHZQ`N$snv)g=0F;^)0|i|Q ze+DgjVeSWi+8aZ{(SabxqU^wFq2kikuFmzj`CZRejg-N-`H+88G<%VAfr#B(w?JiF zy@|Wtwz4)s7K87%x8L!S#x7nN&a-`mwdWJYSz@(^wQ)XwrEK;jdjHPC11Nf_`O*v7 znviAIS{X}+4pM&HJRRCX%^kCadkeldaLkGw(N~K)N{b<>xO`Y2839$d{j2=u*rV>! z4uPd^r&8ui+`RrQAg6(VXivUua*oaoLthgw(ztGTrC^q+r zSCpy!RU6j$#5b0KALx`crg~p}T&4{=axafuz%#JB5@#?jtQx=diYbK*4*z@pI$@EhU*2#rAe1w(m|(z2yXT!Q zHukjLdlUG6qY1wNF-hl}eRZJLvhC~#B;bS?L#P)s^FNM1a8d)^gk*>ZuRtPRnp>KH z;2s~u*-&p3-~Pmd%ucI0q76))vyjp9@{^Z0%fRL1kTsTd7KQ2$yLK2@J&+OCi9t!n zZ5BL)cY@*skaiE)by5PnS920rPhXkPH4038X#imO4PejSm>ane)|LgV>7+^UHH5%a zC*E*TaL~XKMTIJKAxn=59MB96VRlSOy#mmQos|?8mcyQO4)8Llb&%#hOxw+3pC`-u z8eB;SB|pv=h>&F|3%tk2CQN8qxmjMSfyx2w3(=Y9z~$Tn*^+zJQPNqM{F88LaEBIr z5FmZ5K?7{98nRD2K-HC=%s#5kC%(_Qe}nlQ@783MmK%+A@MFPz~JsLlePNH{&}^8g?~1)4Dn zmY5-&JWoZV9Pr-U9{?B1W&n(et_!mpINiHU{{ZX-6bHx=NsukP17SZ7So1SUX4FxT z&qv91LZWQcp|z^Q?9|zNuF@Z(BL9%`j!!xmB?9>l!LY=Kx%bnl5XOSOGE)VqG}M|S zA;2D_p3~Lr2$tV~(Kw|{-TdFa$p5o10$g7O(ysk&cHN2b=4baGY(sW6bRF5ynGR`y zGy-y6i84pA5!}5jrh;69dyB>YtT0_^F-=z8B(GIm@0il7X)o`)9l$jLPVvtQv!~p^ zKokdFx2V-X-8yhEg0`zN+PL7UK!~dvdu1ES2l8?o09Ra!1B>X#wdLfgYHIO#?g#=V z^#@Ek%`GA(h{}Tem@s;@6Z=ao;OBw+@Eg&^6T%M}0cSv-vz>GwEb*LBlN$)8*xUw; z!g?T8h#dia*g=~Yj_d&*JLJED&!GgxDvR2*i*Y%CGA-%xzzU}w95+5{dm?r3JTD1bO`{Ge^l(M(84r(iC zU_}E5k{^g*pat$?SleE@i@V0}m#@79jz+i+!vk^%vDw$)^gB<`4a66EKQwh2FF~SY z^91bGGcL^t@>{_2&^-VCAOeKXsI5}uSMQ+Sl2E0me3wokBxAptBK$C*g2<=QhrgU2 zqBJNfw0kn0i~H}zf3~zS0w526|90_L(?f8@7yz}{<2m5;3;3y1*w6tW8U1|$Q~Vl= zPN1o|XX_k+S@&xw%AgnxopK}Kk6%wQ4GyCiua8x!zg_^?Z{WzNfMr5BCGt?x60DM(s59cGg4{a8+ahch`MURQxwT-P(~|q zPmvb1k+KSa*z4yG137%w=9rf*i(7y`HQdn!@-orp? 
z*7&BTC9b%{^ben#CY%M1ya9z#ISxe;c%wOaNY_EUvqeK+a~UFu2>qFe4}gnM3sf%B zVA$XlbE@-6hVYaONGHl*F9(Ca0Y~ElrL{oau(lg?sczpoGG$`If#*U+rUR!#d2fuYQHRwuWpbi?cFS!9N{3j*h0^kNXsCaeO(*Xvr0K~IX zC{m=VUmg}fBGv>K8iD`>o{b?u+~xf#ooon6czDgd{lIX#pveP5Ll{twKj1JAg{$dZ z^nc=^`?7~9btzEcWyhSI6ASFEi`1p0UtI0yg2APM^w>#8HBjkl&iZqy>+-yiWdq*} zm8E|FkM)b%ybWe~$9A;Du}ycqA}%xF;Stu+qZJ z4@GgZd5$&*`8kV>iObH=AuZ?KR9@@{n}4m1Xc6uSER7wvltnCuq>b8r;DaEQ*Uox7 zYav?cw>0r`?Vbd6NJhaR_54IgUIRKWYMS9tsrLl2H$q7*qd?saUu@t9PPuU@=a zMWSx{BEZN4$v0#)h%bga$tN<_(s8?T^0;ye>-h9GNMgFy(cl332p^kJ!%wht0*6J; zCviMrHa#UH5DH7v!m>FWws>6|4mgV7*B%qSyax@`ry*|qm&QgM4bNpczE*M)>DV~p z&>-npC^xi_YiU>pG)5u{zh!^Mq2cO)48pN7pPlF$=pIL;o`xtJ7udbviE8YOEOw%& zU|8nu$c^YVNSxZ90j|(^^*|Vf&)ggPpljsyU|WMa-;TnO{{&-uY!W|J1<=FTDj|Fk0NX{OQFFN#=JnpRA4tyxam4$e{W-(C zMu8-^2|(M1q06boyVOy;Fv}9TXXiwXt^4hZdOtM8Q2ne+pfo&Z@cytn41R=etUcaP z9$d$H_H6Fj@x92c^`(R5aW#SwCtUHxAb5s43A1#2*ReiGW+UA3TNtZe2{9)mTjiyx z%~exDMkJ)QH6lq++Sk8UtH7mzb;iD%3%eXx2IFkdz8yeZXL;1aaHh4>+Ko zlQ|x7CW@Msd)Qr;`oh3+LL;j;p+hHK|3+*Gb-!#TLP*Tk&{1sMkVX?VTfznhHz{H& zR6(5l0m@n++QaT-h!QK}QUdCzKy;cSoeq~JWE6zIgp=tp!EyW-=IJ1va0t3A?NH|P zkS6>UozZ^G?c?{-@V98frRj_qFdqj`di8vznhLyZCW6Rig}~PzPDAuN0qCzk@rWn| zLUD|uJc;z09Jh_0(1cIXB~yomjL|^$L--QCD0|`tuMBnHS>8bL&Yeb4fS@AF9MH#j z=Q;{l02H}!4rtWlsmk9VUBSp{f&^=sZPOzm{3=h!l{BlH-@ zse#v%Km{F&hEDQF5-jS8J6r2_HohLSZAoDRO(sT&NF=9)$L~FhjlK(vAi-%`b7~8O z_rbhV@N4Kqj?C;oVmK`FfF25VliUz(06bI>|NH>i@Hq+LLs0JPSMUG4gI^2>d@cZ# z!nwU>a(`)_L9V7cM+OzXUx5K7HW;}b#((OyUx0A{IHU(m{V)6%@PBOQ5V-nr(k{y< z3Bv!ccJVh709yXy#;BEeYKr z-r*VldSP;7I{sj7-7B$nR!!dWH&4H5QK{5^M{Rr3WA1rMWN+}y=}9eXvVPsvU6FP_ z?66;ivnA1NGE%PTYF)RE8(YhIP^;X>)m*vfR7(U-ZwQ~Vzp>4`(;=+Ns=8j^8}U}6 zRXKQ?hcHsF+qhO;$`_|f{+iy~yR_A^#MH$f;obgCD;3xYP!;Vrm zx?ZYnzf`*&O3n*qTneRZ`MG_r;wW*EpLt~4USiqU&bR1s$0McjqoBpC;&8sp&m*1w zfq=&c4bF(r?XFPfr69sqdjzM|Nd16Z3)V#J`YZB2V}F` zLDDCptpIj(^R;Gs+Qe$E$720#WGQyRxzLEy%_|LmJ7}C}r9>&Vc+3c~HbN=9wt2yI z^1XYAmpo;AC1?(>xBFQya_i%)>g2xA@B4!C^#rLxVDkb-0Q-N*Gx0t0dva})phfWV 
zD-pSsz~M%#x-OMP){yzUkW)`Q+8IQK^I%lLk%TT1nq0W3#87k@u^ZF<)_V8H_u*5>2RsP#>Rl!&Oj`0 z0B~0(J>!F(@EC!RE(=(bSr4)IheY6qd%hvM&g{iOuhse4_yME3O0>&z zQ_Jz8O_c0rNAE5uSeHyURePvckywTPQa6;gUTP2mngVCc;0Om6YUZxsrH>heFvh8ZYx z`$A59T;&kHV;kQZP`6v&6>+@OtJ$Nv&{8aT`flkR%Fa=1`V{Ou`{e1Pjq<@$1XUK* z9ZIN!TY7mL7I0V@Fkf}AAEqYRJ()V(Pj$MB`kK0Vf3KX`FdrA^^;_+t7?hT!?oisRFe{$28os`(bi^SXnxl z_(tUAaLbS9-=Ftl6@wT1<0JK~ZeHE6rUZs;bPmr_7P>c*x|h9HKPTxe4(^Sh>gzV} z@x`+iAU` z=7f#%(9OK>rAz+K8=utPwR#?E#jsJL8h^U0F=s!4u^0U34Kj~RY)!29xGa8@llQvAe=KnD)EbeH0o(*`8mx+wvcZvYMd=wGli6^_!R_as{6l_x=Yh4q%%it&s1 zhKc>{9^-R1tFamuHIyNK>RbL-uy5Dp*i15XS48;~!GChlV3- zo4;)qUwH*STp>u6d9Kzr0%eELyUp+l)II3Qg9vU>akg>rLgC~`wGiLBjm(J&{}|hN z71^e*8WN0h4Ig5U)S6S>d63p%p|}}uZ&|v&)l(%8w;$Y{4XSaq3czlgThH=>BX3oc zQd|yB7~hBuvCC8+o8>i#ekHyA;%XX865sryirv7$NhL_>+LB{9-bg_@6!IpMPEXvE4FkyEk#+l ziPF^SRJ11USFh#Yd0KX5b9jVCF8F_ys@KRGh4>&%ts_gKm>#MY$U^m*W zQ6&IZ;WhO?4gmpgbI}L0IBlU_Gukfz#$rH1-I$4MczQGxuqd|iJU9T|Knt?0atZH( z@7AmO&kq+R$P5pSd{{jXi@Q{5zDwA}GZdYbH2)0$_~pZFy;k+UmL#LszRyVPM0r6| ze1bM(&FzAx`;++B>sGDR1CB6&v70m38{91F&fr0y9tXie`}O;E>Q{o31-Bf{^g6C% z&_1uws_0jnc_6v?j34^^hw+y#%vzU9Ud7o<@vL;GnuxsZ4t|)4^EwZYO{`~qQ9U^+ zP|BO^f2;UwpGZWaV!)~Ppk+=~Y_~sCdGNd>c>ruqmRpGm`AEpG+&1)NsgCXjW=D#) zLO(-neRsY?{JP@!m|Zt>^6Kigts$dZ*ouGl6#B+{_fLV@+@#)KMSH_dpd*ho7#`PgBVm zT3U=TYnP%8xF#7!=5F4(25D<9m?`%(w@&sw ziNo+|#~5#?m7A&_qkUf{fb+}V1sxsWgg{%N6g^vBv+-PQ*lR!emj0a;QAgYV*N=mWPyfK$SKF;I}yA6c1(uSIYFiOwX;5lhu}I}TJAD$YW955 z19cXzqXs(S`n~rZS%h>xWEf__s}% zzHRXMcKPe^Pp{7YX}Z0_fLBHbuZ&`KZO>o;Vv2vTK;Q$FTg-w+hmCW)`LuYb&r$!8 zb-vp)*94d6?)GV{#l4fgj#!B_bJA&tP<$W z?AGw-BM(n@ycuSqzy z*frEO=o!@w3?(16%1mEv*nOvNUAdFEal?Dj)!XcEOA_^yI7*WsC;)kSCY8%1i#THf ziqZ9hZbRqQ)15W1=@$)Nvl?V4;Uj00a3GRP!M3u(XH^kHF-8YpV*;qX_xJm?ZFhH# zM0fC~$xiqDvIP!segFP60@O6&e?>hdeJBmblxm#><{dd4CDlD1n%Dq)+a2;L-lv~d zOg=_EUay?KVms_svR-)4@XWQqaiGljHI%jol+9aci!g|8F)hB%p|RinI%s?RE#UMp zaeCKG%=9y*H?7H^_TjK^V?_gceUnULi)^}`o#=Fow#92BaC*AyB=Sn>qavx=<6tg{ z16PERXj$Li=BoU+hxiMcutbscio+D~ARC-kwsr68PxA2I==xGLx#Ok2T2VLC$>sU9 
zaw*YAWFSbpZbfstayBIXkKX(x2mduJghurdVR{B_X$C*d+B(;4PGyO06go?}ZS59# zGu2LU?|-mpXU$3=O+v4kss8=yT=l81bQn9g{xn*C=Qk5slU(EIAQqshdv6s_*U{6O zT?sZrUFh*H{ypQdb!TWM`d_#sBRS;0za9cd{;;FKyOZ%NnUUx3$E&ZmXT*GV7pK$W95{kSvLOW!^nk&nh-&Aa=XDRRgXY&ixPG{mvJ~hJCo~dDvk@7-Qu9{7tA= z?V`y~{(-tK@4b@g(?VAkJK^{17g`F2c+xeBO<8KJUI|*i-As+jC<h-@cy*ECU- ze{!<}ukK?pBFJUri(YxF+Mbv8n`WQ<#nWWWj_CgL*B=(2F9LFV$d*6JrWn&yizJh(3w?C!Ph z>@sH6BrB9|&cr*l`f8nSK|-bZ#(4?O_R5gmpd3(Qr){^h7f@{V|D|8=se@9DzNLdO zsCK$rRa!sdxHTiwC2UH`gR_x0ytCN#ZZC#aJu_u4JMmev@b5>MpE<@u0gwCn9QkW&VI+$mAia{H;nRaQ*kNy?9`*y_CxMkFK zzR$j3l!<0r5(^uL81F@68b^&nhH&`ZCDsE2Pn4|fNX>JvO_5AF9AfAbFA&6sy*F9i z_UrDha8ynxd6pL?NO-j8^wn(C>-~DZNK^38!sdpslRP1v_*#|85o$T({>OU}9D3!x zYIp2sUapN*bg9F;L$(*YxT+i%2|sl?F8*vw67!hrk~l@7(ukgs-@Fe`5igrcA;LAR zv}@`MRnaymeb(c9)SvpMe!>YLaH6Q6xV;8mQUfAMSCuh!g zSU_0B1b)>$=!_r!W%qiV8@)I~o*R_s@npIDRcS1I#vMtxJgn=lLrx*v7jEc2J93eIMQ~Lu;l;Jv0@kA@2}YSpAAEmx4;P!h zdv=%6zVC|^{M!uaN0~hZl z*^Zuxu{!qwqB*n9xcRG!dBJINq3wPbE(LYG=QI%hrY^M{Pb=d3oA2nBer2IaSJK3r zN%=;j9+^pno(grfQ30!AxbW47GcK!n6&LxO*Y?)Og1gAUZJt0qnd+5_>xU}roh-B_ zwThm|^i+6117;#wiE}gTrDD=51Mxk-i&v;l&}>#;ObMd$Zz>;!B8N){hWy6_I|2&E z9FyGj?uP#bc0L97b2l)G?%kJo+7T3^bc0P#+uNpNT%G`M2}tYeGhWX)lP>MyXo?M@ zC3wp|dgi@e(+gZvppu3~{iEF#EGQ0w$!o2bHhfOD0!~nUs`Um6Z4pct;z$*S5_)z8 zNd>)*ndL%m1WL6X{rVhBvea)7nDs?Lp1C`S&ZyEde5#6aErmh3rU z0#~1n6lW~m$PLu{`9daRm)LQup}FPd>aoG80qf;o?n3%8i`8e2fu6VcL@deusOI6-qWTcBAHtA{tDd>si!}r3Ie2v>z*DdFdH{d5QnH!xgB!!%F{Y z!Mg}9Bk?NGioVd^;u@C?wCU}&7clc`%e2QZlesLdU0}GI zP|&A!mCI0{=jo5wR16A{cu{FG1sO}IG`ZO=YFjlQO}?MavZC=!m`V5E<+FI*k=yhtFxy^&rbr%<)FjBt5TbJL0J1-t9Py;b*i3w$ybEBH6y>8mE)x z-i%04R4`H)xL?Z}O5Sm5a_Lkba>keK+g01;h|AR}A!ty%Bo!qNRN+3P3Iq<~FadH3 zanH);Z{H|CI8&hMsyNIZsrFDLUZgLyYL_25>3ovLf^Ou9qSJ!>+>TT_raOzSGGD(+ z%^P!Q#u%Co6CTRH=73U#t1cm&uk6wL`noN=#{e+LHi2Bb>| zk|r5Z>>|ay5|gjpz(&Joj82bYdUU28G}G;q1%uowFC-DUjDF{$r918Lp3%xyV|u>R zDM%J@Kzn8`eqooFHs}nhV~NN$p4Vn;SYD&qUS-*b$+>QXeYhY(oh2hHt1y(G%WmFy z=2VCa>#%T1$0iv_rQwG#$Aur^v9T1h&F;W`qWrh$2u55i-7X z8me2_FO-#|kR1`b 
znJ)o#uR|$;w2TZAG>LLuNTjg*ObC`V+-z&NUf(jRKdClb(WAdYEr24IWz+a%H!&*& zuY@tm28CrP#!w~7?ol)0c8ps#f2BOI*{dmb>G267$DlWS6oaYwgYKNWGn{)I-Bpfe z9m`TRDzPkrrdNKFD>YI1H#pa3D#tTdC*eTB0T7)9BEJ174Wki`&HWU^I`^b1H8Q#+ zZuO^K!ieSGOJ={jo4vA7$NZO{wzr37Qe?RDR=dS1ill{PmUDiN)jqh%nNAxGz> zVfcLh{VHt>Ht@7Yk7mCI^H}uX9BJ@rIQ5^|Un1AlrRXvLjp`)Pb;F6~&}07k`U)?n z;dBv8##8DY>npR38h<@|VEAsQUimbHXfR=--m4@Sim|?uYOqLVM(M{jBrex$n#%%2%j2GfPU0<1c4Eboczo;-2 z$j`H-soNdYlTA3I0SlH*U-3x>{ieS&Er5Rh z!{VYp^-7fgxxwI-fWUow5{olb@)M3$?n8QEgEzJvNflwGlq9RvgdKa)w$~NM=fc02 zDZXbjt$O{|{t?B6BkX*Pkmo_+_rL(4MRSTs7cz%y+^YyYGrq$Z13C}a}211AD3gE>DK@*MB~!VN}k7jyqEGRK-3PK zDThd!>WN>jeft9N~inoQ3(n}mPOSb#f_nvorlixgrWvM1#GZ1y3 z{_+Y#xYc}&S~jlQd=kM@MCQkLv%APxZlugIW-@%SEmE%9X_)~iM&YFKFp0R|dv@*i z4z74xGm|$y@6yqqbR0U?N501BpS<@mL}z#kOgl3Mym^E7LC`kIq%Gpxu-sYb#PaJW zjpetDUeZI=b7)V`kn$fGOxk_6EjV(E%S}41x1We$QfzMyJlkE^A8`(mq)0l@z+D)p zO#Fl#7arCJhSOudrkK4}c|Yd3czJBI9^ECa)8JNakG2{qZUg24j46Y6Z-oE055|*H z#KkIN8K|3blK0UvBJ?o}%v>kMeRl;(cluawG~Lp{V*n{X-W_>gSfIbnwQPnkeLu^gd}PVf&bi?teG^8>IgZZ#(mALFHATy?3LNifku<+M9Ys_r& z!mf37;b9c(LaI-F%ZT&wc$H%kL*PJPAHLA4o){3P((oKR%-pbXGTsLNc6GiCaI2&O zhbvgvUJ*_pHbj3A@U)K|*DU%xUw>uTtE8 z$=MzU`-}vswA?>yd~eNNv7E&?{+qE5~IwC7B_y$=lNLv zvlw>GULVn1jnIZp1_KB%(fDM{4}UYhITu4EPCe$xQgym%<;Zo8gW_~!saqqS?QkbM z&9rYQSvP~B%yKB&8-1kn_cweaL1unu*(RJF#!7z2VeZXR?WS-+o+kVL(yUsI7gvtK z-FNFFwJNCPw08}OWosqRcNUUvaJUNJ{zO;ZnJ6;EAJkDxJKwiN5-v^7NQRAjnGIV!Q4HM1JKdAsVchlauN+Wa^PCw z<1jIIRG4zSz@c)Qdw+?8?fQ4`{Vb=JCK|n&QyLofqrHX(0e951s>LLalXd2ha}!B} zOr0e z#c`15>)E6$w$P-#3iH22q`QC~ckDuf7|yXzBHh^ecW$CfSOgphDJ7+ORcOi%j-mT9 z(;-Nd(acFDk`JmHdU(}px4(LaB7cTBCwtw@5l>>)#?-Fu^LaUD18ThYOD>!zB_en? 
zB)s)YQyP3uy&IOvIkk0u+a3hXaTtPVCRjb&`)bm(^Mtb4yDb5s+GAdZ3U>Qrb~eB- z&qRx~-65{L>o-zj*3leD7Abm^nOhOaIpytIChVa1Qxr*SS>`xI#BRDX^lm2kmw|EF z@y5fVC*Mq#xQQ7hnt#uL|<>X~>bs6dS#=SGQ!AGN&5-;i^!QaH(qf zEh7f%`iF2I!|K@XjCt)}jin~^X9hZD2Z{&62EX)BNd+7yalOQ$J=r+#XPTS86(TAg zQnrORyczfOa4G0Ru}<*H5w`Y-iOe_d1s8kF>I(TRqz`dxQ6DMnff0=Py)oO0(e=;6 zf|H(%1$BFI^RaB2pAok8hnYZA@hX*}pDa$bee*Cc0JZs*=-42v-0tUhO3|%HZG$eUoMJ^U~s%v(8rk|!ofoHJN#>dRiu{$`ld$(!oU-G zr%Eb-u(#N>;b9x7I&`PpEw&yjUtNgj_uynz!z4!(m>eEtN4`@m7uliY zy>Q~P9h-2cP>*o^1i6Pn841;PxGOxGe0PQ7wxq?N62||pdUWA=?)W6T`aTvGp&3&8 zu7>Xf(Es7}kT4=xHUe^uU6*!_UQ0Q|snliAJwh`YxK}wGMl9tFw^^%-AEu+$MtG~T zgpLjlcNRN9G~{UM&Qhhz)~m=rV+T?)>!2?ib8B9>vN7Y8w-l zeB6kj4?G=UPJF)i+vRO-(mpdCXYW2`L1ckpUAk;Al~zsQA4lck$F7~<+8WMiI#O)f z_LE;dS`Q)h9k1ur5Le>q#16kuBKfU{-8&nfFtxH$=f!?7C_2X7v?tK(lRr-7@h4l3`HmPae$E#DKPhQ;BWSI$ zfjIB5^`uw*F)llPB1Xf#3`u&{2NfVD+4K78t$Uz7i(4s9bZ)4#He zTS8J-?{)Wv_^MQ8;LhrlJZ&@bL0^sV1No5=2(P_Xcf}E(oFK#j{^+t_B#Wv$ZFTPx zTuz0GE7;iPEcg2}~M= z0U+s(_d^-74s{6fwJ$RYq|z z+>~nh6(mcLNafjqM0?v4pC;fTq$DR1I`y7a03wj}KAev+NnlN-a{>jpjFWC8s2&xi zrKO91x*j(P`xL71mRk;S8h-*r#Hb_+Q`&=U&fq-vs;dgy+v$?nIAj`Mo@G33#H~l~ z>FHECWaR2qr~n`(GMCWV26gFQ%6J7*M!V~j7hv?{-7KCJwv(J->M{Il0Fg)1 z_5j@%pztfK-b+zBAhAN_(%lF3JF8By30{f4+|ax0Tzj{Qmqe(%W@U#MssKdm=~@)* zYm7THGh=eFIW3DZZI9Z@^)p$6hb`cGqj$AcNuC2t+F@;&Tgdfy0)&2Jz*vy{!jBJ3 zdgWFQ;1=7r^?vELK}*zbe18hU10z4+PM*^`90S0JBTI1L#T)TftxCJcJsY-lyIMF`0JUzm32tu=!nxo*scRsRmbr2ZarYR?1F{qXh3IR_v% z02mx(0O@93NwTWeqorH9SIpPWL=OQ>S5qV-WNk&i%3}F#5-d6K6x1Jm4u4rcDDJ$X z!Ki|T%|h_(vGip`z4G;w8$u50wz1ZKb}otK2ZsQ8QpCi$9vK4tUA5)DAdQ7xLx5Tz z(;mz>uuRYDvjQ+HQ~)%Z!-|TEc)X6B7}x2gUb_vl^0=Inccx}|Z$U|!a=RBFN^>Ns zh{3)t+VDt$)jSA(eBhOP^5jX-+-R94%Yz3GpvqN+tT4_{aj|OfbkDhy=1+))zWNLT zU%$UMJkywQ$lks7qeuYWB!%NdKLgS>>Jd<}KKLF)X$)C3!Vmlw>D!<7>tmlabC{vv z^4~NP02һ{5r7oZ`$Ck?<__~_QPAmOLm*3;&=~a)KU_C7B=6H8B9}VW}@I2o( zVMPC)s8LLl4gLr60Hpd&K@3Y`5<6c^4=h%*!IcIUi@&V&fky^lh~_3jj~cPEbxJZx zSbTleadI<qx}tAz{&Gs2Y3`5ZC@DD zPHg!M#A&R9OwhxYk`kD|^dl%=`F7oiV&o@C@@yK=E|8AZTn?g>Fa(_=M#fGsf~Als 
z?EK$sgJh7;Qg_ZauzH}!1_vWxTL9jVBr!c^m4s&|BIdma)(5FCA+}B6o?Jzv8g@W! zJSyh3eIA&ACEVZwbZu3HVV~_aJpw44#)}0WK)#G&xBM)q}?LLV7hR>gNL@SGf-{2^qp#EDouwWlSs!2+v?Jfh^zg;o_ zo%ScxWnW^!2+5$nm9(rZl@0*V4K5Picm_oG$m}f61IUashRbMAq2yUlDx}5#+cghX z3mVcvdg%b#dj&r_qB+=sf7*FlTxs|aF*^vVdowYBfAZw|9OnT z1({;D?++%(N>=eptwz)cX@uvTV>zGZz0>U@wu@*4(@p6h;>xy{SqwyA(8tsup8TV8 zP1gavp6c$vTL5}NSBcd1m$vLHh~8(zg&#pL;d`q)Xoo}yIX*qpID%ZG21ED#-`43Z zal#kz!MD!#Aqh7D>UvLBvE^)SPBM_O7gM*}0odRJ3{=_?eOap1Z9bb#mm*kHY0p>k z#2ha2I4#`=x04vu=;oIh-;jrP@n^k9fG}*bUhH$P2E-B}Ys2>y@@kj)EC-cBXvHiL zLXW@Tuo_gCLb_{!3&I5>$YNyO!S5qSftdQgCUl~@Y;Ce(vQ;;^8QAM_FaTTE*lq*6 zI0QLyFzS0SH49L3{+Yk`26p)ZSOnrA6Lp@htAM@!t}O@xO{W{o-Su7`hr2^Y!MV;H z0RaAi;`gxTqvkgx#ZJp=jA9&db3YTufRX$|EM7e@sHyl~Mu=8l5#UiOcj|fAfIwm; zot#RoChI3!tAPl6Q)Monv#u7^tGJEcpY+Ix0fNyfeHNasn*59X=bvUXSmuBB<`;0Q zT{k#ET;=8o4GJPR7>|1m{H3t#?^_}+Ynuv!KA008&|&#+6$bJo71nbG`EQoAyTCCK9I%IJCmhTK^0eh}`WOa4hSCmLjI-z=B@fD+HY z4gZUGiGX+ABfY$5ysH7;ymHl>hYIlSlWzkIDJ}gH;3z^)8tI9HIOR4IYyj!ueVwJ5hST82 z9X2%;OIS}&PqEjrOTlBaAF&SroVIY)_)nn>z6>vg%xP6|N1Hs@ToVj= zRi5A~8v^76gtRo-S-5_{@haqKrE(uYJ}}v?~X4GnV8 zK?A=xSBEzsB&4+OMI%&uf>{9G*TMjJYm4FfZ!QT#Tq?;hJL8fo#HAs$2_N9neZSSK z@_$#TQgU*mHrMYsE(Gz}{?f-B%n}uRj0PDiNN8%GSpBmvyq6$>LM{bY!OqPGQh`}< z9s-~Om5sXu2;thT*%rkQJf=6q)p}ji8eiaEp3AHF#q)X)nA7rbQQChAP#4PADmoA{+al} zal`9pq6@}xgIq&8A`Fg&jVB4u!pX0kA@~4HyjK`X3_ z&(c_aKgcBEZKB5DKo$-h%MbW@IDP<~IW7*I*bbIWfH)GUuYa$q5ZVZ^I@7J0Z^fR6 z4mve1c>p(;X>V^&PfwS&82HR|g^Dli8i%g@Sh=-nvezE6%Z=7HFHP17^(KS?sRXLU zvKy}B_JJd}r2fzZwr}Ex1w&qgtK)zdW8=Mi0iJG-%QJ#qq>#x2rFIFS;Jyb^D>@Xj zSt*8E&?YKW{74=GOF;~Z0YyWGKIn+;&V@-428Q%6Pt;V9U%QrBYg=#awo$haCU9z> zm4LO3pTbUp$@^0nh001)rrA3^E}=QMcdx^kI-E=({?AV?LDej^Mu(K|K|;+WHb= z=D7e{MM0zL2X7|M8PD-9fhW6za4jG^Y?enXVSE<-@}Q2quDJSB;?z@UHMiu};bGj? 
zS0ahlxwc49U$Ih3*@2O*R)EWAKt&`wqx@^z%IVijHxrwM>AVl79XdEF5Kl|~`j@`Q z!0-?{FO?sh(M|jeJo&lMy$`^``EsvO1zt7v;_@8mG!^lcR94W``9Vkra( zA7px;&ZznvMdljTRrhM~XKURuSRD%c>7f}ZQ1)5AZ{?d>R=PX^gGaO+DtuR@!{o5s zp!nGi;h_uK<|k?%7fA-&4k;05JK+ zf&Oe`=Nd5=4Yb_mP+j!=9BCR4q__5O)3??yF*!&WPM* z^4j`#z2^ae%iSZ2fVB9*h}pP-z&DHo#yYI0-lY~LwPl`>o|Ght5gEYzq_c!A0R&Lo zd9UPmzq;yse#=3MG2Un0aG{Obo&QTMIe{XsZD;bGt0nX_brcc@%#)ts{s+loivc!D zQ0>NMj;j5|l3;hwsXdFBo?AeM4wYZM!p6!~K{wr#h2#**ajsi>Mv@lJ$77Z|#;nR= z$Wp@4;DVHisQF#37{_P+qyl+!Tl3@q;LO3+l%R3)jE?zV{5SxqN1()sB*&S#G@b&3 zJlSBhz{AFy!}e>(A(gpp_A}wrokHUlLPioa^J@a8$I2kPj#T8Vk_IrDnbnM8xOvIz zjiLJ?Z}AX@S=VFoV*;+l%BeilNfdF722oL!fMJp!-?(}?Gk(sWNc^@yo@%nV!`CZ< z6hq}O>k6q$k9mvYUdJNBtgOWjQ_V(fqW zP2f`;q=^-Yy92mJgemZ;D&DWslG8D%g80#^BwIfIvW#vKPY*m5( z<*j9Yl`M!97TFg5pT9Yre&k4hFef->{&X&Foh>4^7hL+meBHn)+*}jBtwCP3H)?4L zJYD&2-S6tvxuvwwa)T~{f5pggQXmHo9|hn+L3EmeVq{3cCsq(6pTJabNWXogYr%kT zS+$kgP8#;R05sLQVr%CuK#~+tvpbnieRq}TH)X4+v$w`0cDGya?F5V^d;AuHn!wqk z^*=p8xH4HAE<&!lf%A8*AIy?IK*_;>BIWivPgFs!dNqUwkS@;9KWY6tD z03|cCW`H6PnlWYoKenV)ooJ*5N)#mk-J0b7>+!zDARgknwdAkHuO_JJeK2# zBf63XHHhuaFLB#budaWQ^9)(5I&X&M?Pl^%H=?*$Vu0qszzHptf&2V>IPKw!F` zxl>GOs+<~Rqfhir^{*aYYs+mFM3P3TXwI8)uXGAd2EApxe4SG%_lULwNU35RH03~Y zTq3rubCxfi8_p&d*vjeA>$o$=<_qBLu!9b|{)=EM>M%n*#M|EiblzcOHZ{r%02gOP z(kQ2%o=_dof-W(@^;&vE@F#Q?jz={)4y)STT)#0_dHrTn+&VpOIAzy1aDLAuF+>Rh z{bLOZ;A*roG=)&zQl{gT_G5%Vu0aIyVyhLgLS#kRTc&zI|1I_%=d6 z^U-KS-0!I5si%2OYmQU+fFFWY(!k zWNCqO^;W29XE$jKcqijCtG=_)DI?c(K<@<#C#cBmxik+VV1`BpnX2f6{MzmIsB%-r ze+kQnL=wIgBLS`!fi`v!1H8VEmsBUPCQ*i|49V1C)EFt$*IBAy(y)?e0<$a}Y*9;} z{D9Zg2T>%V-G*{R*8c#&iN(X>b(#i3K9d)L%*^x!Eng`a)D5TwgYk1iD*pCPr$V$X#wR{BN1BMfY-qUjBFtqc`M1RKYA^2 zsQ;BFO9K&hll+lqHsB*Nm`o3l!Ad$@?VJPL1|yZ^5DC6b-_Jx5nG1DHOiXHkdld_* z#{~f4As`?y0g&!?bJBHL9;UG7mGVbTFFwhq(k<*UWJ{*r3&f~Tx+Qv_Y{`M{P4Krh zUSOiK11I}pL0k-Ja@jpYWF7i{*t44bkN#GZ*BWS*VT~RM<>>GN?EmtF%vxiiwe1%^EVOV125J&hfU7 
zFFiGv(D0Cvk+JENIZht!oBjIER&aSMrzP>%shfRL|1qmr*V3Uv7q5-_k0h0@^`OJMqy)EE0O(}h+ca)Fr*8pholW&>;YFbMU=|QQSVK*57b+?XC4Y&3&f{9GG3ihdvRw(S&(-5(&wvGrCum1Q zFb{CBK_TTm#yoiD!1JfxGrX030EQQjo3Hg@3_!0C%;uWxdxJs6C@=%DZm68ce#-9< z6>QK0$d;2`>tz2ME}HWpn2+ks9qIe}% zZYV{%(L~Mn44$V~5%NLxSa6Ffm1}s3dxp;x$h@U6sfI86+2w+4do3gOBpM8*N|Tp_COd&f1)?h7F%=ZhyNh@4yy%N^yVB#a zq_3>FR(fUa5~xeTQ3hn?3DOcC#tU=5kVE%EMd=c)Tgq(wIxyIELYHrA)m2sRhzy(8 zs;*dkU#moSMoW_BWO+AwLG5nPYwBsVB347Q$!vQT4A$qa93`4Mth7iR6x&qgYJ7$v zJ!$ygWIRCvI4gCs*?wn}%66!uVYkVkOEnM(UPYGk*`~HX#Ss==G9SehYI=BeXR%_^ zyGBnxWT2<>>l&EOa=oPXWKXx;YWZMH&K(_y3!cVtq-XsXxjl{ynDf#i=XQ5lxZU!d z9mpD${?hm)CdlycNul0)75*LOi!}pDNz|aamO%5aM^PkbU7&)mff3%(a;JdisnP{= z-%Z~OgxwN-DuGSr{3+OV{)ECet@Wz?2?R$k%|>3FLfu%O%d@%~jiNv*S#$-y`E<7f zj0CX6k!9_NQKb(!Dm_u-{)Ge=DImozF=qm3M3`WB)IdtuX=&=2Q<9+wii(u1(0I2W ztE^A(P>%FUd@X+$SD!QyHsQQkluZUefpHU7uk9yZd#KuW?B=1Sec16{$NGN0VF zDwX?T$d($HD9K|tCAmD1qy0kzH3Woev?=eIWXJeo`SQ&p1_e)t9fns!Lt)827#%07 z*@N_%;3Tqy^yV0w{?|!4sDydMhkSZAo@Jo{q}oRr+?a#(MR&195#(r0nQ9A5-L-77 zEQTkw92iiHz^6b9BaqBu?E*I5L%dju+%dUb&4gi+~qt4~gZukqb6zW&2em_T)Mu7AhiiGw?)^zuRmX#z6WpVN|9_1j4E z9P^#DJyL&vn-{#9Qz`LS4ZxR2HqJ3O+7b{}<8Q{}Fnjgsyi#RpL)9X>5O{dXwY@TPj~gB^6x%<{l#M8a6!h2BhGJ7tbw>0Y>Y}r*P9A z#~@c+WrJS1NwjQP!$|;lr$4Dr@Y6aDeO78o2xlxnYP8Hl=TKBnT#51-?KIp}1efvn zX_^vZ6AYy^h~v18WmBt`-rUS$?t7@nm`%jsfF!Yz17wI5I3sj0;y0v)9(Np%ZWqWZ z6J0P2h1(z5EtHKpvW7KR$oTWH#VW-Ma2!!La1QzKzCaAo$~Smc03!{wSdCIw31)OM>sb53F;$rAUzwH9kB3%wvdnP;<12iWo5%$jOjR`^UBC^E9i#u!5W?4<5B0VGKx4TrF21y=_7W65z zQ7rKz?8#H(E#Dv!F1#=(UyDZjBtJ3>_6aRxM!TGj0)`J; zm2VraGyUAp_am<}yyIoJe{>XiXR_wv{_eV)*!4Tem4Ji8U;MYz_E1B}UT;!~$(nj( zZj)wSRv)xh z$eYG-vdRbp)u-;zpouL*hxWq)Q{&uQC(+x>ts_vAGkkd z^I}oAq1p0H{8ZIwpXD1`$D-?(YwKTE;(rHG5!H9JT%{X1w(%r;DQoc-%|sgV6<-~* zCbRq~KH}&K)QDe8T2brjv{3EJQ;9J2*rg(+mPuvo1L1Pfn+1vfy!V7l{1Z*tqDgan zbiw{T_bY+Q9+^W5VsavA&gjt~y7x1?{L<*pi=Ki*VFwR5-IaSC>*FouQ}GrSGGyjs z+^*Q~^ahVILdm_IB7iVB?m~Il9z5kP9GxXtsABte_KHQftZm5d>xoWft6F8RPrZ-) z<0a*5#keqgIg&FnV+q5R04Y2b9<~9WzA@N(m+|0 
z%CV!VU&jG@4Ws4ZZg}6r5O31GzIP{@wn62>$1Uf9l)Q^pyPoqJ3A`gaE3`Z=s-N#A zs~2(Adyi^amuT<3aJaIy9hdGknqw)y7B@E_B%wZ(u5T!{h7<8oDxf`#UMh7+X?@l( zamU|5d3%)~;iB9gbN*3&1je&-5&{f*nE+D2$z2YCC1IfRhVZ!O1$u{b9|LGdd3lRb zS)9rla$n=1>g{AbzDOOav6&X1SCJ4#h6e*EVFbL?Pxud9QhXPnMAT8N7a%N{3BZYaIO%@d;f!*> zKTzrj=V=;LJ^E8Qia5Sf(klU#<8?zf`lFuv9Rxg9_)B9ZM>IFQKAnQgxzf~SMr=-! zuL_%OF~#VUCVGD?w$}L%R<`X2GBa_v4+3Xz_Pq|?Rh#nW3RLuRf|6|w|9(L1D!D>n zwm}{n&sJd_S&3tHZAtAtm6JzBQ8JksfvUojhX8WFn*43joU}WfiK`A3QG2r8QL{Kv z&iirlCKbR3P|JNSHe}oX0PT`sJ_X)Jt=)J3thlyA0%d2Cbb)w11$v&O>@qYYo9h*m zx>}-k&lE=rLP|o#b%UOVZT)au@_ryW1*~Vhqr*-}+0Pr_o5!-tOy@~{SRV%p9NtPA>6CH0c94oC?j|Ge8 z7TLh{OEGP0yMwk*{{W@oITo*q^{@VLl5PWoh!N!*;Lv(xPC>o!vOw-ju-G}i^q3jk zjQ@0_SK#brn-L((iQy~4g<>Z?^s-85R1qL!<_08f&x;qudu!bT?F|HYv+{qj4Rz{1 zW>w4ba0gI1$DxUvlw`vL9?wDmcju9A5PJVP}swQ(nSdabu?j?b6M(QD+{0`ZLNeD&Y(zU#$AQ!rHlT>X1Ulh2Qo*sk? z6L(xo$X|zl6ZG-}SULCeT-O-Uc?MEAUA#L;m-vv!c0f z-Ec8yAEi;j1asQcTY}dd!NuaO;NrDs`f^X7;w{~b0OL)gRM3yejk~Yrm9Ogjrz80V zz{ChE+6=L?_lL29*L`4kX8?)L9O!3JKpB_^(`)*;V>n>qIkcdr_VAHy<1D{o`C1f7 zUra}JqUp5`VcqHMtfUCgY10`D4+kAiv53cCUyoelaoc<(1g-}8Z~g^B^wlEu&gg?j z0-AV`*T$6W%mGOx02Fn&M7m8C}R%tf+%AJlx5$D@O~%VfPA8G zJe19*?P|loVHp*x^=$@rw_g#zG@BHsBo4N(OcCacL z{l06%lYIENPPF`4a9$u71Z@RXu_aLR5hgI6L)WS-IANmq=}5s#7U1`1FTjF52F{sO ztE-pu+|1&DnGH!7fjU#N5gS0d{VAaG(Q`ty%3c-Qh-4R^axYndcBg8s@)uG-BPMNmve3<;&-S28=FBj}8p6$UiJuoCy!MY_spQ z?09^|Vy!AQBI7MVI4iqg$5@A8dyw;3giYQ|D;NXIka>3vSz&Byg9o`CP?Z}PPXkCl z5_o>9e>Zwwr~m;k(WYb01mk7sUe_|s_I}T>tlRL~ z=~qr%X||?JoEBHKFq4d${(ZPe2fGP6pGSw}aOo(uQ)F&zcWgeVOwZIqN17=cLF3eY z7n;D34bk|*I+UY{bGY5X6+H0hGY8`}p1-|09>j9uJK@YwB=eQjg8G=aysw}qd1#wL zbbX_sg(sa=9tS#t=FYM^DxzD`Rp4?7Ufqk|>*!PHtcOVCE?>JHy>wL;VxIbkX8BIjdmK5$cNE@K`JQ zMbWIFg$IfeWiUQsdP^~DdZi~Vz@FyA8Q*}oB;lWc@LsG4tz0?tp(DV7M_A89o#~P! 
zh!t2&!MV^#A!uXIC%XPZ8W*4h#MVpuOg5*Q)Y4?#C;5;Cuk7$OYZZ=e8X@C?SjO8&-MBG&^=4&J@X=x8NyMZ2T6kOEXpLu6pVXaQut zMR9ZC>=9lHsJgeRO9dhW4)8jG6~6#N74M1akxQLq`2dcWSdCY5&%*#H%>a<}_W&qp z2lsb|Z+lxU`M`+8P+_9`O^pGX`U6wU3gD<_IslBJ z866_U?@`HkOP9vLwEUBv$!5MM&3Ma`4L(lu4z)hv z;lLOfNTTV(C}F}cq~#(I%rxA5E^EX8nJ)pV0*ADy*II+>Op6?VXc3pXSxF%+VgTew zGu)Db^Wvb!_>MS8(acCsFo&+|bd(1W-e$8M!T->X03VjD4pum)oe(Jd9G0*Qlsw5p zE?oYlDV|A4BQ${t&Iy#0-v%e7guu0-w+>Ct%mWgaUkXS`@Z(xOah)wY2M!h>^IO#+ zeet^kG}`|50U3;v`L^ZYttya6ab%!Yhg87;nt2-wlnn1O1zliCbZQeX0@$THxfQ98x)sFC6-9)KJlD|FJD%h$}so{pY<&2Po^j z-p>j-I!PEH6bF9m9{elL*@1qL4BW~PJ(%+E1*c;b#s4*pA`e;69LEa5nI2X_Y*0Lk zf&?`s0shw#0dXs*9{_ZfQ1Og`=3sIdOj&mSmvB-58*$nVG_IWsM;>e=V3QRRl_WT* zi44MNAZ{h}qY-gw1=IDmtV(Bj1nGZ-!UOgS_Xw>%7m5I+kdXqWkU|y$cl_kSYb!jH zu*Odh(o8@bC7KlStE`-y<^LW}dI5#Zn@OojXABO5a+|6>ta)g~H$bUBQcJ>*allDh z^XI^ZIB*-lMNz#ecQ5=GBW^;Bm|s;RY&wQAf zZ~EijbNAV?_KN3u7UEjt!(51R1JdOD_Y@(6+f{SavY!ka1H%BS4FStv`wDic2z-u^ zNKsi4eY-Yu!sP0@c&;N41n6Ms4;;!+Hw*yyh4(1jYghS`5u?vYpW<5bUqBW<5zL>+ z8qwBE^oG|(&{<_zw-6I945`2KV^GJVSJKvI1o__l3;+dTwSl)}`2YDL*4USTjs@xq zrINo8kS*v0yZ(>u9g6I$79D6@iL9zEgc-Ckn+)XezXlKFO^7K&$uaKxUJ8Sh{EG4m z@DgVl$!$hyLe8RJ9hnk*TI5aEuh=6I!znLaq=ruC){5BOKNRpMyR+ESzMk`g2Ynz- z1vXRBno;ZoeSw(e29W=ty&?Sd1ROREu}yf*Q}!U3l|zLFqVzt#dO#w$7xHcS2){ia z8!P~}r8=?|y^WFIK3Op2hfiatBi<^ff+Z46P^MwZiDAOf z4p?MK;wN7XzH_67Q7&tgdbag`485kGm^w%5HU zW*COWgv6|u(S9uq<0cIFCgU_>R-4(79gznH&R9}OcbwqPbr+cJ0UWbCC#yqg2lFC2 zt3rmPjZa~aQo|>yieiy76pD*{bw`p?`Ze{4D5cxUfZc+Z{tnFki#&jhaEN5W>g(Q_ zzz)4Z0PkA12S3$&-poSbWQG$fy+zT+4#xEdv=jVZ-?@t%5@1gt(YVDRkYt}h&l@HZNa5FrT?ZuHkITrrc45=La}>b&e)n`w@oV95!z?BKzh+n- z1HqC44p5=)_{imNG9Ogp(VmvBE;Hh30boT6@w5PDid@?WEFwN**JGo*TLqyXGHaJg zsZs6$hJ-oFdZMm&6H>t171k^VtMwj{0md|cd*M!Eu!TYr&exDs(U)LD6$1$CnD$2JscDC*Md9(kB{&iYxg-=%RlISON1f) zQ&0Acs{(pKc1O+C@X)im9^U|^H*i<<2~b;%^Kbkk6!@e!Z&2Pc2ca!ALP*D^c19~o zor4U3e1KyfU{ARES8TZprqgdiKnw*NqJ|P#_Adld>z`2^Y_Bopnr>i7<6al-`peFe zV?cMNXt?><_+&ntTpk!g&|An&1}OH$VU5g72qyHVeEf9Iop8+uN#09KA3B4Bi==g% 
z*#25VOE@H-e=83|6$~>Ru^l)fl;_j$<=AV#3OrI@|JM9^=psk9ff_C0-cu*> zX`kgV)^hp@`bW!9VxvajPHt;%;ZRnW0<|{-}BGYHKR8$DfJwB>gjxd%y zH>JC`{ILoY7}&tE=LHXPNB7fT&Ko5yVkai0;^FXPCLmM5B1(C_+#sj+7;Uu)j}L;# zPNrb@{2aH0H2`pjpwylb#4?%{MI@Q|iqT?3;X*=1sCSh;%YjK)U6) zE`vec_Y(QX5`edv-d`+?IfKhX`Aoe(+5_-px3EyZrHpg#s7#~=CuQ7X6W4syRx2KJ zS@m0{d{{YaOv$$i*`dSLbjwFG4txR0Z` zPs+)_g&k?I4bY+<5P2Q3l?Uipb45QeyFN8%sT%@@Xsv)RO8D@m>rw z0zpC8J$$LOBhr#TbX}IqQYWrUNBiULl|^py>-zI8#aOaWRjv2sW!NpumMl);Wjuaj z^in$94xroOLMACjmo?BodI}Q*K^;E5o%R_CW5kumQx%R)9#d006CI ztZ|VTozsCVVF}1r==6NUx_0%~Q|~!!vH=FRt^N+j@$jT$SNfbBNM886C%inl3h=Qn zf}t&X5$*t%)$1m5#C%{Tmo*qEPMur@r-IY`(MJql1_QXe(Z9aDZqrBzyYvd8#=eQ9 z8*vh?F7Hz98tsPG4nL?AJADYV0(F`hfab=4xw|^x5~roCOckSR>xaP7EV^Nkdf&SNPS*- zHatUu9@$scacG>5+!moWm=MF>1o#*`S9P|kX%5$mxq6{G>(S3{CVTOHC5GJwG!wI$ zmK^=il8jpDE?wKCwyGt|BKMXXpgx4d*#3 zC#X{d;``&RF!pr_OjLGkeAlZs8u@y@3okao(%{9fSGJHNjn!}p-x+sVZGo&)Q2FMW z%|4G~EQV}@iOTWC@7Ke^XsZSHf0%8<)esGg>5j@^xf63D{2!khn+3Vb$peH5wPSkj zq~N2y0KsRteJ3|+4b@v!>G@vm%=8{}bqW)G)8X-mn&usza@36ehbMufO`X_ti^%l8! 
z{j;*^!Bw_ETehoSw7u3@&jcya-lFTztGS4)1sP(CLtq%$!tsKX16OUBrv>v4#@1@8H(b^0#yb46X%}BsE72U^LtLs1Pc;|ha-S2Xj zM%kgA2_1iPK>iA@s8|d|Bzs+ zT~+q_8kuXrA?MFW@lZP|iXV0EwR^yq$W8KV*ZE%1Ya+x(1%WL!1}3HtL=R@Zda2BK z1b>(|WlD1$QY_gjf7UoD5qOmfIsIqL-4qOfqWX||tqV2D)Ed1?sVYtG` z7T7(V@5G=q(wg)6PS+FuDEuHF(+f5Wv z-K0G%-xhlxHc@!g&9F8!OIo| z6?FcckpE|&ZW_l+mbF&Rq%+yd+$JdOf$XpG1I_j4QOaD7!IlYsJTrD_LikmztLAUY zx>u1RdK-Wi9WT-8>*)v$r*YOoJ__d0@?#_8Acys+X0pr5M_u&*bWRA)z-^x|OLx z+VFK5u=m@b^i?I99P#QRYmL$sX3d8~WxuT)WklMMil~WY3Xh0PeVM=ZJ}MHz^XU=` zbUFyG!>8a$ekI^?K>V$rX!9Wp4sbMkKV_&C+K;1Ed3=(LqWme~1;@Qj{ehnGU0vJF zyDD4h6EGb~#cu1g3`yIsGRUl7A)*T{=jjt47VGp{8|1iuv>#?^^VRAY-0H$AnL4{@ z76X@$Ouaj*M3>y(2YwUGK_wD5CR3raV3o)Pl&g$MIAC@c3Kb`ZM8)Xuirx#+p9o7} z)Z-X+MW?T$Udg}uur)koF;FkXmG)k;OTS7G0^SeiTev0bo=Xb%K?GEPw73dwzZa_* z|7=03!;#0khSOeZ{%rCxgLZeVhX9Y;k*N`ZGe8tcE3XFpQsRt=}>zMTa|g*jxPQc;Rl z>V^7aQM_dzKus+tJNr#J=MqYD1USC9Ily*ZbkG0ZO9J`ZF2j8I_4lp>i%r8GWfV_5 z93tdUgLX?9jG(^)^ItFdO`_AD-P2R7D!<9@h-;=;fZk^Kz*Cq=<@yn*GTYk8($rl@NmmiiF z4$O*JrOfLOSXI~7^_#mrp0zAYZW}8PLl=o4*@Oj~PZ4{reGoW^X(AS!*?2W*Kh2M`x8Zgg)K|wG=t|NEgh$Q5J{XKTTz&$fd zYfgR)SC_cYt6~&L^eppu=HW5&9e?nrjW4BmqLXytA=#h)*W;Qh_YRgTQl{K%EQi?k zK7$S52+gtD z%B9GAk%k(~d02+1W_NOaAY*~PO6WwH!}MEf^8ags3HjJBKarqe!z(D1o3&I99x z5253-@S+=Uhu8MwH5ep#irCUG3cPZaGa6WzhLj;TN zN?5n;)Lk0=x9lszb<6S2)sGF6^=E@2UX25gW&0aZlhbFL96B&z?oa$0`o3 zcg7-bi7p(GNj;e0llJrW8h&~i`-l6riz-bEQ)(}N{QNlIZY5xn6!=i)lJgVPrY_+{ zqi+id3x##G?^tEOX2lkfro=2v0118U{%$ua(I1~t&>c$W(Wy!P=KGI*F5vq~7aA)_ zc+j0I?S1c0>RezSPI&fA**lt+#0#5%{T@b9i{=Z$(1_XlD*Vh5^eF8G`g1IO@5e{3 zBYuu}dTHXV%tZx#Oi@+Fn(&~<7;pTki|d(&=$P^@QqPXYJ8DXr8)f`yz2JB4Afd1( zzodX)$G~v&Dud;M+@GC%LN4F(aW0Kk8rGFvy7@ARLX-%BYz;qNE?$khiE4tw0AJRQ zrA5!;^YUR@B&7V~4Ve)VqGAZt#V8b}CMwlw6@1dj4(|$N^Wp?A=y>iwO^HoBw|9Ah zENIA{Y6P+sDU?vz{QP za@}jnj*NV{$?tJ}4+*bbZ~hHazZUb*3EJ>ntCL4=Azhv%6|PDj^Q;xgxc6Lp14}H#{Uk`C_JU89Ztz4*e6$^}`{TB}I5}FcUCI zxYKOp{`rW=eYZzs?Ne$$iZ0`xd{@;=>z@JSP~0Qt*mM{5IrKwmzG9_U^^EgY;@g)H 
zSE-c%P&16yORJ45==U>rflPCb5LJ;c3Os5egDq+fH$Q#LX~&e2uG#YQO%(haCB8Wd;QU4z&bGyr06*N7BCD)Lf8eGz%3z z6JN}m!1*>^gq&Vgfk6T1%G?k<1jw?F*09q zrFUGMM9(O(q^Sj)5V5)nq!zaOzLs{g&BvSa6iL+7+g$YtKa+<8@q{goe+cI#ga|eT zJSY45N0`K&VYOV?9|{bk-~%G9`o5!>-b5@EvK%KvV!8Aq*97S z-f!?;n>9JGa@XXoGnaZ$>Vv)e@Cyl)Wy3w@#igs}DP6*Fzt@iQ$*-X_4X*ug&qsBd z`;#1Qz4&o(Aq%B>P?((d7&RC)n7XViBGuWnPw0D-o0WR*2tHoO>n!N7-;Q5KG7+Nw z6h>dK5MHi)P`oW9?_8f&_mp~k?#P+n47KSl9+5y|Vds%_Mb_q`hmP#(u?V5Wq>jeodzY;=`8BP&-D>{sf%0)XxBdkcx7`etQiFY( zvJ&M_ECc#1#0f_S1<96?^O=|GO+$0UO}=fVLPi#A4^aVti8Yk@maFc_-@=K79`v?QnO(or08e6iMf5rmQS} z${w4aYPVuLYu=Z6*^5?#1P8ui#jUue|XD25GztyHp!@?4V^2b{jMTPp0 zCZkw4uf3gBVC`T4|9jRU>-1_Y#pZ($4*|Rv_PE&ELcykCD-*>}(o#B`^tTOJ3xnbo znw-D$5mQW}3c-(8)xS;>$3rpuMId=!Y?Y~IscFrf3$?6q)h{JlN|Mdozmq*`o^}-= z%n{I2M$=(yvDvR=^><4-*2;%oM~JVxK_e3hxA;odLBS! zk_0&oC~HDx^%m5cx>SXeFP~K4#u?1eaD!8XFDCuy9Xvb*q86I!mnkkwsSI_tSplAn z9wLelvCQ*Mm1+W0%(Cw9*~^9aMPy7n%hjeJ$Kt0jX08<`(OBGcquQmlZqOPs08&AD zl1*3KHmWz0DvV|q3$+%WgYZBlnY7{ESQFnHXX3$WB|^#1cwp1zC_N@e?AtGhu?gKN zdPE^-c?w;5HTTzX+ql0gPZw;15* zYp(aSzgWo0nugi>u%t2b@<=7azys!#t@QzOOu>|OG10_(y?bcH$lk{eDfL^w##slF zQ(ciM`iKC`b&euIwMCdxreWpR6|a}L7+&8AVI}!#O{01H(fQv70WY^Jnc#qw*}I0(-p_#E4_KFoHpeb z2&=i9*D3BD9eFS9|8$ehL3Ht+ckniauf_zby$XjUk>qCViMofdI|0BOSUq~6?%9xq zUN=SzKgH`3Wuo_C-Sz}xiO6<2Fk#_NVO}FpdL~wA&=RiSX4m^>!lxD_yuwbOIVa`h z^f80Cukb3BxYdaE`iCUt{dVS|qh>MLVnkkt#K5&He3fHrGJrK2S}kci6gn@grl&i? zi7sbn>@SF58Az!2TZg2hEXdfD=w+0)$%@}vTQtinC2zw$$z39FpGf6mDrAAbqf5dZ zAWm}^YBQ1O+QvH@m|uu-49>s2+u#DhN!yb`EunqA-r_Vh3J2*9z2dFlk@Ul+jA-W< z0v4aGIpjr_hgHDS%6O}7FMGWtu6Sy`rR2%NY&10#&3CEhCi6CjrxXNs z$FUn77=~O8P<eAJ}0Jz}iH*8GH zlX8@fZa#WAUe^e+<*s!-d4QkUTk^$mK+mxSGFaBSdMN)qYE`Q;_V;Z@Ci6=+Mh9Q; zNCU0{IkX_#aFpuXcMv~PSDR+b$jwTkqLbWYD`+*-M@k)5he=R_iO5KN8+1e3(sN$$ zjyt(n_zSW_1Xe;>1Fq)hMICJfhWN&FteqD_+A~o~{fzX973hvaAQbtO-uEbcRWHT&){;rZxb{5)^WWARaY-HgclI>n_e!K5~|wu&-9eom@ZTJ@Gvsn zTFQaK!Ku^rh%XjEpN$1|odIOepY;|F9~kp)H1&erbn*UMhKqBzPC)P_Vn4Iff=gH! 
zfptyh@u^oyS@c-|>6Pz?}7kE}5pvaWsJm86~6)`6lAxzXwL5^_8b^kx?hJ|FvDwV3gh^fkXSkmPu_4q*Pq zD?7U^gL&`O?zt2ZEysK$xVT>sbUh-4=sNEsnXE)x|JoOihCe+ThbdV^5R z`EuOOQ9GR(sG}|u;O;+>0e~oR(iIjthB&FuwU1)?<GgKPgY2{iQN9t(yK;n{&h}F@JDt zO|oQamGwrwkIAl|gGCJH{LnI1Dtk{Ytho2Nifu_x9znw@DpE53;WL3&~-jczF@vP89Sjpb@ zq#6h<%s^p1666g=`Wfb!YE;3 zm^T^c7R3w@^nzzNgzSRQi8>WJ)D z^U}}7-Q5aqF=BmmOYDcAf_aM@8 zd#;OyvA0+1zN>W0(|B@qt}%_8kDsGmWRi-C~iF%mx+ zka9ALQ~+~u%;Fyz)3j=R=#tX><~niyu;kU*`|-TbGv@?S8J?w>rHUe)7O#f@YCB(k zoGk$U0U^4)h~q^|iEZmO=>-d{s%CVC@EmGPW!}|(?D6o z1^F!vGOw>Y@6)_NUJU`hVXUa-B4ujI2FxXikm)=g!sj!X(`(!}MFrnIIZ3>9C)q%Qq)-xyN7JA9vl5M-Wbnt@Dy|AL9vXrE+@_C#XFeYxkcf!`4#V6k zh&HNlA&9E^;j+9r$wBBln7HolK8E;T>p(_8bJmmG^YZhVGjbQ~9z%z>qH7L=kLnUt zm@_1d9?|rmQm-Ves!kZ677&N2#cu|!*6{p?nSen&h0$_;aLkSd@Mn$M*p@*Yf)ipwz5j=B6#_#5OBBkNXtIaEUjYvq$sZY4_e zNNzqPl&ZWkV;}Q&O_1(kl}&FhTYEI(&LS(Gj=;>tl`IkR9B{Fgtjh_2j&*$mKXlHu zdtc(AEA=g6MW6Z;ixq(xtIrf+SB!Jo=KNp~wa7`$64^J1YD&4EDw+lNx0qAtS#-gl z_To>56UQMHa|x4BKl~jaNQ;egg8Wi|r@1o=OM**-rIV*EW$2C5J+*1tKV#)XXmk+7@Sn?o))aE`se%VN z$$FA6*Vqf-bpB7^pcg;YLvCl#-b7>Y`QHh%Q{gjXWV%RNo$j$b+ZKX`ehMR6kAHh` zk}xr)PL~-Zao-I>)fm}}>y0Q5B4mja&Vwc*ruOrND;Hiv2Q|45QABu5$b1361w)#0 zOdg?UF5_Zd#|W3Am;c#Qh)bg9UY9C(Lj4S`1hwx z@-ny#MI{ig?Iv~T%kNzrO^zp(_SI3bSI8h=274w6f5sZVKWjloB#LpZp306<9+9a7 zMw_&LbxI?wt2-KfItGe@lT-@?8Ydeoe>_tUIh2R@J-L6NrY>4wc-g_JNS6&i#kuM_6$fe-FSI)6e|)o_GS)<$aEI7TGfZ2m~?AA zk~kFM0k3~<=KzAtpL@t{z8EMx>DZ}U|Mnzs7TICkLeq6UkniodAMIS{eCbKZvyh#} zOo=bWEY*w{SRgioXy?<*Q7=KX6AgIh+6cYo(AiU`d-9l%+ZWIj3v8j&JzJ7G?X?EG zZi*Mbh&0fKKE)$FL6uS-BX?s;r}aJzhwA{>(*)z+Ox6-e0sYN_ZJhkVJw$BVF?melzqFn~EhpWxGml0rM+NsRs|ACp2_9v7!wLFG(>1w1A9>bi(^Mmm0vV z6L^hR1tM(Ie>f&UFwlD#MPi8N7Z`<{YI?SB>U+9j!&+ADw^`(KIIOaAKTS4=)pBG!Qk}6zTzUw z69a$H#V~Tj=AkSCx$P;bFp!XK0j~Q>19dgnG-P(d;rFFGUYFrxUpSpS@i(K0k{8VK zw5#StxDvUBmAxQC-nQVkXx|Ru`d4B$ub>x)`XRR*xz|^w&x2l`aSb4jM}NHAB*e@F zijYj8)>ZlsW$7v8t=vOaLwM*~-SL~RgHG+s*^1GgRw7#uc7h{Bsz6Z#xoE z25$H-B|HXmX8 zd?*%#oJ?BqLBj)a7mbzYwO 
zq(FwKJb-A-kwz*E_&p1_zTY?6!aQ2_hi0Wb!1U$UgkBGBYU`|N;gDYY8ma<1*0aFueHd{dqjW3c`X>qnz25qewr3--)!fPO=g*y?p@u?qV~K^ic2#KKIZkk>d}d zw}93#!Qsg2iH5d2ve0H~1~8wIU>LZQ^v+Z-LklR1>n!1ep1^*V*l?`iK&JUlL5rkE5zd6oT;btL1srP zMbK>;wzPudPJ^Wl3D3;8n1;$wf6=!p7})QksBO!<1P9#Ursovs< zx4I`V`&HE+ka*n|OfHqd&@Rh6P>?bw(JHH=!J-&I-#3OpL^bB|E*Po8Uo!z zlE#C1N3|!0-R3i5rlD7cq7G4Uo&$)<89~6@_4-1!)#uPc8Vfrjzq->&vFVU)BxXQ)LqWtPb?fCZU z#6-?{gB8giy)oABRPdXzkwgDg$+VJ*k#QQtOalcc=H*W5XCT)?3w_EpH-hg>34?GT zyyd~V3?8Lz!)|(W{*FJb{p``iF)D?BZ#p7Xb=`;&&DMtoCIMZJtA8sE#FMW~iOskm?0>Vim&oNTmic;|7GW+pX zrrd}Vd=OkOpyx0;A1~i`WBZAb!s8y z32TyM&8aFaNtyQ|LzOuKuG2%f?87IHvbMl`>O}V6*IVHp_Y|@^tAPBN*5hK-7PW7( z53inBFsX7|GK=0Bgdc(?I;DoB$jEPa9Ih$j0+&g~u<)m)a|>O})hw3Ovt!YH!fi)fGKi-ld_0-YcpMCxF}%NwjUjT!)q6{Y|=8>goLJ6CHXw88xZ=EadwlJBSc8 z06cq!S+U6%v(t5~KYr8fL((a!7#A4qG01fLrnV480=KN@XwKXfI?lZQsPNR>)E?%g zTVsp~6@?K=VH0-Y3JIC2>0z)8o9CG|izuBBZ|hdxDM(sWv!U(F>y1wR{Fou{R>bl> z?zYV4t#bZ#$J`v?A;6ggick2$?Yg1caXjIxJ5TGL5fXa|pM%@@_qE&$P0sa|;O9jK z5lJQ+TRgC#RF}1oNM5~=QR*6FIZ~|5Wu>@!FLIwhtqe0{X!LP7vUU7x=$oep-e z6xtBAw+~=hRx^YWRttL(NmXaQmRXd*lHPDWkoFmNp|>W~>wnmuizC!rQZ~_oSN}aS zBhQq)BA_#0In5W*iAa4u_?s~*DyjuULTG_^_AEm^TSG-woBJrLG=D-leyLAmq2KbR zIyGp|4cE98n>i2WY zNq3Rz4%&;M79&>$!si}EQ{msnXi1(xTGdFDlZ~Q)I0n6;FXKGf{?2$imwGw=P+OZ5 z_9?DPp~i*)_im?(`H+HEnQ{tL$tTs>)9M>>rr5|iEr{Ie7D3rk_qGQTRSU!9Q`E?j zLqIHiIeUrf-h_3@)_pfT{y)MhDg8NJQto{VFL)oWrcv8m?vtu2FrR5GsBd1PI94<{ z{8PTfD1US=BAAWm@Xr@{6SnvY00)2k9sk4#nt({R4ONSn@;*&TGp_$F5Mgbq{n33< zCT?R2YrQ?(qjg(PM=oKhqZ*n7rBU1)O(O6X;<&>D!U!YL*}_+%dDpGT=WSoP+lU>^ zPksxC&|Vl!jk(sl5StwuXLmm#Y9q$u#&VXB_zmxfjH%#z3W(omlu!g7#4er}p5QJe zV=(QR)PX^RGC_UZ(Sx};@|u=%&VrG;Cm&p^?fk#%t}7VXNv~J9fr^bSDbLflErF}+ zOLg~BiQ5NabX;d?88N*s5&qm`5vb%NeivLN8%@@0!Jwy2LE==mc$qX#Gu%>js;Vpj z^q?}1-RttC*evP*Ns9zLGH^u%Iv{;AJ|o=nME4AJSo~(}rx7Td*U-{<(H079L(UH* z12;!kIp)h(rOQXsKZuHZW)80nFtBOiMkWmWTle7@^Q&3xo5s+2av z_}+BDE7q1$KrzkyX%ae8R^a*dGJOQo%Gfw9D%s=6sjH%0N&_Q-+a?;WG!;AN8`bG) z;?54cR;BgBRuKwE^%}0XQ`$ZB`>nJdRgXY|RBU1if%0u*LMHSkqCq`DS@Juy{FrHM 
zB!!SPNK;_mA?N+!<4;1iK-41I-t)V8$(tbD}D=Cuhm23)uO^lf4a4`K>Pg@73A zQ0U9^cu_?I8YN+dCtb#uJ`xy1lpZV>kITbI%)vu**zzqO;n6GpGi$!?5jI<0aF6S% zJREGnu8?nwwSs`jg&YWG_+Ke7D(cWe?jaD`4n_DCkkB>W{C(PGC(017>PKZqo*b%j zDS`c%T9*2GWh*)jiVY7t^3R7Yet+HA2e9xr z`lg#Y58Y+qD9Y+!75)-K69bc>BKxQs&Z)X) z$K~&7N%o9-Nh*lg=*twyY3#bmT}0>aq#Poy_j89KMxsaW+ zA=gVoRVBq-_(M7^$|+xdyV*zhD)yX{CA5TS%(OG>*V}ij9j&~5IgXFeQe&E26UcA8 zH_6p4hUp`mTD}sn#DtWXh6|Qc{q#Ug{JOneoI>0#_&!&Y2GYLdgfU!GB?KQPP25%kbziT9p2XI&|r#4!{)V>AUInKpCYCTvwaX zg6GpTZpY7mh&S5DeRqAd&|c2nMMjHgYfJbmZjXgURO@XVFO6Zv5{VBttT~=py3Nch z-4Xu5|1IS$M(?W)r&8&!b|jv0|GhL%dgop8{C6!SHDPgl%(FzavXO~r{n~!pjPqhS zW>uS-I;$vl)+b{*WL0HVEezDMkD(mYr6aV9ljQy^o@I5Wf5!bJpPc><>+Rdy3ivgS ztNI^q%34_DBJzNU@K4W7OSlt|rP`oB^QCr{m}mymol2brHNS$8@rQln1t#<0d=VG0 zLdif3(k}CE@dMvepS1_GRURyx1f<4uuMNx^%{NlWRs4$7f7oKDU{XcX34*m~bGa-sVSGW;|`{ks998wi({da_+;8dG0d4nsPm2z~#9^bG6Xu zZLIa}tK}pGN>lqeNDwJ9`(iD zaR3e`=4kp#7Bqi4tJ_p?5!&{$BaP|IH#S?IyI~02m_MMiX&r@?AG71|iO(dby$tQ> z%pSUcgpKd8v1xzkuO>lSVzyke=x@~b>uoAl^sNTfYF=~qJ}*WpV4GP`T>TiUe$Wh= zx}RsgKe|8fxo@q(w%^bNECKn~3jL3+b^h6W6CycWXBsrSzkr09dxY_GVs;) z7GY=^hmBA9tP?yo2qBFyJ|!B_@4wmuJ!Y2um)Tlm^;mO$V$Y)bn=eW*Noi+!K25Hz>zAV4RicWFZviB2eGjm> z2HyPuKJD-F%$T8MgzgJdb!pMw+}3~7-0>ZiP&Dc4;UA9@tDRL}dE9F}N_M-D^KD@+ zGV7vlOAzv@8lz>wy~k$F&&PFmz`nvZ=3e8>bn^>uECWv*^x2IT*7~w${>#rDrEw*+ z&((jXn|+3&mV)GZFsD9)QsES^U=^WfM9~ZNpk>ghh&+oODn=SRNdW$m*@a3JhMtY+ zV!PRKZ|%&(d8b|8qoKPVsF~5O64wny-(`t?-$gJTw<;#AuMN`*6x0MRZIl|BHdeIl zC^89#QN(x{Jv9T$jw7|+7Yq_3%dskTU~N9)El%fB4)FZDTnqK4=;WG&JXzj?q zO|s>t-K{cpZ=flDdBb?ga>H{Gn1{@q7QKH8`3ec7$WG^dtQ+oKojL#6fY|Z%e0?pG zP~rZlQpk%-vR`l%zF1R24N4UBLOVm@sx#oS9yP;X?AaEb@7^T3@1D;tcr<@n>-hEB z{zlD4$A@daZ2k5|E-*<>u;s(JGsK|J9OZ8e;~ZZf(ah8&nv+ zb7fnk`cl3rLgx0?7lesch$w|8J5VxtFb(I=dOMLxYZ1AcR_hjhdxm-V@5Q|XW| zoov%hOkZ+IT96DiWH@XG`-ba1>Jyz8a(rz$gzbj>j>7^sn=v0z9?nON$wTBtrYq{vvM#sjot;6x${0`sz9ySi+}#^@vXA^8D?NkZ~7~aRSb!?NJ&^Nhd(6rEZ z*JUgjIEJR{RPmEzlfBhm@=FOJ{ZE5Gos%dP zUAwx_c1i?ktdlN$YrsaK&f(%Y1Bxr%`T=wif}8d;(uIDw*OwwojlS%#nIB#BU7-Eq 
zW8fzpvX1zI!++`5$`dKAhFk|k_3(~=VHOZUVI#(lre*T>ZD zWnF6?QMFiv>|Ye%eWP1HY>2*I*gbqcRk>(-W6PsEJ^F+1`)=W|bAg8uxYFI($~4E8 z1GNw&6wZ8{O^$%>)zL#-Zu@dRW&pT~0zgO9$GV;eAi#$;jkK;*dp>s;Piice1!r&V zdr>^QDR4Nu>+9KlF#@viMic~3C$Y{$o68I`eTF<=UE9NrjP=AnlRj~dZJBy)IO=m!*rvXyy&hPmI#aDb z8!E6?Z!t~syY>0yW5*=jlE*+uC3d^zj~tfEa9HZxWCD=pqN}fJ9fok}co{U=pd~51 zkjte$q0Bxd-rR4bzK1VN;d0Q@DsS1{EZDvU$t4lZKK6tu>M{1AEzYdSF&(OWoRJqB zaN7YYwZ{BKmcktm05FrSkHx-wK07DAvNY7_bo={oLU=eeq+Qqar?;1eRO7jAtjpu4 zHtZ%_k#1zUSv|f3>+04UDNHjcQpbJ$8~aY6^nrLq9&?=al%ba}5PTsR-N7Hd4mkVG z(n+_ur|ur`-f7HMLToyzNv1aG z0h>BLEmSX7-R39O<8*<^GSix#Zu$>5BJJ;8AA3V?_ZBw_GSp#eDTW-Eeuq-}V%3;%v8CSK7bH8jeXWuYHpg)vC zMgoY-=|x3Fy*gxSHZ-O?cg}^%6cD{8(c$M#o?!pD#t(hVx`Y+kGT>g)d%%)z z@yGH4ylt2osNH!NceZ^+i?9jeyj#>l*upx-rZLT$w_*S8K19kcb`Pm$qDz-lR) zG$5^`l)4k~J|Rlv;a9m={FwM}UV4r#avK!U>JQi$m!SX=baz?rIAlCTHb~}|fRaXk z`7(1#@zR;NTHE|z1;aWudiQ4aLdY0p;B>yE751d1y{!Q)ec8PLqTC8Vx!M4vmAjUf zmPuYzT!m@pjV_Ow)~JYh&L=Y3+S+lecXfWj-5Z*ic&+98_swX-as@3X=&6IWv2E$> z+!;^ISILmkvU#>?TTMTF6tr$X)BjVNBO>@!o$ZXwmz%eDq@AE#!2_>!UT<{ zQ-b-|rOZdl3y>XP@4XFAA1rcjYNT&HzZ|4^NTw0^Ou{Sc-u{Q1V_jRB9w9dbEc;BL zo#^;Z-+R4-9|xC>rE(`HAFn>E0VH9Un>lwzR+fv&~&IWDQrsYq9!jMDRhfH$;Z=;mxSHi zS5XswoHVi_nDd}Oy@grwux{rk#d_;>-X;64aYsI*cdR*4oh`GOZ8AqII^2;d`>3=P zzjXm3nGsEr)0l~LZ)Y21qnXcFtJHNeO>`DrEY)g&guM|ciDrx^#fRSyVv;@64Kk#AGUX>s#s}GS zBs-kvs1kn!%2!9XV6g`YY=hjY3@_w&la$04JWpW|z{O7}BIhqkNqQE-Z8{HtujOPu zc`D{Z`z{@L%CaINA~_Ykc@Gp9pkX5;>yL|Wq9c|(3DDV#Kb^|r(1)|Cy|-6wmH)I? 
zd5TmU;a#OvZi`LdpvygocX{}CdTG-Q>%3MH~5E4Mu&yJaVPZw-=&+e%j1d+#k8Mr3E*WMn6M z^EYONag^7&!iZL>O+!_-0{YXH@& zk~9n-GD!0wEfkl3|NL~a!(1j(B1U?E{sS`lYDHqY^yU3?zon#3cQ5N9>%Lk+~fDp}NKv{J79 ztWu1~fFkhlPpk-lOxODm>fWN5`Acs`JfK5h6uMlI0>wgkC~isS+_JRHF%=X_MajuM zcd9u~d+$PbQiDe}aK$VyFIQEI+S{6DtEZtJUxU2hI)N^)KxdiF{lU<7zlw*cS&MV} z32Pm1{JGy>akky_-l+QMm&>KJW#)iOo4rljGo~Pz(o^2SaV^U>enp`(ZZRq=_KA4z z)rQGu#*b-;mX?%{f7%v1BV1s4^C%80p`P>5Nj1^Rnv)r78OQMB79&kWN3H;$7u}JJi1N9@QR_1g8z!T@(A-l& zS1rCnjvySgF2x#ykKJ|xgLSqU=q&u;NW~(C-R<~3eXc@Hvr6DeC4uXRezm0MCCUJb z1CNt~^BZWF`miZo;Aw9=v3KL>?K^;}OgO2zoq8-#DcF;iYpva}@j+pn(!k0Ru)Nz! z?WH|&!$Y|3I=mdwR%L1<%A5_ePQsIOZY)YMSO({B3#sRA0(djKJ_ny)e6irx>l!yr zYm^4TehfX7Sv}L;_dMBVG|cI_k$GY|mFI?`A<94$p+RTOkZty;^2_%MN7l7=9X>Nf z6>K_4dYlJFr+I_XTAit43g}hd5+2VTS*;Web5SG35B52P8-xUB zpN1upk*S+FqXjpj9#EtaJP8WiZX%-Px{Ua8kIyfLC<;eXW?c5%b#7{Fy9zIEGx)Hy z3G^;WtU8G19L8K(lflVkxAQt^R!R0c*cq{HJaOafa$@*fiaT%Hk`qOb0Am47Hb;5Q z5U||nw8qI*El>0@F;+iH9H@0)c^P-^_J>1RN@eLMt9}p~RecgLA~j zmne$>9Zf0XocFdO(7YrPq7z;i*!|933d1JfMbaK;py)#|0>19QEJ6(ZCD1o3hY*@H zdl@D>81GS^AZ|U%p;Ya+l_5@uQgF zKSgEiyT0FPaEJLMf0ms2Pr!lEjVjX z_#+fSqxtTtT!Gw$xlpZSwR!f$P~65-9lh?}KB8QQu?Gj;$?_B~%T1fdkDQqglz+su z?{=xI&+dC`2y~nD6v(Bt5HmG%cez$Gt9l#v^2!KPuY=NBowS?^Yt~)rD`b z^bVZ#&nlnqeiUJX6J`17!5O|duTH^@wre{U2Nnj5-rmG`gu!KZSUd5+oYn>6 z-gaMlifCr|rtsz3CB-;4Dk86~wf5Q>+7uDI-HG%FC6P*-4`e>|zqtU$WhAvtXgx}K zvpOJ_cu7KAAnV7qtc);6&-~=vpL;*%kY^W8#*|;mZ;zfBu9O$+&t{GtnNv;ID9^8! 
zVBF0quk~Ud;dW_JnFWR=Iw#Cb)wd@krdl+ev6(;a>|blB9C4lFH2d<#hd!PJibw8{obskmqh&0xw&2RmNi=;wrMu8q0f-{|T;-ZHR2OtK~nm@WlBeV(kdsU(?o z>P@qkT-;EBb+x%V#d2Kb6Xadt1WvX3RDJWN6&E$5u82}p7K%_*?<0_agpasHJ}P*R zqF#uez-6qtI!7ZR6dCN5q4pMJnXl-msjKZ#ykA{Aas>=x%d%6fJkCBk7LJ{le?vTkpA*o1SwPlFVHNz!_4S zl)U!;N)BY!$#v5miIY$pyz+1`_=l5h;%i!kNa-tf^%Yh|%M#<8QBbB-66NnDoS%%p%u3>*Il&5lr%X3C4!JCB%*`S)7b zuCi0)YUNApZz-=_PFj0(i`)HvtS#}ePmn!F$L?IEy80PE*XPL#I=p5o%?lx}L17j- z8rV|2 zVnb(9w>{;+3QoFya{68EqASL^G*gs1a~@rvr$rouZqp*$`~qzA>Ya#)U)vn`Y$KFDa?zE6z#&5rxGOi{cM+!A;TRO6-{fV4!=nRzF-9D2V=#JRu?|G z*S$aPRa~Ts(sjyR_iUf|zQ}(n*G_k4L&4Fp1Y1RdX=pp61&nUf4S2@sI`1oZl6GR% zAUD@PmNvnO`C08QdVt?m0Ir{@3(y@aM23J)#}Byg#^O9s@<^=EcU0lkrR(S1THT%F zC+V5BhTWhF3c{{bV7feO3QVQ)|MQM7z!Q zlPru-Uy+um%$gQ6Jz19c8;r3fQ3B*+`j!_N_?Q(5Xl+7yGl%)Kcn0Sl?%O?maj-?y z4`GuTrPGj&AA+W_r-Z@_G658X-49gHdv+~GXPlmus-_RL+-SATgMMJPnpap6f+|MuF+W}I@wh?F`;xzHlQJhNB^9Et4pnNaJoDu z@1mUFX{xIhuO^peIki9~SkLDDX>??yiB8h7QilYSvx%mkQOkoVlyR(C24NDRO~l0# z;B^KgD5@e$e>5tXX;)N3^JUCGo<6CUwuep5$yey;j*^Ev_7W}8YP$B=gLOkmyau!=0*wW2B@VIW-kATLlmhWOX9i^raZr%$~7lQ**DX zg&cD=pn&wv!4!g@9uE4cDGHh}q&&3J=lO681M*@1ZIt7_-KAEyTa3SiHlkCyabtzq zzAUMv%3YZmUdJ_4sa8zic)D}%TSe3I9-5!I_EKL_8eCo-7z2p0dG(c8aQAVI@OQ~Q zv^WpeXXii-UaMy7J9VKnrmBS0)veZ~p!rsb9&0!Lz}$m@&Qj^~vUzehXl$PB?u;Ci z=qOx6?tgJyqWO{9+eJ=Q*A5wnyV7_@&}Uy z?&Gwt4Le5_+h$Fc*k3=`-!p>TKTh9WdYBw}n4HRW)@)>Q^tj)!wj*_sEUmlO?1PTK zd2Z6G@RNB#$BP@uIh?}@&m^VdxC zTMfj~RCfv<6V}u@ z{fO4XPtS$mxDQUU;u1FAf*gzcUcV5sCA~>~z=Lj*JTA%vs^{t|i@k?XP-c-<&bR=H zL+!}|(a;~sZc4!`kf$=}_qYRM^_7}e7j(GX?Dh#WT0sOwarJ|CRp_zOHG~Pb4K;SV zma3mLNi+OI9P0S-(4&+Z$$C1zrraSgeMTbyuYyb+80pMrl4D!VqU&GpPlEFh39X{H zBQiY+j-oGhYyPZsMOyfwW^-iC*VZfw^TuF-LoxYv>Mhj!7v^eQ`Y*fgHE{~i?TC0K zdd&~A!7YUTT#0)`&VxEjJvC@Lsf||1{-@E>Nx(wjuAM`r)Oyi&Li|_5(ItN?Ek;9q zR`UUKF$e!LrmB;^tW&K86_R?(SZUv&-|kN3)Q)59hSe=U0U{PNWkrum5`G^Uk(-&Y zO#RMWZ#HJG@+mO|L67)?hFYl(aeBzG3WrOL8&8W{&Z1gp(wi@K9+9QGj*BB4gBw{P zft3+1IU9~gG*a4F(^upWb+Ro8ZkR@{rM~}V=OA1=_yjE$6f5g`cAxn`IdAlpRiF9e 
z?e6flHL`j9ry7E9zq7WGz7xNW21xmn=7^%X1JEH$u;T+9|K$_{SGzSJS3eae@{+ys z!d2rQw;&;lsRX`mj_cwP8$2G(JJ!ga}Q-J(Oh2?`PtUUttG1%UN@46exB4 zHru{t?ZX-(q_|XTfyGuM23*yvYBNBs-Ca5vMyfT;rWiq|nzSwn*}E#hMutmBt7x?4zSlclttmqZsN=ICyfz?z>ALmw_F< z<-8Xjzm(>T(-F^PoDfh#MR;*>bEmzNfw^UAJp&0+7!9HUV})q(Fv4y5%(-8Q`_dN` z;eiVh@ocR4+<74BX{mG>%J!<6xYC`*a35`~} z)d32xd7E}TD#0X@^Mq^_J1z`h7Ikr>Lza?GA({CQ{MB!{0N@n2cb=3@g<8k+W9i8a z_@RjJgUdv77`6vQJ>+MBH(en{KsDl`Ho0RZSX?e4)e#_vpm5aA=Mw+86bs4-?f_oD zd)zE3R43lP69oiO@ml;1s)L2I(&!fr3sUAEP2WgOUETa1ZN(%eCYE&%p>^qadjj!} zApks@c@YycgsWM7V^f#gLl2>|ghddv`wpn0@$k0e-L+l^NhEcHqhp-N71l{;x;Tzt z#CJRj0Xm;II1G8P#kE+25l{kwmQ`?N;_<|=v#-beM!95`>Bsg&ErI+xBm|6i%zLxg zk?xcn{mSN!8|OKiT1-yvvGoyrnt(Lcq1^K}douZajp%|BDMDkC7|LTbu}|Iw%U_thaNO zF)=aZ@}Ir35o_|ZPRKBkKpG^`%xf6R;jZe2$PDee(dCRRqYK_DLD03+S(1-8Qy=73 z_YTT4X|vd13Qb;GjMNNM^LA2qxh4u;xX?mP*b(rosEF5#!Qg;doE!a>=ID`QG{I|g zeQqz+MGe*j6j7+9bE&R(Ge4$dRC+|naYIgxT8X$T($3m~1|EiE{+TO za!&2VL9CmzVI{E3sfvpoKM(OV4KtGYzP6h@6$GU~HSpjd33$fiXEv||;7Osk4!vC< zd_o5)RJ@Ct*sPMQ1y1H&^|R{N@;4q_e9+!Cncr=!OW0`OU4G-9RofZtG9t?go?I%F zQk-WCA7XRdui%*ZvzI>LH(q-p=ySF2hb>hE6RswX%J*|9^kHvfY#f9&9!$ybxul^5 zUZfx;s|0(yseFWS*xWKfMkj2U%-Z)*y|T&t5pzg!Vz)f95*H!Q*Qgf#@Yjh4Dj}5K zmVU8KCUrVQtu`9MW+b>xOn3OI&Up1T*`NBNi3|yz4vA4isLG=)++x;|0iZ6zoBBI> zK8dzGO?UTIHld_C4en*~Hzryy-t?3F=j#sP9z$yRd9)G^*ykj$!pB@onlyR%)k7b? 
z>@p}NCR?@NIhW$`dw?+1SKzK*cA|&UBIKyRe97vSsgHkU@*VeQycR7(_YUTQ+_&%f zCEj0V3p*8bqnF_Ij}6k_1i2>IJFf7t1e53&%(}xWMz69BMW{^e7>K_xUJThd=WyfK z8)5%Y1U~j{YsnJ(ig@JkisHf-a5Bst)YqV)H4J(sn8bXm|yBY%mV z5pWv0!Itkqb2)ZRcA3S!pGeYo;u<$tIQ$sWUj4uUF(|pD_o8p(SLJRX@&ElvA5%bp z`1^7gVLvCJ<{f<}Xo!`<8x)w;5zEAGc91}iA^2$Sd>&2S%P=ZWod6FDOt@MbV0B@W6#=C{yYSQ zu$cuG4_$qU>{c>ioXqHcWzn^;X=@H9Jc-kzua0WGSUX`I=dJBM@z2T#_$1*Ryjl2yx7OrDZrzmj=Fp!%+f7YT1D9%@Iu9;9JW(9gTwYzIGhFh1^*Zcei5I{oC-O(!tb#moHgi>kh{MYhY|i- zaOctMU+;o=nRH~wVK_@FCehfCN?P+P)Yyp@E;Ul)Aj*Ej@`cdU^L#nB_s8LMt=nWzu#foMo8D7noo^|x=ta7 zudU_{%fAmGY?OpUA=2H?f;?4R5B%1~r^9R1NSmOPTfk;2$Z4S<9Q$W`ocL0HgI9bk ze){?t_qzwbcL>bOU->P-}0T9ssr37hLhJM{&D>TOa1#$+G%x4W+ z#3itw`Dc!EppH8>J1jqr-THEH9kQp_FNU1jgdIN#z482~TmVOm^v(XlL%HK;`Svl0 zlaK7Z>;2Q96vOt_3V^Onc#cmQauBGi=lH7tqEikjH`0aq;mT4Zn}|&c)79XTdwTckOFW%)d<2b+=_sHI~5)t9=XXCP&OUA zaOY}OM6YRc82eK8S=~QwnnOOnE_QxufI$A|&ad`x7Wi4N zFHJe&Q6)Q%vi~^Tqj+$dl)e&ogG@8rxGadhdKc48?E>W46QE>r`7H0lft%39#TXS8 z1qF&-&OGxT4(%e-km;lia6m&5i48Dk%-r0{kjY@Wa6uj%K@La)ghQwx4?3Vp_df;! 
z6IymHu}!@dIIn1Y;xnIsA?p6I>84FCl?pG3IJGhXxutw&f971CYAz1a{0^ zKjPQtdswvA&Xof#uORDd6|cgGol!Pqi;!P@e~$bsfNqhz zUjg`bk}fs3kpc$gHTSYbX0ZeEFLZUVX;`~bl_Md2_0<+yvjdTm9B@Fef{RDDFGbh21ot<`lcBBN@wxSyTUO2CqK?&bHfuSlHotcqthN@c zP=3-~q$K6-J&2cO86*mc)=ns|m-0ofH=^zUBmFW2cV|lsvUT`6{D+D+4x|N7{lYZ zx*q$P0R&B`KPG@KI0n8NZCBf6j_fOT{4C4hj>k;w$Nkc-x`K&c221qXavXZH(4a2; zn#<1ZJHlYP?Q}NtW}*zKm3W47{I&c6LzO{@I3h#Ew*#LGi z7q_4%1tHx7;mpP6(ji&Pn5Nl)m7__{BiGpn@^3vqK0%-#w$4JyXC8(iAf`9U?n-CA zEuIz~^R1Sr{&$>YgvwxPojt3lu>7||LT{F{a;*gb&e&(M$svD9P%fS zYgOYBsjdNpBSMU7IMo(wjCar?FCV__F@V~-uY)oVl_bi9YsPO3q!x+&s+(zKQHM1A z6UMhQdrYcx)gX{u+G|$+<6r@04crwd&qH6_ks}=iRt@|IR7xVriuYKLGAmY}(+E9Eu}&oN@!5>9WJ~LTQSn68R`q1Gy;qF&JEDjd z#(Z(s&TnuT;5ji-QU+>W{@grp!ds#F(uKE@tw?Z(Jh+ z{No?jD%jg^0$3Y>G{I1t3|*3cElHO{3ZfucNFRNK!xsYGzW$A=xN`YOi%KSVh0{>qQyv8$g$ zkyqg&!@<*3D;dY;Z|~2 zoA=7{)IOJ>l>*;zBjC0hHPKP{E*KZb7>Am<9|h*9dEbOZw&T0L z?`QMiX@nAB`fW+pwGod%W`VKLQOI}y>qCYChDuHCF+hpn6zOr^aX1_Z#TBQ@ajFgI zaA}@s3gw(jmOM9eat{zM5px0#J_KI8goY6udM*V?r5PqAY_l%qC%wXlh*lIzS0KQX zg_EQh+5frQfDvM>nwh~w!Kr661oDPwat7&l;?Le@m|DDz&>kbs+(Db{n~|(*gC2@> z|248raNCwBzWoA)J%DqP0Iur-my<;Ua*9F4DadFKTnARfP33)XCYt`$#S7%`9s%5% zlE?7b{D9H?+h7}8lcuv@n4feq)Xw@$vB=dw|LprH@$!+6K=Z{3m+KXa3)!9y>CSUA zzPnn+6N>s5k9=&qYQe+Fsd#N`;$+9$kL1jI(2%hV`Q&H&MRIsIvJC&UH-~UeLl9wu z-Il?;$n|G@i&aW;1jr^`fJ}vRP6yaz%8^(L*e+rA%<4Cn#wX#z=e@~ZtaQ4m#v_kj ztd~*;R!eQYIyuxWOs%5az{D6D(b>Cdx!2{JhMQY6k{9g+($e`y_$8^bGw>8bc2bq$x* z&AN(nP@uJis<9ko3UPBN7M~($AfU$iq!O655)^JbtV^{#9ul&$&f64U*qk>bi;+Em z8pI?pTJl*iR_jZq|7`aJc#q+!i+Zd_3@#sA>4i2HtCugFyV<`!oSiCTSr8;=leUc~ z7&zAQ$3-Ykmyh7q)`7ePyBF!fDJH@R(74Y6+#_-w^rfx7DID7Vz#i^=m>K$q*`gMn zjX0<|#>NbbCHJN=^*hB+#?zMeJ$+Exvd>z3$)XnGd>prxvlucuNw+#@y`A&txu4ECZ9)sk7T`nuk1c&~G+d+nar@RI#(jn=XfpVB>MnrI)8v`J zdYolSb+g~8L}m3#Gf(DRr=mf;bcEpmXD%)-?guG4#^wX1TCRaRiwJ+_8;x8d)>hH6 z+qY?r<^2Yil?NEFyQw@0-QQhNUff^eW-Kl-d6h3bCf2Ie>B zkVddG__Bf3Y7HAQijO@Xo_j7cO><5t z@`g5wwX>HF1?d5r%d5S=OUyTQS(*C$n_|P3S-8U<1GqD-6XC{4jdRVQ7YifdQz1SG 
zvynK`B(OkuFke=~wQW~BUpXZyNo&tm+L3sJdij3r+xWiM%~WzNmFOuk*T7AtO~57m zYeC*3OD7%RfHffGI&is3EY>8YS7FHZ0SYI@x^oPQM&us%_7R34p!$)4Y%@jcAVWnP z^aNc1p1mL8Av)O!h9hoDQl%&T-yP+j58FG~Z&e8+xWOw+uH*_0yOY4D!*@#cTXQIL zb-11D?j}ds!AQLPH(xP#?rO0sJUmDWA$j@U8bkM+XSoJ=H{8j0u`%&KE}{=8Oa?#U z_^^!~Hqixe7ws;NV`973NcZo}TmyYxF&)%l z!PHpC*Kap%W2xkOr9Vvxz=arR=f&%d48 zE3gTTIVR}6y*Yg#T>0d4_y`hdL0{@66bhBE5mbZvXw&DFo}R7-&U3m}e!@Zlg7{Xx zIyE&VpDY)i1c7czODF%Vdgso@B0~0zWJumsER_LbP4eWGxxiLfiNPG*>(_mdF4Usx zS&d4^hl55l)WBZ#&Et`Nyi8v~QMF^J>mGz4YBz5(0Y(*c^mH|CZ1Sh`BSJ$NPjlS- zdG0}dgwbg_#QAknyC-^eb+b6SNNF}@N5W4WGX39}_ERMKx}w*B5qGubvYMBAd3iZH zHkNjM!T) z-`>|xyth_4-?}~&TVD=P%b8B-I_h) zKgT^Hcr$7COw)?i?cr{@;dkZ6ojmRi#A`YdBXjX`2JJKDUS(6az(*}{h#kMp?UC)! zwZdD?b-dK_o8IH7o`*oag|TDFj`ob4Ex!s`YqDpNkUep3D%8i*V?p9}EAfWDi=y?= z?H|5qBrCQiVGXsWWK5#6#Gqh*H}zThNUk-^r=O?OC^A8f;b%3;Olq(E+?2fQ<<1mE zYTbG|(nIOGP2E*4cDnV4Mep`F&PvyC&-fdMvM9Q9OKztt=SEiLC-a{xigmOUpME-8 zV3ZuM0-3@51@Z8RWTS1g!Dfc;r6vw>+xMq$Ik$Wv#z2B?@F;rv^ZDT;t|uEJSBWgD zgG)kVw)vC!F-5Ud&sLaO4-_FFZ(S#4CRLJ}wQ4po+L(gQtLX*RW$0lZW?4-3J0E`X~A z0haJxN@!?$|A5?aZwRV{BXY6o(QRta$f4yDLtgIdd-Kd2fp)8I;@{Fyg_vmqj-1)?U+}74hf-&B4@lp#t!awhDy23JoG4Gg zWUU{xS`HMYFZg*BUF7VFToqEvotkGVe2`+@wrGETy5=V{X-Sbqf9D2IQat2j$VFx3 zSStJMbUq-Kfiyn@hj;HpUS(6TLEF73tsh^@(cTlQTkxDWr zeY=k)qbKTCLKD=E>{Tt-_8U7%Ql0JKh32WncIUlRNZ@{f52}& zdfuXAd)g%^Zt=-$Uad=37?Fm-c}&cP*R%w2$(-h|K9wHm zUQr2HCTK%Ma-HLc>wLWOq=|dQ!#$5beVx^I7w|wE$4g8-cvX6!a;}oeLM5(S<(&V` z!_lhk#&VVVVN>IgQuz1I5#6E=$H&(-r=DYf;w`d7e(LjewDP{OysPAvFwOVMt6=9+ zT>o0kuy=s=A;r1${hx6w`P&%w_>+CA(W2M7 zTYtRQE!b6@xy0l;-YV9=6}`;ONS?dq+(rGW#*5iaLpotsHY|j%AUAAoh>2WUy6Q!Y zlUMMrc5?h!;HJzz$EE6OR&m>O$-_(+uD1A0C0wC*E5jjA#gmphM`6sr?NGjMN>se@ zsK+UL_?*jrHHZ6J$%^>1xW`e$)1`xq@8@M|dvk$XsK%UNuWMGOcDPQBskSoPa&^$7 zu5sSD;+%=2tBQ7P!vN6YZs>iXHRYc+a!4GpyaQ%G*0@@=C_BVmpc#JboO*7|-&K4h ztgZdDc|k>o6@Ok*bvW;|8jtk$-3ygI$+8CRQEM_CR|h@BCG=&`=HolSQ_bzQR%#&J)v^; zt%xF}f_LMO_tE{*fx~gi=9AsE?1jeO(_1n+At^gG;{k#=4M_qr&g5#1krdpe#U{o*RUT-iH6gRzeLz~{y>@I$@TxM6&n26x-z0c+IoC(;tu^n-PV 
zz2S$BjJN!rciI)Ggla3xR35jPF8SPcckO#A2Jbju)S6!Jx8hazVI`h9(VS}4`PQbB zO)E+d0}cZ9JT|50j7==B75Z_{6G#_Z8c#jf;TM)1do5~VwDFB?fj-sY!d6{0!3La> z9MaD+czM)$Jr4-8qYczI(Y>f#T}fJf@sZr>qie5o3=7;uQya+4hvkmlNZV1HOZlju z)-?Z?r4nb*I=6mz&T_rZXV^R23rGhbXqX4@H$U-%8x-2;-YBn4`v+k}dIH2nw>qO! z97C@?OjFL7_Rlb|p7Bo^PNJ}Jy_c;_I~zz+?ObU0?ag4H_!9za-k%8+4|Q^_Sz2#i z8Hr&Lx?VP1u3BX28j{O8v&|cH%_osH%8|>p_+s^&jyJv->keY^YVo5cwr<&x26?m6 z{sg3}O~REZE0r0d+G{px%GcAiFEu=vSGmO+eV?v}nUs zVvkp^wYnc36rN17o{Z44kW-t}44)rtd(l)PF)WLD*)lo0b49{K7erP`g2PCh!)P48 zL@w@7-mJGVKpjM^#z@q%uEj`*O)U)l>@t?xuGF-unRN<|$gDq+t(zvQ>t5yBmNY`W z9ZT(xIA6pYJF4w=t9q@s_^VaTosJ!On7df1YbBQveN5PB$~Y-v(km_voS}1-sF!@j zT@_nH3>x5Y-Caw`+ZQz*ekTg_ip;~QuC#Bba&-8IDI7z46^9vYFzHj#t~EJR`vVW? zhbqEFSbN8NEmcqtciT@FTW*&d^3QA)$MKtbOU|YAkvoR|kay@3k{KJwo8VI~bLjF& zf7nwo!}r9XbhbEj_-29SH-q-@s)NjmKwKfyjA6po40Kj-8j1}*>v_FIW*tWn^kM}s7$K^c3VlG z6U|?U1{@L`4|wms!y6PO+iMxarf&WdD?(i#>P{HpZc-%Hrb__(#?XwqOGrar_RW`i z0;&+As{T54U7mIk zx#kTbj|5%G>mjXd91)_WRqSdiH#52cE7c6pBTbH%6#qu5@Goy&1Po5@|p_G)kRa!0Ri1-%pv@ftvx|IN-%N53{HFosD9F^p0U_vmP`FgoZP#x*NWxNr@ z8@ABsKd^%JFKe!JuN8GEog{S*`7^@-z#B3^rI5HqUB#v?o#C6U>FCI4eV}_BOU%V4 zQf;K4ZtZ~W5_`Tu8&>U9W%9L3scjpIrPRv{hI-cZ_!xh)y^&~-k9Sbn551Q44wm0} z#IC$%RpIpteESMuf46~o@E5G%k1w}Qgcsxgp)r6hf8POgMPt}g1v~}@YoL&924EeD zkw%rJcM@ObeW2x$`H^&+c_Y)sSMm7SOLX%@10GLnAUB6V1a}}`qP<^Mf)*^>dE2+N zn`}B4U$FQs-FL&>nb-?7Ta$2KVUb5BoeL zf^@EGo7jPS*K&c(dbn+D^ee6ajqml!p6DtwdD7?y!xhCA`ok&IG)8Bt17-*D~_jayCf#M=(??qqqvA1wp?Ysh}u~ z#5s6cu{#;sbs6P<8akLZQ7bb`y{j&?_^eG#0^39IpRY!v;QaC|JRAUBP+-#sx134r zkf~=kKF}}F38Bihev+`F?HBtz7h((>*z2`>f8 ze1Ss^N$eEsytn2*xw>OlyD4p3t9_?pAE|^QH`ojh&mZsr7zRINhHX|b$q%t-I~lyy zu(45mT-B3h0L?K1`i+#yNU`3NOoGDO*U@9cM<(hXs3%!}zS`wVLMwEWt8$*F%P6GT z{dc{aldITLBi*`VDWM*1nmV);H~OjcdwmFOf+hqYJu@@YkwY-~X8{^oj}drWnZCqR z?6f2xCRL{(EI`~0XF(JS(6?lP9%2UHs~Vyi(^p<5v%!X%h05$?~?vGN8`I8ee_;!94!gm*F|OjvSQ*qQ2RiA zs8))GdGTR_6f`sn-Fr(_&o>bb_fD)C7wlM_$lhEN9 z(Qr(-)izd^>npUbVdcWWjOd<;?#O*?NFwykwmcReKcKIrzWuvwNAGZiRV}Qk5fJo5 
zv}pfPeu3>)rVr(f!Hr7`SYU(zl$s~V@N4PFPWnJ|pv&VEK35|!H6eyHBLLeGiiZj8w-H=^YvJ%h#@b)iuA8QGODLwvzsbBsftxat;Xa+ zi^C)BH!Il_9;LF&Q`$!ii(>xaM>wYtlJyIy`umb8(}W>CaN;Dz4(TU?_d$?13EX4~ z!BP)GddDuY_Z3Hl&3#c(%0rt5tj;Ik7oHp;oojP!-yJFEDpCHQo+;QP}MisDoT|jL=0i5R%E-P7n zhc0M~k)BHC%V9Tj$dG$(B3lrawak#t&ftQtR>Xe;sr@Dd3MX6doo607^K%2-326e7 zvt&ry%3llo_h_NcaoJm(1|PaDI1zjwR0-HpiUk1UFzHKe8Qk67{mPR6%LfEE0uX!E zHyR8zSuQKz4g3J*EnQ`C^v`@OpeqpsYr+q~Szz#hd_ut?QCSI=aZ*Alz0vGa47$(2 zjdN^rn?einrHdle9^0!+w)78{pjE!S)ka4MGq=5ZsMtmd2EY zW|%02?9%`U!#ifAPWMN;8}Nw)VDBuy1{D3m&#=Ky*DvRj<6b`bwZJIA9jI)VVOgFN zD855243<)m5|Yped{J#M!yioXKgrq*eh%&GE|caYxDx76>tO}iMY*9Vz5EQut-ntB z|3ooerWUU&@orOuc8A(e@17DHZ7ok0fT*OmQBS)14MeX1lDuf(tx}`nH6^p&lhRJR zo(RD8PrF+yMl+qs1%P_-)_`8m1jq%c@rpyDlz#@&FLi6$&wd}GUwif^bl{K}!Jh!2KtGA@bmYMrx#@{21(Q9qbKXLvEd_YV_xI^XuZFlY`$S z;K}D>C#emwzf~R*fqz^}|6EW$&IleTQe+UDPk)BoiLXKnumATG>+<1=G?fq1&LZn8 zo&Cvf0)!>hY{0vV!rCHzqKz^jw$zh8!q2KM2% z=_Lc?S3XY=K(tC_K=U6@tkbfiIdn+q5=!d&&EMx-6<&;7!{8|PRjkp-wZ1?)3u0G) zjTV0Vt3gVf1g5HyEL!pZ9X@2^97AmGPg@+eU_*dJAu>ZH{$k#Jfz`{t4JnlY^ z4F^n-W2BxBfbG9cCY(1{_*KrxdtNNOrw*Rj$XfpwO5yi{{ijC+1t3x;Ge!u8;`( z8xzO~*nIGO^C!acpI7oe3F{|tms{ls@|QlZ;4l4&u2BDh9{#wN}>0Iw7Rb9a3J?*m2V+lG*ZEdUf7Na(~`43;Q@47dD4)MF+sV9TvSZ?p~!^`Md=u5z*puq zML$Cwrgx2*HHjqnF%L45qP@@(G}HCb@j0`%KMMt~NDZ0j(d$XZYZv7RR^K&Bv61x0toyyXOLFhI$JrkU{*`2NN0T^Jd;!%wJH$H zYo>-L&v&K=WK$PyGDOGz4!{oK{&~E`afzng@!LgKUO44E($dhKVnyf|s#nf8nnzo( zI{P4WWx(Ar7yC(Q)9Gm`qQb)kG7zy&IN4A0NOh`7A3d6kpjNl<$DgvqOnT5N_)NA$Mna$ZXW<*DOLK)v7I^2t zd+(rbuTSaDIL!9nlSQ!VrtHyyi6{Uv)#9 zNypwEzkRSB30m1A)5dc(sB8^wwF^f6hG8d_yXBwTEhjl|;4Id#Z`*l&F`ChnfKumC z*FDpmG@L4sXW*SyOuE^IE)vpUes5Rh`m*b0yUpucKWoSBKN_%iw&&1=Doy8nG=r$@ zvHOwCzu*4{;^zO@lb=I2@vWmGfC<1{-f1z{(`k5(#-!&bm8Nq8ZZoX7;#AH62fqAO z+5OxHNv0p;c=~E*GcwQGW-264=jH($J;G9J2=qlpmKOorgj)O}+o{{LLePR+{T7M9 zxJDSZ0{T$(LnFyw?bL--GH#}$u}b6BFJ!%lr#(CN5_i~D$>?y#+Z* zhOUnoaMU{5Jk;_mLQh@Pn%k=+&C7671u2S2h)4nl^&|!!%g}m)epNxntmw5aVdra= zI&MMg%E4?Zo#O@8lV{kW%(6V?64bRbz%Y6?*`)O8e8*ReLLCe<*%y 
zp9xnWwm&PfP_C+_W!gXAZGJl0J>F4m|7$P9j9CPH+Z$2OQ?Sj||vJxKSCI zm>l8Y8fyWV)5jU8IpId=*O+7;(55Z zP3C`Ix6A+{hdHOqq)22r#Q3vQp1TP$8CLyoid=h`@2l{mtOo0BV^t_7+I8rUh#m9` zpNi~p14T|5AlMG>xwo9i_GaA&xNPRSrFfJUSkj9h63Fb%{h<8LzR^;S-uaAs!0cx* zM{}I&lQ3x99G_$!=N5|T1F z79eg$iKvnr;%4BT{$jxB+SSi7(=`=kvH{Q4^vpL=dLDD!SPrn?5tAa&o*UN8Z>6lW2yQMZM`Xs>HnU@uJh@&@;Qn8G-x<|Z+O|7`13D@S z77(dQGbpGOk*?Ca^j;Jxp-Tx6DI%gM2-2GYfk5a@dLKoRA{~*?L_r8GBSku#d*jT! z!}p!F&hN9<>kn2`2s?W}&;8uxDiTK|*QeN9&kAKivnXw1!I|zv{@|E|+9Bc1TZSs& zG*{Dvh9;A0Y~-GS!`?=bG0e$QXKw21q83Ba@ZO^s-C*_@zdw5nVx(808U@tzL^Qtk z49n$lwnK1~yb#}7>dCMpXup4+OY2j!cr`AcCZ+9*&wfsrNy-LrT4kBl`?|m}7su=3 zGXg%Q=OVe)uQ*s&S^LrsOQ8nhGg{fBvlb)AH$9%7V0|yZt9h^d1qd5ww4RO5qO!e} zARd{gcWOY-O`#!^tK(1S{YzI%5B!NCj|_`lFxH;Awl}T3FAbU8Rkx;_yvT{^Ur(7| zJuhld*x;>@*5-;;ShEm!rEfe(ayIgcq)%{k0JAuRl#|HEF5t4OO$z2Moo=SVQ3of| zUr%r=8h$klA2fd%A?~WorjHo-7hj^aaOB2Whb4EYQM(g zOhK|d#;;6{)U1}Cm^>B0A!@M3=(Oo~&x3+fHk=Q;Ig^e}N%T1$E=za)`fm^apgnng z>5ESOA+{iKW-5*ScP&r-`u480MiXVy({nFbD1RM0E5O5cz%l4tar0TWV78zK_m}GG z*IV=i`r-{bqoZ}AO}4^Zud2%G%F1o>SDzQC3ked&LvN2ZtL6LGCZ9QzvUv5vmxg3 zmeOwRz9w(jh$!-0NBUGNb4;}l0AP`3am?U*H@VN@@+{$q|012PL!+Op_ju6VCd;+J z2m#VQl!S9{g`(TRG=K#wfuRW6NZbm%aS^WB1Y`yo-#BrGM%lyefgE@*mBF*|8k@tu zPBb&pttN=z2{`pSRq_tC0bbdZj?dV}K&8rKJcwZ7+UfuaZ2YEw)a>R!$)T_&_E%OP zBA`PUUl#r^ZZ^cpo@H?!2m z4Mr{SO;xYZefoW?=7_2}>==IA-gB)-)0bzsQ1m#yKt!PIiSV14EFQcM0 z5wbf(xhiI^W!271(e_y4k=57gWkG7;*3oB+Zmxlj+C@-aC@13ZrXz})8*U{-bfmWK zHwF=lOH#4{*mrKwnF4Gr+qPZ*{oyi2IV`&{YG_rO$dkend5$Sg5SB*41{-xK43_ z-q(R>iN#hS3H*|^IALhWnABg8JU%&@m=KR~*CUHwpHqczevkVRJ|W74AjLu3>W{AM zzl0rb^9W@1p(5@!O=N7^SWQg{GgDee14)UOn(;~rQDzs^{Li^Z6ALBamxaRab4YS1 z`5WR`Ts89WayqrRnP$6_MO*0l%qFFo;#gCn>c-qi?Y%z(<5kIAX{i(dN{mW$svB6C zi?=8UZ`86ia16GxDh!t`AN8#U16ipAohR#c(4c4b0>r+`hN`WhI=|eq(1g5L+7iJc zLgKFfgd+jmv!Jek`)&ITAWh>{Gx@JJ_21!kcolgqSnqeDn%t%@TXfe~Lbp-C>Zc0z z--SL+kP&3tckkJtktWp5%H8%V7|mW5I$NaS(9GmoijtyZuryeUSpLjJOTIDut}*hH zT=<`6>NruI-o7|ZKAZFCecLu|q0KX?%lA0QLW}F?0Q?vOag>wG{pm+FCs17-D0)+K zMLqZ9V4H3 
zeA0FVDyfY8{QsD3WEp-VUtM(V9_eHC%X4_{0Cwk;{1@hD3B9Zi7^!8Fyerx39aQ(=M(ZEI4 zf7dbB8Z2R17-Ozw9zlxm1~LVr_x0v6b?{4WE~RB|r{@o&?(|=Z-gH)QN%HB?PL=|L z*9E{VCoD7@(v4z0${<=SFTZ+9 z%XZY2v(#h11xCee=Kb%?*Ket)sk}OAkIlpPOe?&b$_c3sA7aQsl-~hKXL#ZpfFpNjsxu|SklVxNn@Lc0EDDGM17ROxyQQ02b=WyM z-@1f@=bup9CM;OQYpjy!nv?5^+JR!^8m;`wZ>P0<&oQ*dUW^6fgWjd^UP)%owT?nSIJ*ysV(ZSRu{<*gtbRPj_bCRG26MywErt9QMNm!l`GeL|k3&-#0?Fs0W$Nl=MD3sOHq(PL$#(#?~- zT~4A0`7UvCaz5FnR#Q+o(Dn_iM@RJL z*3F#OE3vH}|8PZJM*y^gNqwz8?54*1_5BYTBqS% zbE^PUk4_L87D|1s-Ap3@QyD9fh1aUV5sZmZLYSDS3A(6ON3avHxzGMW(5mf}K`l{S zH2s}`@DmR1M_O(&l8pA$0E6|EPPpQ5I^0JSDZ&49y`Q-tRyc~D?xpls;w$vq@r8~{ z42aB&fQBarFYgCTu-n{|H2JXgBy&Uv1$rKX7z0CsneUn`)XR7nXd)&fgO<@TF)aw5 zoSVC;Uy@gupjn*gR)=-4PW5nj26y>!-`#}hXih^@z@~uqKRGimx-|nZ@=4{5?(CQbIDvG}H8sYEeG86E(Xo>%tXJAJre2%m_n>ajiR# z&laM4xJJDgR^##Cs5!nSou_zZyto5XaJ>0JEzJ}_KJ!sScOA#7)=jqAlI$g0ZL#D6w14i)l0J*L=gor$4{`_y zXhEiFUwvWgrSGVnQEN+!cfZi`bl;&I@vS6D-_44GymgaEP$;MtcStMK8O)uqF?z!w zi}%;6jp>=;s(sXB_rf{iBTRjKkJr+5l}&#yOoZl<>|9A6*4M^pS+vcu8;dqlR#IX@ z&j9mo)-GpTRkIv4+z0a#$V9!tgxJ_gyByQlsdJaB6+47$KlYY=1lxpJJ0?G)QZ=G( zty+2F(t3Zho!*vM>dH}~F3^Tv%-A|a#-!KN6Lq(Sk51nDeXF*6gKulQZzIo{EKfm> zE-}Tj(yJY;sfP=yye&)`Rd&eP8Bw0eSV_P{#-vT+j)MmBer~cssW>;8u)Nt>Ha#j1 z?_a+&#<8`%5JvZrY`fKdgnO+X(3sQY*b!II>iWfBA4-!a(c&AjZ0*P>h*UkbBCIUN z<;LPFCVUIKugGas(hryoK-wuh$LDElVmGC=nwwf&|BqhXC(X>TU5a8>aTcFP+e@Uq#^gFNAr>V=1DC-rlagcmko8)&;eD- zEvy~RoYzM}CNxXN9%uIQ{*_y34CrNwp+;5%#?{Om*W>b0u5ijHEa2KZ+hZg~ga{N` zknnlq9{YQp+}Z)k`_8aeT5^{iO)Xu~B3l6FspCk^KtI^5)MFM(0b0ES(CX7n13>1l z><~nEH)8fL!c6h=Oxz-UCi5mySB&{F@mGM5u|1JO%a@u)eo`dCWkZF``;EE}tlp{p+&^{88FZ)V?BKbJI)irDa-!jB&H3 zi*^j}HfXm@I!AvOQw*@i7MzF^Jdov=Jj*fnynpfPC6e1K%)T4z8|n4d+jzgPOCzmn zzw}Sn-Q0I|z1D(T#H%BF!(+kMFyJiKc)7YQSunE;2m(8;&wXv#6zV-F06)HxLWS%$ zS-E3ZG@)Ql+Ub%2u=tJUK{0X0BA3+Rf^IU>LsS#G*(kx{j?^*4E{~R2w z$_h@pQN~cFW_S|>LU7)Ob;C!DeXt0bD1&Eu>K8F}X(M+k^cd0K!unxuQKWin@y4J2 z7|%R2Czceea*Gk3o9WB17$083ET9cAtmnEue}LUk8swrFmW890(Dmox{p3jV`cza; 
zu^^iV2zt->Nvi>?a`9Q!@0%}&+ijT#7PeNt)w`ZPTr9xz7QRo^*7_TavybZ!!D{50$E^jl2@|k#bmOOZsTwXEyiSnUG%sS;s3U;DyFID%&%~C1kKDdqvp!$P? z1C#viDBa=i@HDLW4OJql0JrF`n7m#l5QtOb#9!ZMQ7opUOLz=O+{**ZmFC_D4O{!r z4We#S=0mc9rgoPU0&e&>&%d|M2^Tla9}YhxN>mO&`zy+$NkXMK{Pns9!eHs)YV`^+ zpSkIA*pIC|r|zsIjFL8JbIhNAG}y<1kwfcK2FyxM(Oo!o$7B3K*?q;7;z$P1>s8Faw+&Q`*aM2b2Lt4Sh;!@ zLfGjc63%O@=&l6r{>R^kXC4LLn96aqkh|ekcFB-59-&i&roMK;a9eoYu|;_1E$lukVbXCo|KRTfu)bFcgyXT1 zY4o4A-?q-RAIh5`sXiPro}qpwc_!1LG_hA%EU_d_`r`e$e5urp{#zqhad(?)43iCa z=x}A-T*>5L-I6#-!>_EuSk@0E^%a>}*B8P8$i9VsF;g6@I=KPw!lRq_> zwqEyulM(-U-(txnjXaqUj~J}1wPJ1CNbmXZe86TO#oy`g2p@S?u-W)Xh|_hb^ylem zOfu2-o#*LkLL5(k2_-Ng5ALJMrP3y1uBBS-?QZ(pSoCDMBy}Dxk=UHd+=rR0?dM-k zkdsfD_DH3YU%J=V-pjGQPVy+BQYy(?FMMQren8dmQGFg9Uk=pPQ3OoHbZIP{hof%m zJYsg`tAlCm&X8HDlOEad#UN-jI_=ed_BHun6%?7<5xc$@&od@ZY*i^cJ1TN`glw~>soEe!iq4~ zDQ&JaacM+(FxxL}X{fL4kt^A<=3;LdkpcD9DJ*g&S8Y^%W>iHYA6qwNe;6`?>w^QTurhw*;WOXMowj_Faet@(b@y(t&cH8{X?2~^bMCGM4>@4cJa35OCB z>Ns4A9m7-t)w>gUfuYYfo;CF&@&xEJL(s(0+$y@BZ|^vgn5@*5un>I0PIj^*nN`<1 zmXR?>xD~t;gmpgjl&0g4R=$rSKMKj+KN3@%yK|vC*S#BUjdWuRvxUK%-2f4j;y-pA z|IXEqCwVeLZ|zWB#OtsRc0nJ&bnzorpASvfx3?VR0W=K$wRR0HGGAmfq<~4>F5bPQ zy2O7{xn(V-!`+VO{aDE68Zg=k-()$Ue*0py+BY_=)BJ_y-t6VIDfUI}Yw<^a#5Pv- znOH|`HBz7?Jadg}mzpa;ayF(lIF=qkh?29m8h&sjX)5zKnI4auc)AnI(jGA@KN&yW z4PvN>BaP}>${hsVTgMjnU4UW56s7oKR_eOlg^sL#k;qzhG$uj5`+`Z8U~c#^wZ5?b zF_`|TFL&uD1oYvBmy7R@vPupuiKtJ~MfhqR)dF{i;$&|QZ z=At(RRHG-UuQF&6U(QD?Gzo+ZT=p@+au^k}`Zj4l2E|W9s%;gg+=mP??A$eT@4apA zb9`)f(+NIU|A`B#XHLKq;@NI<9 zx~kS)wGjN}lZbQPzbi&fs2KSqE@^{+EluV$g|0fD8}Ef*Z#;AM_xY}_h9D}1kStJZ zT7YRT!z(Wbt!wlgwZKzWO;&RpmC2?iJG6vZ%8hXwaqRf%C1zg&e?bQR#(UfEZegLX zElaTE&nU60R#Xamg?uvHMhib`q^3jrajYp;(Jg$&Iifw%(LE$m&54b{!;Ds~E7VVy zaK9((a|qJ4=XV)bus~F^kY)YX7yk-?=NyZD94&{;_@d3s|Ee*yq`RFeZi@TaShj@e z$ZbpYD~!pHCSVHK&8d#E*tc-Msk@)=nlfW@DsoT`H7>r96t(I6USD!U9P>QWKf60?lng`8R&gsgRlQQB2 zP({-CF((k#mWdg(jPSOy&=Zu{e`+SUIReKcb+oR$9E^4SQ%j3ff<&XglACr=? 
z{mqF*LoUgPS%IL_$dC);Pe|3@`|g@J(o||n+oeDFU!}bY4Fm*ct$FjNxvfS^ptX|W z<9*Ti)XqQcn1lkCs#mv|p7*+>&7(Gx$S;z!;fG80fF>R4Ry@7#cbTyIGOOE;M^# zQ=%s5<1sqXNiBY08pYecprD{mYZAa-@r7Yu zDgW5w5s4h3|DMlqM+H)g<0et!$IyR8gw|B|mdiNo1tQ9WD6hFWXWBYE9aIbNXyC}c z7o-=Y2SM;V_70jJVk3<_MV0Rxh`bk!q9jWQo}yREhv=TPy$-88X)-myW8(e&ZOoxo zquN;U4GKUTL?Y0L;$LvSs`QlVx%9Q+2fduUFUZnXfuuL)f3fP&<^nWrze`q zP-TVq^zfD=z8`-tCU~`HwiI;SA8uGnrbgO>GA=`{^C!J4t{)sRvoi<-q}8ZGEHw4? z9l3i9{Et7opk#kjy8H2u1-?hjYZ{DR1@{8jXL1AB!ZxUr7|NMC>_n!d2f8P@QF*tQ5fgT-Db0|RG#!Z9N(ZoTXi`FdO zpl_v`i82JSs_;kD?4xTA2L7k4kxm{mZj=vWDb0OCMcG~3#9eetwRLXHiY{-Iea*?w zv63q!#^(=8`uAyJlBee#d6w3|s9TfnqN)AX$iv^h^O}$5l`j9d2G!hc-@f1d=b+k6Y3$*{-8!zh`?#)^{D#oP)<8As!8~83g9V_! z--0|DueS+EM&r72o_fwh0FLMZfykfU!yT4f&sN?1MCd88Js z$HyN&Q>rY?!!OxzSFr>FCvuV=wdtwcNs8Q@IC5H00nuvOQTCNHFb6!_LV5j`s$1=2 zbH)C93UhrUgS37zE8%N+b0;x$om#q1g?D7stc3ULl~C}JhdHbJ>lN+jXE2PAA}Kt> zTP<^@TZ?sNmD_WZDP~Vs@T}TIMoog z2I(&;WyYLd^5Gu5S%XeKk^5&7VU$&T%m~b~cCR@0sm5QdY9 zU+h;LXG*VamMSZ#(sfkwa#l)fDLQ_gV^df8)9K*4R=^qJm~jb4ue0<51#}4mjw*E| zN$h)-y}6?Yn(rWU2R>15bEl9bbExhKhftPB_lhz){)E%u#kY(rkjvvVb!|)!OB6l_ zF{@$l*H9DVy(DJ4BLye>)LId6FOHLY?wMk%N=~Rhwn|O=!gH**XIa;KGD|qfBelzM zn|Wusz%TlIe#PsZh@To|e?RYQ(NMB#S{{|4Mu6cwZ-xXbIV1OApD3(U-r_5d8l2VrWn zV`dKVzwWCmOvDbxbKJqNWwi8Ym{-Ml-FWKpw4|6H)p|avd(=g?|2}G9BS!&^4s3yh zFENBsSk1P#P%1n3hL~t`B1}QNVNqt=DA^j4R~Ab!kEV#C^~r>{nlTd+>PJ~)Beq5s zd+)XDa##Yr5l}Mtdn@xNy~N0SA=+O6d89bR`V$x3SkM4s{fKyBs-1?+d64!{0k{bqB+t>CdZsw>Mv1z=Fxud%IbFHfQ@_2Q1EvUylAv~(sq6gy z`5AuuPlcKDi+98lK?Jh~_-x$w64M6VRhdigGVzTL3V@ zdpM4nhS73zj|yHH?6X3nxy=UOb>W&6Z-MtKKZupZ3p*u^kB?s^W`il}`}@D{lksyp zFWpTlTM~z5k(1W9>?z(|U?$bISON(^XpHKuzgP0l&|HHc?PJ+tkbs;_-W2QFWuaVM zYE4l24Cr>DZ6MT4%-lirS{-8F?6-KKu<0 ziYH|8KGfSKaR*QhpSah&m&QeN2|mH)#OJ##JQ@N8@p}FY%T$yk@q4ybr;R*tYTbQy z!g{QFa*rQilJXrA?p7dp^PE4w`0Oxzx<}*}SN~}Gbb|9&YTN0{-QirP4|k$IAh2a@ zNVaD8FHb^)dg^bbr>ED!LA5*$;#phSjn|C!6pX(%A&4wBCI3B7d`|+!_89}`w4>CG z-n~HQ|70o1D<*`vaur4SFUHKC5(-(;g9wg)>wt#Tzn$o zjd+>w9NdbxZ{Lo#5qP%uK);P@*z&uu%wF#_|BBtIjfG~_@8MWdGuNSCI)8ybin3}l 
J`8UiT{2wlc;Tiw{ literal 0 HcmV?d00001 diff --git a/docs/release_lifecycle.md b/docs/release_lifecycle.md new file mode 100644 index 00000000..3a8533ba --- /dev/null +++ b/docs/release_lifecycle.md @@ -0,0 +1,63 @@ +This document presents the Aggkit [Software release lifecycle](https://simple.wikipedia.org/wiki/Software_release_life_cycle). The Aggkit team has adopted a process grounded in industry-standard best practices to avoid reinventing the wheel and, more importantly, to prevent confusion among new developers and users. By adhering to these widely recognized practices, we ensure that anyone in the industry can intuitively understand and follow our internal procedures with minimal explanation. + +## Versioning + +The versioning process follows the standard [Semantic Versioning](https://semver.org/) to tag new versions + +Summary + +1. MAJOR version when you make incompatible API changes +2. MINOR version when you add functionality in a backward compatible manner +3. PATCH version when you make backward compatible bug fixes + +At this time the project is in development phase so refer to the [FAQ](https://semver.org/#faq) for the current versioning criteria: + +### How should I deal with revisions in the 0.y.z initial development phase? + +The simplest thing to do is start your initial development release at 0.1.0 and then increment the minor version for each subsequent release. + +### How do I know when to release 1.0.0? + +If your software is being used in production, it should probably already be 1.0.0. If you have a stable API on which users have come to depend, you should be 1.0.0. If you’re worrying a lot about backward compatibility, you should probably already be 1.0.0. + +## Pre-Releases + +Refer to the [Software release lifecycle](https://simple.wikipedia.org/wiki/Software_release_life_cycle) Wikipedia article for a definition and criteria this project is following regarding pre-releases. 
+
+## Release process
+
+The release process is based on the [Gitflow workflow](https://www.atlassian.com/git/tutorials/comparing-workflows/gitflow-workflow) for managing the source code repository.
+
+For a quick reference you can check https://cheatography.com/mikesac/cheat-sheets/gitflow/
+
+As a quick reference this is the diagram of the branching cycle:
+
+![image.png](assets/gitflow.png)
+
+## FAQ
+
+### Should I cherry pick commits made to a release branch while it’s still unmerged?
+
+As stated by the Gitflow workflow, release branches should be short-lived and merged back to `main` and `develop` branches, but it can happen from time to time that the `develop` branch needs a commit from a release branch before it’s released.
+
+In that case, a cherry-pick commit can be merged into `develop` containing the desired changes, as they would have ended up in `develop` at some point in the future anyway.
+
+### How do we manage several developments in parallel?
+
+Sometimes there's a necessity to release a new stable version of the previous branch with certain features while simultaneously working on the next version. In that case, we'll maintain two release branches like `release/4.0.0` and `release/5.0.0`. These branches will evolve in parallel, but most of the changes from the lower release will need to be cherry-picked onto the newest release. Additionally, if any critical fix is made to the newest release, it should be back-ported to the older release.
+
+### How to create a hotfix for an older release?
+
+When a release branch is merged into `main` and `develop`, it is removed, and only the tag is left. To create a hotfix release, a new release branch will be created from the tag so the necessary fixes can be applied. Then follow the normal release cycle: create a new beta for the release, test it in all environments, then create the final tag and release it.
+
+The fixes may need to be cherry-picked into any open release branches.
+ +### Why we should not squash merge when merging a release branch to `main` or `develop` ? + +This is opinionated but in general there’s quite a lot of downsides when squash merging release branches, see this response for some of them https://stackoverflow.com/questions/41139783/gitflow-should-i-squash-commits-when-merging-from-a-release-branch-into-master/41298098#41298098 + +Another big downside is that `main` and `develop` branch will distance more and more in terms of commits as time passes, making them totally different after some time. + +## Reference + +Comparison of popular branching strategies https://docs.aws.amazon.com/prescriptive-guidance/latest/choosing-git-branch-approach/git-branching-strategies.html diff --git a/go.mod b/go.mod index 8ffa0c0e..cb6d0833 100644 --- a/go.mod +++ b/go.mod @@ -27,9 +27,9 @@ require ( go.opentelemetry.io/otel v1.24.0 go.opentelemetry.io/otel/metric v1.24.0 go.uber.org/zap v1.27.0 - golang.org/x/crypto v0.27.0 + golang.org/x/crypto v0.31.0 golang.org/x/net v0.29.0 - golang.org/x/sync v0.9.0 + golang.org/x/sync v0.10.0 google.golang.org/protobuf v1.34.2 modernc.org/sqlite v1.32.0 ) @@ -75,7 +75,7 @@ require ( github.com/go-viper/mapstructure/v2 v2.0.0-alpha.1 // indirect github.com/gofrs/flock v0.12.1 // indirect github.com/gogo/protobuf v1.3.2 // indirect - github.com/golang-jwt/jwt/v4 v4.5.0 // indirect + github.com/golang-jwt/jwt/v4 v4.5.1 // indirect github.com/golang/snappy v0.0.5-0.20220116011046-fa5810519dcb // indirect github.com/google/uuid v1.6.0 // indirect github.com/gorilla/websocket v1.5.3 // indirect @@ -139,8 +139,8 @@ require ( go.opentelemetry.io/otel/trace v1.24.0 // indirect go.uber.org/multierr v1.10.0 // indirect golang.org/x/exp v0.0.0-20241108190413-2d47ceb2692f // indirect - golang.org/x/sys v0.25.0 // indirect - golang.org/x/text v0.18.0 // indirect + golang.org/x/sys v0.28.0 // indirect + golang.org/x/text v0.21.0 // indirect golang.org/x/time v0.5.0 // indirect gopkg.in/ini.v1 v1.67.0 // 
indirect gopkg.in/natefinch/lumberjack.v2 v2.2.1 // indirect diff --git a/go.sum b/go.sum index aec6d2d7..7ccb9294 100644 --- a/go.sum +++ b/go.sum @@ -119,8 +119,8 @@ github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= github.com/golang-collections/collections v0.0.0-20130729185459-604e922904d3 h1:zN2lZNZRflqFyxVaTIU61KNKQ9C0055u9CAfpmqUvo4= github.com/golang-collections/collections v0.0.0-20130729185459-604e922904d3/go.mod h1:nPpo7qLxd6XL3hWJG/O60sR8ZKfMCiIoNap5GvD12KU= -github.com/golang-jwt/jwt/v4 v4.5.0 h1:7cYmW1XlMY7h7ii7UhUyChSgS5wUJEnm9uZVTGqOWzg= -github.com/golang-jwt/jwt/v4 v4.5.0/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0= +github.com/golang-jwt/jwt/v4 v4.5.1 h1:JdqV9zKUdtaa9gdPlywC3aeoEsR681PlKC+4F5gQgeo= +github.com/golang-jwt/jwt/v4 v4.5.1/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0= github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= @@ -365,8 +365,8 @@ go.uber.org/zap v1.27.0/go.mod h1:GB2qFLM7cTU87MWRP2mPIjqfIDnGu+VIO4V/SdhGo2E= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= -golang.org/x/crypto v0.27.0 h1:GXm2NjJrPaiv/h1tb2UH8QfgC/hOf/+z0p6PT8o1w7A= -golang.org/x/crypto v0.27.0/go.mod h1:1Xngt8kV6Dvbssa53Ziq6Eqn0HqbZi5Z6R0ZpwQzt70= +golang.org/x/crypto v0.31.0 h1:ihbySMvVjLAeSH1IbfcRTkD/iNscyz8rGzjF/E5hV6U= +golang.org/x/crypto v0.31.0/go.mod 
h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk= golang.org/x/exp v0.0.0-20241108190413-2d47ceb2692f h1:XdNn9LlyWAhLVp6P/i8QYBW+hlyhrhei9uErw2B5GJo= golang.org/x/exp v0.0.0-20241108190413-2d47ceb2692f/go.mod h1:D5SMRVC3C2/4+F/DB1wZsLRnSNimn2Sp/NPsCrsv8ak= golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= @@ -389,8 +389,8 @@ golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJ golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.9.0 h1:fEo0HyrW1GIgZdpbhCRO0PkJajUS5H9IFUztCgEo2jQ= -golang.org/x/sync v0.9.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/sync v0.10.0 h1:3NQrjDixjgGwUOCaF8w2+VYHv0Ve/vGYSbdkTa98gmQ= +golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -414,16 +414,16 @@ golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.11.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.14.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/sys v0.25.0 h1:r+8e+loiHxRqhXVl6ML1nO3l1+oFoWbnlu2Ehimmi34= -golang.org/x/sys v0.25.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.28.0 h1:Fksou7UEQUWlKvIdsqzJmUmCX3cZuD2+P3XyyzwMhlA= +golang.org/x/sys v0.28.0/go.mod 
h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= -golang.org/x/text v0.18.0 h1:XvMDiNzPAl0jr17s6W9lcaIhGUfUORdGCNsuLmPG224= -golang.org/x/text v0.18.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY= +golang.org/x/text v0.21.0 h1:zyQAAkrwaneQ066sspRyJaG9VNi/YJ1NfzcGB3hZ/qo= +golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ= golang.org/x/time v0.0.0-20200416051211-89c76fbcd5d1/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.5.0 h1:o7cqy6amK/52YcAKIPlM3a+Fpj35zvRj2TP+e1xFSfk= golang.org/x/time v0.5.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM= diff --git a/l1infotreesync/mocks/eth_clienter.go b/l1infotreesync/mocks/mock_eth_clienter.go similarity index 99% rename from l1infotreesync/mocks/eth_clienter.go rename to l1infotreesync/mocks/mock_eth_clienter.go index 270c40d9..72d67a97 100644 --- a/l1infotreesync/mocks/eth_clienter.go +++ b/l1infotreesync/mocks/mock_eth_clienter.go @@ -1,6 +1,6 @@ // Code generated by mockery. DO NOT EDIT. 
-package mocks_l1infotreesync +package mocks import ( context "context" diff --git a/l1infotreesync/mocks/mock_reorgdetector.go b/l1infotreesync/mocks/mock_reorg_detector.go similarity index 99% rename from l1infotreesync/mocks/mock_reorgdetector.go rename to l1infotreesync/mocks/mock_reorg_detector.go index f92260c3..b8c62ba4 100644 --- a/l1infotreesync/mocks/mock_reorgdetector.go +++ b/l1infotreesync/mocks/mock_reorg_detector.go @@ -1,6 +1,6 @@ // Code generated by mockery. DO NOT EDIT. -package mocks_l1infotreesync +package mocks import ( context "context" diff --git a/reorgdetector/reorgdetector.go b/reorgdetector/reorgdetector.go index d11f90d5..752dbeb1 100644 --- a/reorgdetector/reorgdetector.go +++ b/reorgdetector/reorgdetector.go @@ -159,10 +159,11 @@ func (rd *ReorgDetector) detectReorgInTrackedList(ctx context.Context) error { // and hashes matches. If higher than finalized block, we assume a reorg still might happen. if hdr.Num <= lastFinalisedBlock.Number.Uint64() { hdrs.removeRange(hdr.Num, hdr.Num) - } - if err := rd.removeTrackedBlockRange(id, hdr.Num, hdr.Num); err != nil { - return fmt.Errorf("error removing blocks from DB for subscriber %s between blocks %d and %d: %w", - id, hdr.Num, hdr.Num, err) + + if err := rd.removeTrackedBlockRange(id, hdr.Num, hdr.Num); err != nil { + return fmt.Errorf("error removing blocks from DB for subscriber %s between blocks %d and %d: %w", + id, hdr.Num, hdr.Num, err) + } } continue diff --git a/reorgdetector/reorgdetector_db.go b/reorgdetector/reorgdetector_db.go index f5bed3a7..5900bfc5 100644 --- a/reorgdetector/reorgdetector_db.go +++ b/reorgdetector/reorgdetector_db.go @@ -70,7 +70,7 @@ func (rd *ReorgDetector) saveTrackedBlock(id string, b header) error { // updateTrackedBlocksDB updates the tracked blocks for a subscriber in db func (rd *ReorgDetector) removeTrackedBlockRange(id string, fromBlock, toBlock uint64) error { _, err := rd.db.Exec( - "DELETE FROM tracked_block WHERE num >= $1 AND NUM <= 2 AND 
subscriber_id = $3;", + "DELETE FROM tracked_block WHERE num >= $1 AND num <= $2 AND subscriber_id = $3;", fromBlock, toBlock, id, ) return err diff --git a/reorgdetector/reorgdetector_test.go b/reorgdetector/reorgdetector_test.go index f5cf50b4..7bb78bda 100644 --- a/reorgdetector/reorgdetector_test.go +++ b/reorgdetector/reorgdetector_test.go @@ -2,14 +2,18 @@ package reorgdetector import ( "context" + big "math/big" "path" "strings" + "sync" "testing" "time" aggkittypes "github.com/agglayer/aggkit/config/types" common "github.com/ethereum/go-ethereum/common" + types "github.com/ethereum/go-ethereum/core/types" "github.com/ethereum/go-ethereum/ethclient/simulated" + "github.com/ethereum/go-ethereum/rpc" "github.com/stretchr/testify/require" ) @@ -158,3 +162,98 @@ func TestNotSubscribed(t *testing.T) { err = reorgDetector.AddBlockToTrack(context.Background(), "foo", 1, common.Hash{}) require.True(t, strings.Contains(err.Error(), "is not subscribed")) } + +func TestDetectReorgs(t *testing.T) { + t.Parallel() + + ctx := context.Background() + syncerID := "test-syncer" + trackedBlock := &types.Header{Number: big.NewInt(9)} + + t.Run("Block not finalized", func(t *testing.T) { + t.Parallel() + + lastFinalizedBlock := &types.Header{Number: big.NewInt(8)} + client := NewEthClientMock(t) + client.On("HeaderByNumber", ctx, big.NewInt(int64(rpc.FinalizedBlockNumber))).Return(lastFinalizedBlock, nil) + client.On("HeaderByNumber", ctx, trackedBlock.Number).Return(trackedBlock, nil) + + testDir := path.Join(t.TempDir(), "reorgdetectorTestDetectReorgs.sqlite") + reorgDetector, err := New(client, Config{DBPath: testDir, CheckReorgsInterval: aggkittypes.NewDuration(time.Millisecond * 100)}) + require.NoError(t, err) + + _, err = reorgDetector.Subscribe(syncerID) + require.NoError(t, err) + require.NoError(t, reorgDetector.AddBlockToTrack(ctx, syncerID, trackedBlock.Number.Uint64(), trackedBlock.Hash())) + + require.NoError(t, reorgDetector.detectReorgInTrackedList(ctx)) + + 
trackedBlocks, err := reorgDetector.getTrackedBlocks() + require.NoError(t, err) + require.Equal(t, 1, len(trackedBlocks)) + + syncerTrackedBlocks, ok := trackedBlocks[syncerID] + require.True(t, ok) + require.Equal(t, 1, syncerTrackedBlocks.len()) + }) + + t.Run("Block finalized", func(t *testing.T) { + t.Parallel() + + lastFinalizedBlock := trackedBlock + client := NewEthClientMock(t) + client.On("HeaderByNumber", ctx, big.NewInt(int64(rpc.FinalizedBlockNumber))).Return(lastFinalizedBlock, nil) + + testDir := path.Join(t.TempDir(), "reorgdetectorTestDetectReorgs.sqlite") + reorgDetector, err := New(client, Config{DBPath: testDir, CheckReorgsInterval: aggkittypes.NewDuration(time.Millisecond * 100)}) + require.NoError(t, err) + + _, err = reorgDetector.Subscribe(syncerID) + require.NoError(t, err) + require.NoError(t, reorgDetector.AddBlockToTrack(ctx, syncerID, trackedBlock.Number.Uint64(), trackedBlock.Hash())) + + require.NoError(t, reorgDetector.detectReorgInTrackedList(ctx)) + + trackedBlocks, err := reorgDetector.getTrackedBlocks() + require.NoError(t, err) + require.Equal(t, 0, len(trackedBlocks)) + }) + + t.Run("Reorg happened", func(t *testing.T) { + t.Parallel() + + lastFinalizedBlock := &types.Header{Number: big.NewInt(5)} + reorgedTrackedBlock := &types.Header{Number: trackedBlock.Number, Extra: []byte("reorged")} // Different hash + + client := NewEthClientMock(t) + client.On("HeaderByNumber", ctx, big.NewInt(int64(rpc.FinalizedBlockNumber))).Return(lastFinalizedBlock, nil) + client.On("HeaderByNumber", ctx, trackedBlock.Number).Return(reorgedTrackedBlock, nil) + + testDir := path.Join(t.TempDir(), "reorgdetectorTestDetectReorgs.sqlite") + reorgDetector, err := New(client, Config{DBPath: testDir, CheckReorgsInterval: aggkittypes.NewDuration(time.Millisecond * 100)}) + require.NoError(t, err) + + subscription, err := reorgDetector.Subscribe(syncerID) + require.NoError(t, err) + + var wg sync.WaitGroup + + wg.Add(1) + go func() { + 
<-subscription.ReorgedBlock + subscription.ReorgProcessed <- true + + wg.Done() + }() + + require.NoError(t, reorgDetector.AddBlockToTrack(ctx, syncerID, trackedBlock.Number.Uint64(), trackedBlock.Hash())) + + require.NoError(t, reorgDetector.detectReorgInTrackedList(ctx)) + + wg.Wait() // we wait here to make sure the reorg is processed + + trackedBlocks, err := reorgDetector.getTrackedBlocks() + require.NoError(t, err) + require.Equal(t, 0, len(trackedBlocks)) // shouldn't be any since a reorg happened on that block + }) +} diff --git a/rpc/mocks/bridge_client_interface.go b/rpc/mocks/mock_bridge_client_interface.go similarity index 100% rename from rpc/mocks/bridge_client_interface.go rename to rpc/mocks/mock_bridge_client_interface.go diff --git a/rpc/mocks/bridger.go b/rpc/mocks/mock_bridger.go similarity index 100% rename from rpc/mocks/bridger.go rename to rpc/mocks/mock_bridger.go diff --git a/rpc/mocks/claim_sponsorer.go b/rpc/mocks/mock_claim_sponsorer.go similarity index 100% rename from rpc/mocks/claim_sponsorer.go rename to rpc/mocks/mock_claim_sponsorer.go diff --git a/rpc/mocks/client_factory_interface.go b/rpc/mocks/mock_client_factory_interface.go similarity index 100% rename from rpc/mocks/client_factory_interface.go rename to rpc/mocks/mock_client_factory_interface.go diff --git a/rpc/mocks/client_interface.go b/rpc/mocks/mock_client_interface.go similarity index 100% rename from rpc/mocks/client_interface.go rename to rpc/mocks/mock_client_interface.go diff --git a/rpc/mocks/l1_info_treer.go b/rpc/mocks/mock_l1_info_treer.go similarity index 99% rename from rpc/mocks/l1_info_treer.go rename to rpc/mocks/mock_l1_info_treer.go index e7441155..b9c04a2a 100644 --- a/rpc/mocks/l1_info_treer.go +++ b/rpc/mocks/mock_l1_info_treer.go @@ -27,7 +27,7 @@ func (_m *L1InfoTreer) EXPECT() *L1InfoTreer_Expecter { return &L1InfoTreer_Expecter{mock: &_m.Mock} } -// GetFirstInfo provides a mock function with given fields: +// GetFirstInfo provides a mock function 
with no fields func (_m *L1InfoTreer) GetFirstInfo() (*l1infotreesync.L1InfoTreeLeaf, error) { ret := _m.Called() @@ -376,7 +376,7 @@ func (_c *L1InfoTreer_GetInfoByIndex_Call) RunAndReturn(run func(context.Context return _c } -// GetLastInfo provides a mock function with given fields: +// GetLastInfo provides a mock function with no fields func (_m *L1InfoTreer) GetLastInfo() (*l1infotreesync.L1InfoTreeLeaf, error) { ret := _m.Called() diff --git a/rpc/mocks/last_ge_rer.go b/rpc/mocks/mock_last_ge_rer.go similarity index 100% rename from rpc/mocks/last_ge_rer.go rename to rpc/mocks/mock_last_ge_rer.go diff --git a/scripts/local_config b/scripts/local_config index aab17d20..87fe60df 100755 --- a/scripts/local_config +++ b/scripts/local_config @@ -413,12 +413,10 @@ cat << EOF "args":[ "run", "-cfg", "$DEST_TEMPLATE_FILE", - "-components", "sequence-sender,aggregator", + "-components", "aggsender", ] }, - To run AggSender change components to: - "-components", "aggsender", EOF echo " -----------------------------------------------------------" diff --git a/sync/driver.go b/sync/driver.go deleted file mode 100644 index f85c04fb..00000000 --- a/sync/driver.go +++ /dev/null @@ -1,19 +0,0 @@ -package sync - -import ( - "context" - "errors" -) - -var ErrInconsistentState = errors.New("state is inconsistent, try again later once the state is consolidated") - -type Block struct { - Num uint64 - Events []interface{} -} - -type ProcessorInterface interface { - GetLastProcessedBlock(ctx context.Context) (uint64, error) - ProcessBlock(block Block) error - Reorg(firstReorgedBlock uint64) error -} diff --git a/sync/evmdriver.go b/sync/evmdriver.go index 72476cef..cfa96f41 100644 --- a/sync/evmdriver.go +++ b/sync/evmdriver.go @@ -9,6 +9,13 @@ import ( "github.com/ethereum/go-ethereum/common" ) +var ErrInconsistentState = errors.New("state is inconsistent, try again later once the state is consolidated") + +type Block struct { + Num uint64 + Events []interface{} +} + type 
evmDownloaderFull interface { EVMDownloaderInterface downloader diff --git a/sync/mock_l2_test.go b/sync/mock_eth_clienter.go similarity index 100% rename from sync/mock_l2_test.go rename to sync/mock_eth_clienter.go diff --git a/sync/mock_downloader_test.go b/sync/mock_evm_downloader_full.go similarity index 100% rename from sync/mock_downloader_test.go rename to sync/mock_evm_downloader_full.go diff --git a/sync/mock_processor_test.go b/sync/mock_processor_interface.go similarity index 100% rename from sync/mock_processor_test.go rename to sync/mock_processor_interface.go diff --git a/sync/mock_reorgdetector_test.go b/sync/mock_reorg_detector.go similarity index 100% rename from sync/mock_reorgdetector_test.go rename to sync/mock_reorg_detector.go diff --git a/test/Makefile b/test/Makefile index b99b372e..cced2c9d 100644 --- a/test/Makefile +++ b/test/Makefile @@ -1,52 +1,8 @@ -.PHONY: generate-mocks -generate-mocks: generate-mocks-bridgesync generate-mocks-reorgdetector \ - generate-mocks-da \ - generate-mocks-l1infotreesync generate-mocks-helpers \ - generate-mocks-sync \ - generate-mocks-aggsender generate-mocks-agglayer - COMMON_MOCKERY_PARAMS=--disable-version-string --with-expecter --exported -.PHONY: generate-mocks-bridgesync -generate-mocks-bridgesync: ## Generates mocks for bridgesync, using mockery tool - export "GOROOT=$$(go env GOROOT)" && $$(go env GOPATH)/bin/mockery --all --case snake --dir ../bridgesync --output ../bridgesync/mocks --outpkg mocks_bridgesync ${COMMON_MOCKERY_PARAMS} - -.PHONY: generate-mocks-reorgdetector -generate-mocks-reorgdetector: ## Generates mocks for reorgdetector, using mockery tool - export "GOROOT=$$(go env GOROOT)" && $$(go env GOPATH)/bin/mockery --name=EthClient --dir=../reorgdetector --output=../reorgdetector --outpkg=reorgdetector --inpackage --structname=EthClientMock --filename=mock_eth_client.go ${COMMON_MOCKERY_PARAMS} - -.PHONY: generate-mocks-rpc -generate-mocks-rpc: ## Generates mocks for rpc, using mockery 
tool - export "GOROOT=$$(go env GOROOT)" && $$(go env GOPATH)/bin/mockery --all --case snake --dir ../rpc --output ../rpc/mocks --outpkg mocks ${COMMON_MOCKERY_PARAMS} - -.PHONY: generate-mocks-l1infotreesync -generate-mocks-l1infotreesync: ## Generates mocks for l1infotreesync, using mockery tool - export "GOROOT=$$(go env GOROOT)" && $$(go env GOPATH)/bin/mockery --all --case snake --dir ../l1infotreesync --output ../l1infotreesync/mocks --outpkg mocks_l1infotreesync ${COMMON_MOCKERY_PARAMS} - export "GOROOT=$$(go env GOROOT)" && $$(go env GOPATH)/bin/mockery --name=ReorgDetector --dir=../sync --output=../l1infotreesync/mocks --outpkg=mocks_l1infotreesync --structname=ReorgDetectorMock --filename=mock_reorgdetector.go ${COMMON_MOCKERY_PARAMS} - -.PHONY: generate-mocks-helpers -generate-mocks-helpers: ## Generates mocks for helpers, using mockery tool - export "GOROOT=$$(go env GOROOT)" && $$(go env GOPATH)/bin/mockery --name=EthTxManager --dir=../aggoracle/chaingersender --output=./helpers --outpkg=helpers --structname=EthTxManagerMock --filename=mock_ethtxmanager.go ${COMMON_MOCKERY_PARAMS} - -.PHONY: generate-mocks-aggoracle -generate-mocks-aggoracle: ## Generates mocks for aggoracle, using mockery tool - export "GOROOT=$$(go env GOROOT)" && $$(go env GOPATH)/bin/mockery --name=EthTxManager --dir ../aggoracle/chaingersender --output ../aggoracle/mocks --outpkg mocks --structname=EthTxManagerMock --filename=mock_ethtxmanager.go ${COMMON_MOCKERY_PARAMS} - export "GOROOT=$$(go env GOROOT)" && $$(go env GOPATH)/bin/mockery --name=L2GERManager --dir ../aggoracle/chaingersender --output ../aggoracle/mocks --outpkg mocks --structname=L2GERManagerMock --filename=mock_l2germanager.go ${COMMON_MOCKERY_PARAMS} - -.PHONY: generate-mocks-sync -generate-mocks-sync: ## Generates mocks for sync, using mockery tool - export "GOROOT=$$(go env GOROOT)" && $$(go env GOPATH)/bin/mockery --name=EthClienter --dir=../sync --output=../sync --outpkg=sync --inpackage --structname=L2Mock 
--filename=mock_l2_test.go ${COMMON_MOCKERY_PARAMS} - export "GOROOT=$$(go env GOROOT)" && $$(go env GOPATH)/bin/mockery --name=evmDownloaderFull --dir=../sync --output=../sync --outpkg=sync --inpackage --structname=EVMDownloaderMock --filename=mock_downloader_test.go ${COMMON_MOCKERY_PARAMS} - export "GOROOT=$$(go env GOROOT)" && $$(go env GOPATH)/bin/mockery --name=processorInterface --dir=../sync --output=../sync --outpkg=sync --inpackage --structname=ProcessorMock --filename=mock_processor_test.go ${COMMON_MOCKERY_PARAMS} - export "GOROOT=$$(go env GOROOT)" && $$(go env GOPATH)/bin/mockery --name=ReorgDetector --dir=../sync --output=../sync --outpkg=sync --inpackage --structname=ReorgDetectorMock --filename=mock_reorgdetector_test.go ${COMMON_MOCKERY_PARAMS} - -.PHONY: generate-mocks-aggsender -generate-mocks-aggsender: ## Generates mocks for aggsender, using mockery tool - export "GOROOT=$$(go env GOROOT)" && $$(go env GOPATH)/bin/mockery --all --case snake --dir ../aggsender --output ../aggsender/mocks --outpkg mocks ${COMMON_MOCKERY_PARAMS} - -.PHONY: generate-mocks-agglayer -generate-mocks-agglayer: ## Generates mocks for agglayer, using mockery tool - export "GOROOT=$$(go env GOROOT)" && $$(go env GOPATH)/bin/mockery --name=AgglayerClientInterface --dir=../agglayer --output=../agglayer --outpkg=agglayer --inpackage --structname=AgglayerClientMock --filename=mock_agglayer_client.go ${COMMON_MOCKERY_PARAMS} +.PHONY: generate-mocks +generate-mocks: + mockery ${COMMON_MOCKERY_PARAMS} .PHONY: test-e2e-fork12-pessimistic test-e2e-fork12-pessimistic: stop diff --git a/test/combinations/fork12-pessimistic-multi-attach-second-cdk.yml b/test/combinations/fork12-pessimistic-multi-attach-second-cdk.yml index 47aa6d78..18e555d2 100644 --- a/test/combinations/fork12-pessimistic-multi-attach-second-cdk.yml +++ b/test/combinations/fork12-pessimistic-multi-attach-second-cdk.yml @@ -27,7 +27,7 @@ args: cdk_node_image: cdk:latest - cdk_erigon_node_image: 
hermeznetwork/cdk-erigon:v2.60.0 + cdk_erigon_node_image: hermeznetwork/cdk-erigon:v2.61.2 zkevm_contracts_image: leovct/zkevm-contracts:v9.0.0-rc.3-pp-fork.12-patch.1 additional_services: [] consensus_contract_type: pessimistic @@ -37,4 +37,3 @@ args: zkevm_use_real_verifier: true enable_normalcy: true verifier_program_vkey: 0x00766aa16a6efe4ac05c0fe21d4b50f9631dbd1a2663a982da861427085ea2ea - diff --git a/test/combinations/fork12-pessimistic-multi.yml b/test/combinations/fork12-pessimistic-multi.yml index 46845991..36bb57ea 100644 --- a/test/combinations/fork12-pessimistic-multi.yml +++ b/test/combinations/fork12-pessimistic-multi.yml @@ -1,7 +1,7 @@ args: cdk_node_image: cdk:latest agglayer_image: ghcr.io/agglayer/agglayer:0.2.0-rc.20 - cdk_erigon_node_image: hermeznetwork/cdk-erigon:v2.60.0 + cdk_erigon_node_image: hermeznetwork/cdk-erigon:v2.61.2 zkevm_contracts_image: leovct/zkevm-contracts:v9.0.0-rc.3-pp-fork.12-patch.1 additional_services: [] consensus_contract_type: pessimistic @@ -12,4 +12,3 @@ args: enable_normalcy: true verifier_program_vkey: 0x00766aa16a6efe4ac05c0fe21d4b50f9631dbd1a2663a982da861427085ea2ea agglayer_prover_sp1_key: {{.AGGLAYER_PROVER_SP1_KEY}} - diff --git a/test/combinations/fork12-pessimistic.yml b/test/combinations/fork12-pessimistic.yml index f4f229d3..96b3ec18 100644 --- a/test/combinations/fork12-pessimistic.yml +++ b/test/combinations/fork12-pessimistic.yml @@ -1,6 +1,6 @@ args: agglayer_image: ghcr.io/agglayer/agglayer:0.2.0-rc.20 - cdk_erigon_node_image: hermeznetwork/cdk-erigon:v2.60.0-beta8 + cdk_erigon_node_image: hermeznetwork/cdk-erigon:v2.61.2 cdk_node_image: cdk zkevm_bridge_proxy_image: haproxy:3.0-bookworm zkevm_bridge_service_image: hermeznetwork/zkevm-bridge-service:v0.6.0-RC1 diff --git a/test/scripts/env.sh b/test/scripts/env.sh index 298d4f73..5dbde9de 100644 --- a/test/scripts/env.sh +++ b/test/scripts/env.sh @@ -1,7 +1,7 @@ #!/bin/bash ### Common variables KURTOSIS_ENCLAVE=cdk -TMP_CDK_FOLDER=tmp/cdk 
-DEST_KURTOSIS_PARAMS_YML=../$TMP_CDK_FOLDER/e2e-params.yml +TMP_AGGKIT_FOLDER=tmp/aggkit +DEST_KURTOSIS_PARAMS_YML=../$TMP_AGGKIT_FOLDER/e2e-params.yml KURTOSIS_FOLDER=${KURTOSIS_FOLDER:=../kurtosis-cdk} USE_L1_GAS_TOKEN_CONTRACT=true From 777d3b2424d48e8a2fe553b46f4fa14ee024ab2b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stefan=20Negovanovi=C4=87?= <93934272+Stefan-Ethernal@users.noreply.github.com> Date: Wed, 22 Jan 2025 09:33:33 +0100 Subject: [PATCH 3/3] chore: merge develop into main (#143) Signed-off-by: Arpit Temani Signed-off-by: dependabot[bot] Co-authored-by: Goran Rojovic <100121253+goran-ethernal@users.noreply.github.com> Co-authored-by: Arpit Temani Co-authored-by: Goran Rojovic Co-authored-by: Rachit Sonthalia <54906134+rachit77@users.noreply.github.com> Co-authored-by: Arnau Bennassar Co-authored-by: Joan Esteban <129153821+joanestebanr@users.noreply.github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Victor Castell <0x@vcastellm.xyz> --- .github/assets/aggkit-logo.svg | 28 - .github/assets/agglayer-logo.png | Bin 0 -> 1777 bytes .github/workflows/arm_deb_packager.yml | 39 +- .github/workflows/arm_rpm_packager.yml | 59 +- .github/workflows/build-aggkit-image.yml | 35 + .github/workflows/codeql.yml | 2 +- .github/workflows/test-e2e-multi-chains.yml | 85 + .github/workflows/test-e2e-single-chain.yml | 77 + .github/workflows/test-e2e.yml | 175 - .github/workflows/test-resequence.yml | 12 +- .github/workflows/x86_deb_packager.yml | 40 +- .github/workflows/x86_rpm_packager.yml | 8 +- .gitignore | 1 - Cargo.lock | 5479 ----------------- Cargo.toml | 22 - Dockerfile | 52 +- LICENSE | 619 -- LICENSE.Apache-2.0 | 201 + LICENSE.MIT | 19 + Makefile | 30 +- README.md | 17 +- SECURITY.md | 17 - book.toml | 14 - cmd/main.go | 16 +- cmd/run.go | 4 +- config/config.go | 21 - crates/aggkit-config/Cargo.toml | 23 - crates/aggkit-config/src/l1.rs | 38 - crates/aggkit-config/src/lib.rs | 29 - 
crates/aggkit-config/src/log.rs | 108 - crates/aggkit-config/src/network_config.rs | 15 - crates/aggkit-config/src/telemetry.rs | 23 - crates/aggkit/Cargo.toml | 30 - crates/aggkit/build.rs | 103 - crates/aggkit/src/allocs_render.rs | 99 - crates/aggkit/src/cli.rs | 38 - crates/aggkit/src/config_render.rs | 125 - crates/aggkit/src/helpers.rs | 13 - crates/aggkit/src/logging.rs | 23 - crates/aggkit/src/main.rs | 110 - crates/aggkit/src/versions.rs | 39 - crates/aggkit/versions.json | 15 - docs/SUMMARY.md | 1 - docs/da_integration.md | 82 - rpc/openrpc.json | 2 +- scripts/local_config | 208 +- test/aggregator.keystore | 1 - test/bats/helpers/common-multi_cdk-setup.bash | 2 +- test/bats/helpers/common-setup.bash | 2 +- test/bats/pp-multi/bridge-l2_to_l2-e2e.bats | 2 +- ...12-pessimistic-multi-attach-second-cdk.yml | 2 +- .../combinations/fork12-pessimistic-multi.yml | 2 +- test/combinations/fork12-pessimistic.yml | 2 +- test/config/test.config.toml | 3 - test/config/test.genesis.json | 100 - test/config/test.prover.config.json | 93 - test/docker-compose.yml | 48 - test/run-e2e-multi_pp.sh | 8 +- test/run-e2e.sh | 12 +- test/scripts/agglayer_certificates_monitor.sh | 20 +- test/scripts/batch_verification_monitor.sh | 2 +- test/scripts/env.sh | 2 +- 62 files changed, 651 insertions(+), 7846 deletions(-) delete mode 100644 .github/assets/aggkit-logo.svg create mode 100644 .github/assets/agglayer-logo.png create mode 100644 .github/workflows/build-aggkit-image.yml create mode 100644 .github/workflows/test-e2e-multi-chains.yml create mode 100644 .github/workflows/test-e2e-single-chain.yml delete mode 100644 .github/workflows/test-e2e.yml delete mode 100644 Cargo.lock delete mode 100644 Cargo.toml delete mode 100644 LICENSE create mode 100644 LICENSE.Apache-2.0 create mode 100644 LICENSE.MIT delete mode 100644 SECURITY.md delete mode 100644 book.toml delete mode 100644 crates/aggkit-config/Cargo.toml delete mode 100644 crates/aggkit-config/src/l1.rs delete mode 100644 
crates/aggkit-config/src/lib.rs delete mode 100644 crates/aggkit-config/src/log.rs delete mode 100644 crates/aggkit-config/src/network_config.rs delete mode 100644 crates/aggkit-config/src/telemetry.rs delete mode 100644 crates/aggkit/Cargo.toml delete mode 100644 crates/aggkit/build.rs delete mode 100644 crates/aggkit/src/allocs_render.rs delete mode 100644 crates/aggkit/src/cli.rs delete mode 100644 crates/aggkit/src/config_render.rs delete mode 100644 crates/aggkit/src/helpers.rs delete mode 100644 crates/aggkit/src/logging.rs delete mode 100644 crates/aggkit/src/main.rs delete mode 100644 crates/aggkit/src/versions.rs delete mode 100644 crates/aggkit/versions.json delete mode 100644 docs/da_integration.md delete mode 100644 test/aggregator.keystore delete mode 100644 test/config/test.config.toml delete mode 100644 test/config/test.genesis.json delete mode 100644 test/config/test.prover.config.json delete mode 100644 test/docker-compose.yml diff --git a/.github/assets/aggkit-logo.svg b/.github/assets/aggkit-logo.svg deleted file mode 100644 index cba03359..00000000 --- a/.github/assets/aggkit-logo.svg +++ /dev/null @@ -1,28 +0,0 @@ - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/.github/assets/agglayer-logo.png b/.github/assets/agglayer-logo.png new file mode 100644 index 0000000000000000000000000000000000000000..3a6dbdf1fabb2d1e4f499ebcdeb85221adfc61d0 GIT binary patch literal 1777 zcmW+%e^gUt7#^UgAWobNB$b0iOJETi84NTHE1)Qe26|05j5HV{rhp7$U@8qxVJcIx zVS(fUyEcZqW6EHFgmT5-TX$n(Vq*qVlpjM>Snt;#chA}PzR&wU&vU0&iGY{UGuIh@S42i!q0wBMV`J7Q?fuzyH(InbZDHwCRmg&O%U-XHU3@6p z#rloU2DfvwH6!dy$7HL{-s(=z{ms!d(vl^Kx;~E}iXIE&FV>Rlil!%r#BBlZ zom!{WT-5qZ2c_=G&MSC2b^GMa(17=o|9Uaf+U5tR;}iG%2pgZII(gl)Xu)Rj1s_Ek z9V^$qv#}9yY?;Gv4rW(`-@YWNMh9oAX6okyAwzj&-Q!MRXoZihmnmf_Udt?6U&1b+ z#QmdLt!PHYiK4nPIEegm`ip3w#1|i6uM~WJ>WvPWc7XV&0_T(4(y)xRbSb+aEfatF z3!AX%&#Pt_W=ffS!?1a~hfbzVC$3Fjn@89xmHNNQkD$=aJ(+a{n4VZ~YY{NL1ge+Y zLJ3=)TK~-b0i`_F$9=)_YX`vVJ{_C_qW!f8rYt&X> 
z%syc99lIkE7~Rt|s0UA=p$7j+veOB(cNl$N&WVDNGW@Azi7RGzHu}=tvBXUR4VbTA zNVMoWWJe{~k34>SkA}Tnp$N1XWt%#w+g zPN4Z=PKYMOeGsjk>jN}jVelfL{h;!i4>VU(*&LwN3J&atZT>_}M~5s)f|c<3@eNHl z1vLF_?p~k`eHH8iw28*bSwM@VdR%~p^~KEwIBDu}1?V%z%>{VH*fS5+s5m63H!}?c z)f2>F5R~*DHiafv;)~GaOI$CQ9C?rHkKi_!ji8$w0k9?W>UDb2emC$~8OOQy6@*ewv zOd3R`k`jqzsl%%JdD6>pU!E#lvO~C8T&(JQbR4rE$I7GI#f4`SY5zWXgFl=U%4=lQ zQPS1RI#XG3jf_vioys+VUFIr*FLQT%nP%d`5k<45uGvyi^<7uO(NxyjEGH&sQZ6#|C8HXk6Jyj0ZF|j3ylAhpA8ZT*Jc^$Fan4t06AMJeoK= zYidG4NfY(C1Dg@nXl5&+@zhd`Uj}T#gy_>plb&4&few@=77?N9)^jJ>$ zz^@puN#-*6aVwXgmmlF?3Z_ zCTSp=Q*{*&LNcw_@eSyvTx+pppw#3|3=$#UFZDv{8G7*=)E7L+{Q{*o#NO@;sl=J2 zdOoY1n^I7EPL+4WF4u?aZuvp7wkBuB;-K^YA;lHQE#MxxY){9w+p& zc^`M1m1`o@#W|~l`2~MKXKz=+`MmRt64gS^&Hgdyv_R)_D^u}NnA)~%X7-FXjplHi YKJjZ9BaRympE6qP#)O!28+iNv15;9lQUCw| literal 0 HcmV?d00001 diff --git a/.github/workflows/arm_deb_packager.yml b/.github/workflows/arm_deb_packager.yml index 64d451c6..3301ed57 100644 --- a/.github/workflows/arm_deb_packager.yml +++ b/.github/workflows/arm_deb_packager.yml @@ -41,49 +41,42 @@ jobs: - name: Build the binary run: make build - - name: Build the rust binary - run: | - BUILD_SCRIPT_DISABLED=1 - cargo build --release --bin cdk - - name: making directory structure - run: mkdir -p packaging/deb/cdk/usr/bin/ + run: mkdir -p packaging/deb/aggkit/usr/bin/ - name: copying necessary binary for arm64 - run: cp -rp target/cdk-node packaging/deb/cdk/usr/bin/cdk-node - - name: copying rust binary for arm64 - run: cp -rp target/release/cdk packaging/deb/cdk/usr/bin/cdk + run: cp -rp target/aggkit packaging/deb/aggkit/usr/bin/aggkit # Control file creation - name: Create control file run: | - echo "Package: cdk" >> packaging/deb/cdk/DEBIAN/control - echo "Version: ${{ env.VERSION }}" >> packaging/deb/cdk/DEBIAN/control - echo "Section: base" >> packaging/deb/cdk/DEBIAN/control - echo "Priority: optional" >> packaging/deb/cdk/DEBIAN/control - echo "Architecture: arm64" >> packaging/deb/cdk/DEBIAN/control - echo "Maintainer: 
devops@polygon.technology" >> packaging/deb/cdk/DEBIAN/control - echo "Description: cdk binary package" >> packaging/deb/cdk/DEBIAN/control + echo "Package: aggkit" >> packaging/deb/aggkit/DEBIAN/control + echo "Version: ${{ env.VERSION }}" >> packaging/deb/aggkit/DEBIAN/control + echo "Section: base" >> packaging/deb/aggkit/DEBIAN/control + echo "Priority: optional" >> packaging/deb/aggkit/DEBIAN/control + echo "Architecture: arm64" >> packaging/deb/aggkit/DEBIAN/control + echo "Maintainer: devops@polygon.technology" >> packaging/deb/aggkit/DEBIAN/control + echo "Description: aggkit binary package" >> packaging/deb/aggkit/DEBIAN/control - - name: Creating package for binary for cdk ${{ env.ARCH }} - run: cp -rp packaging/deb/cdk packaging/deb/cdk-${{ env.GIT_TAG }}-${{ env.ARCH }} + - name: Creating package for binary for aggkit ${{ env.ARCH }} + run: cp -rp packaging/deb/aggkit packaging/deb/aggkit-${{ env.GIT_TAG }}-${{ env.ARCH }} env: ARCH: arm64 - name: Running package build - run: dpkg-deb --build --root-owner-group packaging/deb/cdk-${{ env.GIT_TAG }}-${{ env.ARCH }} + run: dpkg-deb --build --root-owner-group packaging/deb/aggkit-${{ env.GIT_TAG }}-${{ env.ARCH }} env: ARCH: arm64 - name: create checksum for the arm64 package - run: cd packaging/deb/ && sha256sum cdk-${{ env.GIT_TAG }}-${{ env.ARCH }}.deb > cdk-${{ env.GIT_TAG }}-${{ env.ARCH }}.deb.checksum + run: cd packaging/deb/ && sha256sum aggkit-${{ env.GIT_TAG }}-${{ env.ARCH }}.deb > aggkit-${{ env.GIT_TAG }}-${{ env.ARCH }}.deb.checksum env: ARCH: arm64 - - name: Release cdk Packages + - name: Release aggkit Packages uses: softprops/action-gh-release@v2 with: tag_name: ${{ env.GIT_TAG }} prerelease: true files: | - packaging/deb/cdk**.deb - packaging/deb/cdk**.deb.checksum + packaging/deb/aggkit**.deb + packaging/deb/aggkit**.deb.checksum diff --git a/.github/workflows/arm_rpm_packager.yml b/.github/workflows/arm_rpm_packager.yml index 614b80f2..54d733b1 100644 --- 
a/.github/workflows/arm_rpm_packager.yml +++ b/.github/workflows/arm_rpm_packager.yml @@ -35,14 +35,9 @@ jobs: - name: Download deps for project run: go mod download - - name: Building cdk-node for amd64 + - name: Building aggkit for arm run: make build - - name: Building the cdk - run: | - BUILD_SCRIPT_DISABLED=1 - cargo build --release --bin cdk - - name: Installing some dependencies run: sudo apt-get update && sudo apt-get install -y rpm @@ -53,51 +48,49 @@ jobs: mkdir -p packaging/rpm/RPMS mkdir -p packaging/rpm/SRPMS - touch packaging/rpm/cdk.spec - echo "Name: cdk" >> packaging/rpm/SPECS/cdk.spec - echo "Version: ${{ env.GIT_TAG1 }}" >> packaging/rpm/SPECS/cdk.spec - echo "Release: 1%{?dist}" >> packaging/rpm/SPECS/cdk.spec - echo "License: GPL/AGPL" >> packaging/rpm/SPECS/cdk.spec - echo "BuildArch: aarch64" >> packaging/rpm/SPECS/cdk.spec - echo "Summary: cdk rpm package" >> packaging/rpm/SPECS/cdk.spec + touch packaging/rpm/aggkit.spec + echo "Name: aggkit" >> packaging/rpm/SPECS/aggkit.spec + echo "Version: ${{ env.GIT_TAG1 }}" >> packaging/rpm/SPECS/aggkit.spec + echo "Release: 1%{?dist}" >> packaging/rpm/SPECS/aggkit.spec + echo "License: GPL/AGPL" >> packaging/rpm/SPECS/aggkit.spec + echo "BuildArch: aarch64" >> packaging/rpm/SPECS/aggkit.spec + echo "Summary: aggkit rpm package" >> packaging/rpm/SPECS/aggkit.spec - echo "%description" >> packaging/rpm/SPECS/cdk.spec - echo "cdk rpm package" >> packaging/rpm/SPECS/cdk.spec + echo "%description" >> packaging/rpm/SPECS/aggkit.spec + echo "aggkit rpm package" >> packaging/rpm/SPECS/aggkit.spec - echo "%pre" >> packaging/rpm/SPECS/cdk.spec - echo "getent group cdk >/dev/null || groupadd -r cdk" >> packaging/rpm/SPECS/cdk.spec - echo "getent passwd cdk >/dev/null || useradd -s /bin/false -d /opt/cdk -r cdk -g cdk" >> packaging/rpm/SPECS/cdk.spec + echo "%pre" >> packaging/rpm/SPECS/aggkit.spec + echo "getent group aggkit >/dev/null || groupadd -r aggkit" >> packaging/rpm/SPECS/aggkit.spec + echo "getent 
passwd aggkit >/dev/null || useradd -s /bin/false -d /opt/aggkit -r aggkit -g aggkit" >> packaging/rpm/SPECS/aggkit.spec - echo "%install" >> packaging/rpm/SPECS/cdk.spec - echo "mkdir -p %{buildroot}/usr/bin" >> packaging/rpm/SPECS/cdk.spec - echo "cp /home/runner/work/cdk/cdk/target/cdk-node %{buildroot}/usr/bin/cdk-node" >> packaging/rpm/SPECS/cdk.spec - echo "cp /home/runner/work/cdk/cdk/target/release/cdk %{buildroot}/usr/bin/cdk" >> packaging/rpm/SPECS/cdk.spec + echo "%install" >> packaging/rpm/SPECS/aggkit.spec + echo "mkdir -p %{buildroot}/usr/bin" >> packaging/rpm/SPECS/aggkit.spec + echo "cp /home/runner/work/aggkit/aggkit/target/aggkit-node %{buildroot}/usr/bin/aggkit-node" >> packaging/rpm/SPECS/aggkit.spec + echo "cp /home/runner/work/aggkit/aggkit/target/release/aggkit %{buildroot}/usr/bin/aggkit" >> packaging/rpm/SPECS/aggkit.spec - echo "%files" >> packaging/rpm/SPECS/cdk.spec - echo "/usr/bin/cdk" >> packaging/rpm/SPECS/cdk.spec - echo "/usr/bin/cdk-node" >> packaging/rpm/SPECS/cdk.spec - + echo "%files" >> packaging/rpm/SPECS/aggkit.spec + echo "/usr/bin/aggkit" >> packaging/rpm/SPECS/aggkit.spec - name: Construct rpm package run: | - rpmbuild --define "_topdir /home/runner/work/cdk/cdk/packaging/rpm_build" \ + rpmbuild --define "_topdir /home/runner/work/aggkit/aggkit/packaging/rpm_build" \ --define "_builddir %{_topdir}/BUILD" \ --define "_rpmdir %{_topdir}/RPMS" \ --define "_srcrpmdir %{_topdir}/SRPMS" \ --define "__spec_install_post /bin/true" \ - -bb packaging/rpm/SPECS/cdk.spec + -bb packaging/rpm/SPECS/aggkit.spec - name: Rename file for post rpm build and for checksum - run: mv /home/runner/work/cdk/cdk/packaging/rpm_build/RPMS/aarch64/cdk-${{ env.GIT_TAG1 }}-1.aarch64.rpm /home/runner/work/cdk/cdk/packaging/rpm_build/RPMS/aarch64/cdk-${{ env.GIT_TAG1 }}.aarch64.rpm + run: mv /home/runner/work/aggkit/aggkit/packaging/rpm_build/RPMS/aarch64/aggkit-${{ env.GIT_TAG1 }}-1.aarch64.rpm 
/home/runner/work/aggkit/aggkit/packaging/rpm_build/RPMS/aarch64/aggkit-${{ env.GIT_TAG1 }}.aarch64.rpm - name: Checksum for the rpm package - run: sha256sum /home/runner/work/cdk/cdk/packaging/rpm_build/RPMS/aarch64/cdk-${{ env.GIT_TAG1 }}.aarch64.rpm > /home/runner/work/cdk/cdk/packaging/rpm_build/RPMS/aarch64/cdk-${{ env.GIT_TAG1 }}.aarch64.rpm.checksum + run: sha256sum /home/runner/work/aggkit/aggkit/packaging/rpm_build/RPMS/aarch64/aggkit-${{ env.GIT_TAG1 }}.aarch64.rpm > /home/runner/work/aggkit/aggkit/packaging/rpm_build/RPMS/aarch64/aggkit-${{ env.GIT_TAG1 }}.aarch64.rpm.checksum - - name: Release cdk Packages + - name: Release aggkit Packages uses: softprops/action-gh-release@v2 with: tag_name: ${{ env.GIT_TAG }} prerelease: true files: | - packaging/rpm_build/RPMS/aarch64/cdk-**.rpm - packaging/rpm_build/RPMS/aarch64/cdk-**.rpm.checksum + packaging/rpm_build/RPMS/aarch64/aggkit-**.rpm + packaging/rpm_build/RPMS/aarch64/aggkit-**.rpm.checksum diff --git a/.github/workflows/build-aggkit-image.yml b/.github/workflows/build-aggkit-image.yml new file mode 100644 index 00000000..dc701725 --- /dev/null +++ b/.github/workflows/build-aggkit-image.yml @@ -0,0 +1,35 @@ +name: Build Aggkit Image +on: + workflow_call: + inputs: + go-version: + required: true + type: string + docker-image-name: + required: true + type: string + +jobs: + build-aggkit-image: + runs-on: amd-runner-2204 + timeout-minutes: 20 + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Install Go + uses: actions/setup-go@v5 + with: + go-version: ${{ inputs.go-version }} + + - name: Build Aggkit Docker Image + run: make build-docker + + - name: Save Aggkit Image to Archive + run: docker save --output /tmp/${{ inputs.docker-image-name }}.tar ${{ inputs.docker-image-name }} + + - name: Upload Aggkit Archive + uses: actions/upload-artifact@v4 + with: + name: ${{ inputs.docker-image-name }} + path: /tmp/${{ inputs.docker-image-name }}.tar diff --git a/.github/workflows/codeql.yml 
b/.github/workflows/codeql.yml index 90b7643b..b06260c4 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -52,7 +52,7 @@ jobs: # Build project - name: Build Go project if: ${{ matrix.language == 'go' }} - run: make build-go + run: make build-aggkit # Perform CodeQL Analysis - name: Perform CodeQL Analysis diff --git a/.github/workflows/test-e2e-multi-chains.yml b/.github/workflows/test-e2e-multi-chains.yml new file mode 100644 index 00000000..a6fa1997 --- /dev/null +++ b/.github/workflows/test-e2e-multi-chains.yml @@ -0,0 +1,85 @@ +name: Test E2E (multi chains) +on: + push: + branches: + - '**' + workflow_dispatch: {} + +jobs: + build-aggkit-image: + uses: ./.github/workflows/build-aggkit-image.yml + with: + go-version: 1.22.x + docker-image-name: aggkit + + test-e2e: + needs: build-aggkit-image + runs-on: amd-runner-2204 + timeout-minutes: 30 + strategy: + fail-fast: false + matrix: + e2e-group: + - fork12-multi-pessimistic + + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Install Go + uses: actions/setup-go@v5 + with: + go-version: 1.22.x + + - name: Build Tools + run: make build-tools + + - name: Checkout kurtosis-cdk + uses: actions/checkout@v4 + with: + repository: 0xPolygon/kurtosis-cdk + path: kurtosis-cdk + ref: vcastellm/adapt-aggkit + + - name: Install Kurtosis CDK tools + uses: ./kurtosis-cdk/.github/actions/setup-kurtosis-cdk + + - name: Setup Bats and bats libs + uses: bats-core/bats-action@2.0.0 + + - name: Download aggkit archive + uses: actions/download-artifact@v4 + with: + name: aggkit + path: /tmp + + - name: Load aggkit image + run: | + docker load --input /tmp/aggkit.tar + docker image ls -a + + - name: Run E2E tests + run: make test-e2e-fork12-multi-pessimistic + working-directory: test + env: + KURTOSIS_FOLDER: ${{ github.workspace }}/kurtosis-cdk + BATS_LIB_PATH: /usr/lib/ + AGGLAYER_PROVER_SP1_KEY: ${{ secrets.SP1_PRIVATE_KEY }} + + - name: Dump enclave logs + if: failure() + run: 
kurtosis dump ./dump + + - name: Generate archive name + if: failure() + run: | + archive_name="dump_run_with_args_${{matrix.e2e-group}}_${{ github.run_id }}" + echo "ARCHIVE_NAME=${archive_name}" >> "$GITHUB_ENV" + echo "Generated archive name: ${archive_name}" + + - name: Upload logs + if: failure() + uses: actions/upload-artifact@v4 + with: + name: ${{ env.ARCHIVE_NAME }} + path: ./dump diff --git a/.github/workflows/test-e2e-single-chain.yml b/.github/workflows/test-e2e-single-chain.yml new file mode 100644 index 00000000..c0685420 --- /dev/null +++ b/.github/workflows/test-e2e-single-chain.yml @@ -0,0 +1,77 @@ + +name: Test E2E (single chain) +on: + push: + branches: + - '**' + workflow_dispatch: {} + +jobs: + build-aggkit-image: + uses: ./.github/workflows/build-aggkit-image.yml + with: + go-version: 1.22.x + docker-image-name: aggkit + + test-e2e: + needs: build-aggkit-image + runs-on: amd-runner-2204 + timeout-minutes: 30 + strategy: + fail-fast: false + matrix: + e2e-group: + - fork12-pessimistic + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Checkout kurtosis-cdk + uses: actions/checkout@v4 + with: + repository: 0xPolygon/kurtosis-cdk + path: kurtosis-cdk + ref: vcastellm/adapt-aggkit + + - name: Install Kurtosis CDK tools + uses: ./kurtosis-cdk/.github/actions/setup-kurtosis-cdk + + - name: Setup Bats and bats libs + uses: bats-core/bats-action@2.0.0 + + - name: Download aggkit archive + uses: actions/download-artifact@v4 + with: + name: aggkit + path: /tmp + + - name: Load aggkit image + run: | + docker load --input /tmp/aggkit.tar + docker image ls -a + + - name: Run E2E tests + run: make test-e2e-${{ matrix.e2e-group }} + working-directory: test + env: + KURTOSIS_FOLDER: ${{ github.workspace }}/kurtosis-cdk + BATS_LIB_PATH: /usr/lib/ + AGGLAYER_PROVER_SP1_KEY: ${{ secrets.SP1_PRIVATE_KEY }} + + - name: Dump enclave logs + if: failure() + run: kurtosis dump ./dump + + - name: Generate archive name + if: failure() + run: | + 
archive_name="dump_run_with_args_${{matrix.e2e-group}}_${{ github.run_id }}" + echo "ARCHIVE_NAME=${archive_name}" >> "$GITHUB_ENV" + echo "Generated archive name: ${archive_name}" + + - name: Upload logs + if: failure() + uses: actions/upload-artifact@v4 + with: + name: ${{ env.ARCHIVE_NAME }} + path: ./dump diff --git a/.github/workflows/test-e2e.yml b/.github/workflows/test-e2e.yml deleted file mode 100644 index 6522d256..00000000 --- a/.github/workflows/test-e2e.yml +++ /dev/null @@ -1,175 +0,0 @@ -name: Test e2e -on: - push: - branches: - - '**' - workflow_dispatch: {} - - -jobs: - build-cdk-image: - runs-on: amd-runner-2204 - timeout-minutes: 20 - steps: - - name: Checkout code - uses: actions/checkout@v4 - - - name: Install Go - uses: actions/setup-go@v5 - with: - go-version: 1.22.x - - - name: Build cdk docker image - run: make build-docker - - - name: Save cdk image to archive - run: docker save --output /tmp/cdk.tar cdk - - - name: Upload archive - uses: actions/upload-artifact@v4 - with: - name: cdk - path: /tmp/cdk.tar - - test-e2e: - name: E2E tests (different groups) - runs-on: amd-runner-2204 - timeout-minutes: 30 - needs: build-cdk-image - strategy: - fail-fast: false - matrix: - e2e-group: - - "fork12-pessimistic" - steps: - - name: Checkout code - uses: actions/checkout@v4 - - - name: Checkout kurtosis-cdk - uses: actions/checkout@v4 - with: - repository: 0xPolygon/kurtosis-cdk - path: kurtosis-cdk - ref: v0.2.25 - - - name: Install Kurtosis CDK tools - uses: ./kurtosis-cdk/.github/actions/setup-kurtosis-cdk - - - name: Setup Bats and bats libs - uses: bats-core/bats-action@2.0.0 - - - name: Download cdk archive - uses: actions/download-artifact@v4 - with: - name: cdk - path: /tmp - - - name: Load cdk image - run: | - docker load --input /tmp/cdk.tar - docker image ls -a - - - name: Run E2E tests - run: make test-e2e-${{ matrix.e2e-group }} - working-directory: test - env: - KURTOSIS_FOLDER: ${{ github.workspace }}/kurtosis-cdk - BATS_LIB_PATH: 
/usr/lib/ - AGGLAYER_PROVER_SP1_KEY: ${{ secrets.SP1_PRIVATE_KEY }} - - - name: Dump enclave logs - if: failure() - run: kurtosis dump ./dump - - - name: Generate archive name - if: failure() - run: | - archive_name="dump_run_with_args_${{matrix.e2e-group}}_${{ github.run_id }}" - echo "ARCHIVE_NAME=${archive_name}" >> "$GITHUB_ENV" - echo "Generated archive name: ${archive_name}" - kurtosis service exec cdk cdk-node-001 'cat /etc/cdk/cdk-node-config.toml' > ./dump/cdk-node-config.toml - - - name: Upload logs - if: failure() - uses: actions/upload-artifact@v4 - with: - name: ${{ env.ARCHIVE_NAME }} - path: ./dump - - test-e2e-multi-pp: - name: E2E tests - needs: build-cdk-image - strategy: - fail-fast: false - matrix: - go-version: [ 1.22.x ] - goarch: [ "amd64" ] - e2e-group: - - "fork12-multi-pessimistic" - runs-on: amd-runner-2204 - timeout-minutes: 30 - - steps: - - name: Checkout code - uses: actions/checkout@v4 - - - name: Install Go - uses: actions/setup-go@v5 - with: - go-version: ${{ matrix.go-version }} - env: - GOARCH: ${{ matrix.goarch }} - - - name: Build Tools - run: make build-tools - - - name: Checkout kurtosis-cdk - uses: actions/checkout@v4 - with: - repository: 0xPolygon/kurtosis-cdk - path: kurtosis-cdk - ref: main - - - name: Install Kurtosis CDK tools - uses: ./kurtosis-cdk/.github/actions/setup-kurtosis-cdk - - - name: Setup Bats and bats libs - uses: bats-core/bats-action@2.0.0 - - - name: Download cdk archive - uses: actions/download-artifact@v4 - with: - name: cdk - path: /tmp - - - name: Load cdk image - run: | - docker load --input /tmp/cdk.tar - docker image ls -a - - - name: Run E2E tests - run: make test-e2e-fork12-multi-pessimistic - working-directory: test - env: - KURTOSIS_FOLDER: ${{ github.workspace }}/kurtosis-cdk - BATS_LIB_PATH: /usr/lib/ - AGGLAYER_PROVER_SP1_KEY: ${{ secrets.SP1_PRIVATE_KEY }} - - - name: Dump enclave logs - if: failure() - run: kurtosis dump ./dump - - - name: Generate archive name - if: failure() - run: | 
- archive_name="dump_run_with_args_${{matrix.e2e-group}}_${{ github.run_id }}" - echo "ARCHIVE_NAME=${archive_name}" >> "$GITHUB_ENV" - echo "Generated archive name: ${archive_name}" - kurtosis service exec cdk cdk-node-001 'cat /etc/cdk/cdk-node-config.toml' > ./dump/cdk-node-config.toml - - - name: Upload logs - if: failure() - uses: actions/upload-artifact@v4 - with: - name: ${{ env.ARCHIVE_NAME }} - path: ./dump diff --git a/.github/workflows/test-resequence.yml b/.github/workflows/test-resequence.yml index eb733cf2..684db5e4 100644 --- a/.github/workflows/test-resequence.yml +++ b/.github/workflows/test-resequence.yml @@ -17,10 +17,10 @@ jobs: # matrix: # da-mode: [ "rollup" ] steps: - - name: Checkout cdk + - name: Checkout aggkit uses: actions/checkout@v4 with: - path: cdk + path: aggkit - name: Checkout cdk-erigon uses: actions/checkout@v4 @@ -50,8 +50,8 @@ jobs: /usr/local/bin/polycli version - name: Build docker image - working-directory: ./cdk - run: docker build -t cdk:local --file Dockerfile . + working-directory: ./aggkit + run: docker build -t aggkit:local --file Dockerfile . - name: Remove unused flags working-directory: ./kurtosis-cdk @@ -62,14 +62,14 @@ jobs: - name: Configure Kurtosis CDK working-directory: ./kurtosis-cdk run: | - /usr/local/bin/yq -i '.args.cdk_node_image = "cdk:local"' params.yml + /usr/local/bin/yq -i '.args.aggkit_node_image = "aggkit:local"' params.yml /usr/local/bin/yq -i '.args.zkevm_rollup_fork_id = "12"' params.yml /usr/local/bin/yq -i '.args.zkevm_prover_image = "hermeznetwork/zkevm-prover:v8.0.0-RC5-fork.12"' params.yml /usr/local/bin/yq -i '.args.cdk_erigon_node_image = "jerrycgh/cdk-erigon:d5d04906f723f3f1d8c43c9e6baf3e18c27ff348"' params.yml - name: Deploy Kurtosis CDK package working-directory: ./kurtosis-cdk - run: kurtosis run --enclave cdk-v1 --args-file params.yml --image-download always . + run: kurtosis run --enclave aggkit --args-file params.yml --image-download always . 
- name: Test resequence working-directory: ./cdk-erigon diff --git a/.github/workflows/x86_deb_packager.yml b/.github/workflows/x86_deb_packager.yml index 584aad6d..3953af1a 100644 --- a/.github/workflows/x86_deb_packager.yml +++ b/.github/workflows/x86_deb_packager.yml @@ -40,50 +40,42 @@ jobs: - name: Build the binary run: make build - - name: Build the rust binary - run: | - BUILD_SCRIPT_DISABLED=1 - cargo build --release --bin cdk - - name: making directory structure - run: mkdir -p packaging/deb/cdk/usr/bin/ + run: mkdir -p packaging/deb/aggkit/usr/bin/ - name: copying necessary binary for amd64 - run: cp -rp target/cdk-node packaging/deb/cdk/usr/bin/cdk-node - - name: copying rust binary for amd64 - run: cp -rp target/release/cdk packaging/deb/cdk/usr/bin/cdk + run: cp -rp target/aggkit packaging/deb/aggkit/usr/bin/aggkit # Control file creation - name: Create control file run: | - echo "Package: cdk" >> packaging/deb/cdk/DEBIAN/control - echo "Version: ${{ env.VERSION }}" >> packaging/deb/cdk/DEBIAN/control - echo "Section: base" >> packaging/deb/cdk/DEBIAN/control - echo "Priority: optional" >> packaging/deb/cdk/DEBIAN/control - echo "Architecture: amd64" >> packaging/deb/cdk/DEBIAN/control - echo "Maintainer: devops@polygon.technology" >> packaging/deb/cdk/DEBIAN/control - echo "Description: cdk binary package" >> packaging/deb/cdk/DEBIAN/control + echo "Package: aggkit" >> packaging/deb/aggkit/DEBIAN/control + echo "Version: ${{ env.VERSION }}" >> packaging/deb/aggkit/DEBIAN/control + echo "Section: base" >> packaging/deb/aggkit/DEBIAN/control + echo "Priority: optional" >> packaging/deb/aggkit/DEBIAN/control + echo "Architecture: amd64" >> packaging/deb/aggkit/DEBIAN/control + echo "Maintainer: devops@polygon.technology" >> packaging/deb/aggkit/DEBIAN/control + echo "Description: aggkit binary package" >> packaging/deb/aggkit/DEBIAN/control - - name: Creating package for binary for cdk ${{ env.ARCH }} - run: cp -rp packaging/deb/cdk packaging/deb/cdk-${{ 
env.GIT_TAG }}-${{ env.ARCH }} + - name: Creating package for binary for aggkit ${{ env.ARCH }} + run: cp -rp packaging/deb/aggkit packaging/deb/aggkit-${{ env.GIT_TAG }}-${{ env.ARCH }} env: ARCH: amd64 - name: Running package build - run: dpkg-deb --build --root-owner-group packaging/deb/cdk-${{ env.GIT_TAG }}-${{ env.ARCH }} + run: dpkg-deb --build --root-owner-group packaging/deb/aggkit-${{ env.GIT_TAG }}-${{ env.ARCH }} env: ARCH: amd64 - name: Create checksum for the amd64 package - run: cd packaging/deb/ && sha256sum cdk-${{ env.GIT_TAG }}-${{ env.ARCH }}.deb > cdk-${{ env.GIT_TAG }}-${{ env.ARCH }}.deb.checksum + run: cd packaging/deb/ && sha256sum aggkit-${{ env.GIT_TAG }}-${{ env.ARCH }}.deb > aggkit-${{ env.GIT_TAG }}-${{ env.ARCH }}.deb.checksum env: ARCH: amd64 - - - name: Release cdk Packages + - name: Release aggkit Packages uses: softprops/action-gh-release@v2 with: tag_name: ${{ env.GIT_TAG }} prerelease: true files: | - packaging/deb/cdk**.deb - packaging/deb/cdk**.deb.checksum + packaging/deb/aggkit**.deb + packaging/deb/aggkit**.deb.checksum diff --git a/.github/workflows/x86_rpm_packager.yml b/.github/workflows/x86_rpm_packager.yml index d62772ba..001e9932 100644 --- a/.github/workflows/x86_rpm_packager.yml +++ b/.github/workflows/x86_rpm_packager.yml @@ -37,11 +37,6 @@ jobs: - name: Building aggkit for amd64 run: make build - - name: Building the aggkit - run: | - BUILD_SCRIPT_DISABLED=1 - cargo build --release --bin aggkit - - name: Installing some dependencies run: sudo apt-get update && sudo apt-get install -y rpm @@ -69,12 +64,11 @@ jobs: echo "%install" >> packaging/rpm/SPECS/aggkit.spec echo "mkdir -p %{buildroot}/usr/bin" >> packaging/rpm/SPECS/aggkit.spec - echo "cp /home/runner/work/aggkit/aggkit/target/aggkit-node %{buildroot}/usr/bin/aggkit-node" >> packaging/rpm/SPECS/aggkit.spec + echo "cp /home/runner/work/aggkit/aggkit/target/aggkit %{buildroot}/usr/bin/aggkit" >> packaging/rpm/SPECS/aggkit.spec echo "cp 
/home/runner/work/aggkit/aggkit/target/release/aggkit %{buildroot}/usr/bin/aggkit" >> packaging/rpm/SPECS/aggkit.spec echo "%files" >> packaging/rpm/SPECS/aggkit.spec echo "/usr/bin/aggkit" >> packaging/rpm/SPECS/aggkit.spec - echo "/usr/bin/aggkit-node" >> packaging/rpm/SPECS/aggkit.spec - name: Construct rpm package diff --git a/.gitignore b/.gitignore index 5f16e099..6598acd7 100644 --- a/.gitignore +++ b/.gitignore @@ -5,7 +5,6 @@ cmd/__debug_bin **__debug** target/ -book/ index.html tmp .vscode diff --git a/Cargo.lock b/Cargo.lock deleted file mode 100644 index 65b948cf..00000000 --- a/Cargo.lock +++ /dev/null @@ -1,5479 +0,0 @@ -# This file is automatically @generated by Cargo. -# It is not intended for manual editing. -version = 4 - -[[package]] -name = "Inflector" -version = "0.11.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fe438c63458706e03479442743baae6c88256498e6431708f6dfc520a26515d3" -dependencies = [ - "lazy_static", - "regex", -] - -[[package]] -name = "addr2line" -version = "0.22.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e4503c46a5c0c7844e948c9a4d6acd9f50cccb4de1c48eb9e291ea17470c678" -dependencies = [ - "gimli", -] - -[[package]] -name = "adler" -version = "1.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" - -[[package]] -name = "aes" -version = "0.8.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b169f7a6d4742236a0a00c541b845991d0ac43e546831af1249753ab4c3aa3a0" -dependencies = [ - "cfg-if", - "cipher", - "cpufeatures", -] - -[[package]] -name = "aggkit-config" -version = "0.1.0" -dependencies = [ - "ethers", - "jsonrpsee", - "serde", - "serde_json", - "serde_with", - "thiserror", - "toml", - "tracing", - "tracing-appender", - "tracing-subscriber", - "url", -] - -[[package]] -name = "ahash" -version = "0.8.11" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "e89da841a80418a9b391ebaea17f5c112ffaaa96f621d2c285b5174da76b9011" -dependencies = [ - "cfg-if", - "once_cell", - "version_check", - "zerocopy", -] - -[[package]] -name = "aho-corasick" -version = "1.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916" -dependencies = [ - "memchr", -] - -[[package]] -name = "alloy-json-rpc" -version = "0.5.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af5979e0d5a7bf9c7eb79749121e8256e59021af611322aee56e77e20776b4b3" -dependencies = [ - "alloy-primitives", - "alloy-sol-types", - "serde", - "serde_json", - "thiserror", - "tracing", -] - -[[package]] -name = "alloy-primitives" -version = "0.8.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "260d3ff3bff0bb84599f032a2f2c6828180b0ea0cd41fdaf44f39cef3ba41861" -dependencies = [ - "alloy-rlp", - "bytes", - "cfg-if", - "const-hex", - "derive_more 1.0.0", - "hashbrown 0.14.5", - "hex-literal", - "indexmap 2.6.0", - "itoa", - "k256", - "keccak-asm", - "paste", - "proptest", - "rand", - "ruint", - "rustc-hash", - "serde", - "sha3", - "tiny-keccak", -] - -[[package]] -name = "alloy-rlp" -version = "0.3.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26154390b1d205a4a7ac7352aa2eb4f81f391399d4e2f546fb81a2f8bb383f62" -dependencies = [ - "arrayvec", - "bytes", -] - -[[package]] -name = "alloy-rpc-client" -version = "0.5.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7fc2bd1e7403463a5f2c61e955bcc9d3072b63aa177442b0f9aa6a6d22a941e3" -dependencies = [ - "alloy-json-rpc", - "alloy-primitives", - "alloy-transport", - "alloy-transport-http", - "futures", - "pin-project", - "reqwest 0.12.8", - "serde", - "serde_json", - "tokio", - "tokio-stream", - "tower 0.5.1", - "tracing", - "url", - "wasmtimer", -] - 
-[[package]] -name = "alloy-sol-macro" -version = "0.8.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "68e7f6e8fe5b443f82b3f1e15abfa191128f71569148428e49449d01f6f49e8b" -dependencies = [ - "alloy-sol-macro-expander", - "alloy-sol-macro-input", - "proc-macro-error2", - "proc-macro2", - "quote", - "syn 2.0.68", -] - -[[package]] -name = "alloy-sol-macro-expander" -version = "0.8.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6b96ce28d2fde09abb6135f410c41fad670a3a770b6776869bd852f1df102e6f" -dependencies = [ - "alloy-sol-macro-input", - "const-hex", - "heck", - "indexmap 2.6.0", - "proc-macro-error2", - "proc-macro2", - "quote", - "syn 2.0.68", - "syn-solidity", - "tiny-keccak", -] - -[[package]] -name = "alloy-sol-macro-input" -version = "0.8.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "906746396a8296537745711630d9185746c0b50c033d5e9d18b0a6eba3d53f90" -dependencies = [ - "const-hex", - "dunce", - "heck", - "proc-macro2", - "quote", - "syn 2.0.68", - "syn-solidity", -] - -[[package]] -name = "alloy-sol-types" -version = "0.8.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d86a533ce22525969661b25dfe296c112d35eb6861f188fd284f8bd4bb3842ae" -dependencies = [ - "alloy-primitives", - "alloy-sol-macro", - "const-hex", -] - -[[package]] -name = "alloy-transport" -version = "0.5.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "be77579633ebbc1266ae6fd7694f75c408beb1aeb6865d0b18f22893c265a061" -dependencies = [ - "alloy-json-rpc", - "base64 0.22.1", - "futures-util", - "futures-utils-wasm", - "serde", - "serde_json", - "thiserror", - "tokio", - "tower 0.5.1", - "tracing", - "url", - "wasmtimer", -] - -[[package]] -name = "alloy-transport-http" -version = "0.5.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91fd1a5d0827939847983b46f2f79510361f901dc82f8e3c38ac7397af142c6e" 
-dependencies = [ - "alloy-json-rpc", - "alloy-transport", - "reqwest 0.12.8", - "serde_json", - "tower 0.5.1", - "tracing", - "url", -] - -[[package]] -name = "android-tzdata" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0" - -[[package]] -name = "android_system_properties" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" -dependencies = [ - "libc", -] - -[[package]] -name = "anstream" -version = "0.6.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "418c75fa768af9c03be99d17643f93f79bbba589895012a80e3452a19ddda15b" -dependencies = [ - "anstyle", - "anstyle-parse", - "anstyle-query", - "anstyle-wincon", - "colorchoice", - "is_terminal_polyfill", - "utf8parse", -] - -[[package]] -name = "anstyle" -version = "1.0.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "038dfcf04a5feb68e9c60b21c9625a54c2c0616e79b72b0fd87075a056ae1d1b" - -[[package]] -name = "anstyle-parse" -version = "0.2.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c03a11a9034d92058ceb6ee011ce58af4a9bf61491aa7e1e59ecd24bd40d22d4" -dependencies = [ - "utf8parse", -] - -[[package]] -name = "anstyle-query" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ad186efb764318d35165f1758e7dcef3b10628e26d41a44bc5550652e6804391" -dependencies = [ - "windows-sys 0.52.0", -] - -[[package]] -name = "anstyle-wincon" -version = "3.0.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "61a38449feb7068f52bb06c12759005cf459ee52bb4adc1d5a7c4322d716fb19" -dependencies = [ - "anstyle", - "windows-sys 0.52.0", -] - -[[package]] -name = "anyhow" -version = "1.0.86" -source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "b3d1d046238990b9cf5bcde22a3fb3584ee5cf65fb2765f454ed428c7a0063da" - -[[package]] -name = "ark-ff" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6b3235cc41ee7a12aaaf2c575a2ad7b46713a8a50bda2fc3b003a04845c05dd6" -dependencies = [ - "ark-ff-asm 0.3.0", - "ark-ff-macros 0.3.0", - "ark-serialize 0.3.0", - "ark-std 0.3.0", - "derivative", - "num-bigint", - "num-traits", - "paste", - "rustc_version 0.3.3", - "zeroize", -] - -[[package]] -name = "ark-ff" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec847af850f44ad29048935519032c33da8aa03340876d351dfab5660d2966ba" -dependencies = [ - "ark-ff-asm 0.4.2", - "ark-ff-macros 0.4.2", - "ark-serialize 0.4.2", - "ark-std 0.4.0", - "derivative", - "digest 0.10.7", - "itertools 0.10.5", - "num-bigint", - "num-traits", - "paste", - "rustc_version 0.4.0", - "zeroize", -] - -[[package]] -name = "ark-ff-asm" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db02d390bf6643fb404d3d22d31aee1c4bc4459600aef9113833d17e786c6e44" -dependencies = [ - "quote", - "syn 1.0.109", -] - -[[package]] -name = "ark-ff-asm" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3ed4aa4fe255d0bc6d79373f7e31d2ea147bcf486cba1be5ba7ea85abdb92348" -dependencies = [ - "quote", - "syn 1.0.109", -] - -[[package]] -name = "ark-ff-macros" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db2fd794a08ccb318058009eefdf15bcaaaaf6f8161eb3345f907222bac38b20" -dependencies = [ - "num-bigint", - "num-traits", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "ark-ff-macros" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7abe79b0e4288889c4574159ab790824d0033b9fdcb2a112a3182fac2e514565" -dependencies = [ - "num-bigint", - "num-traits", - "proc-macro2", - "quote", - "syn 
1.0.109", -] - -[[package]] -name = "ark-serialize" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d6c2b318ee6e10f8c2853e73a83adc0ccb88995aa978d8a3408d492ab2ee671" -dependencies = [ - "ark-std 0.3.0", - "digest 0.9.0", -] - -[[package]] -name = "ark-serialize" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "adb7b85a02b83d2f22f89bd5cac66c9c89474240cb6207cb1efc16d098e822a5" -dependencies = [ - "ark-std 0.4.0", - "digest 0.10.7", - "num-bigint", -] - -[[package]] -name = "ark-std" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1df2c09229cbc5a028b1d70e00fdb2acee28b1055dfb5ca73eea49c5a25c4e7c" -dependencies = [ - "num-traits", - "rand", -] - -[[package]] -name = "ark-std" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "94893f1e0c6eeab764ade8dc4c0db24caf4fe7cbbaafc0eba0a9030f447b5185" -dependencies = [ - "num-traits", - "rand", -] - -[[package]] -name = "arrayvec" -version = "0.7.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "96d30a06541fbafbc7f82ed10c06164cfbd2c401138f6addd8404629c4b16711" - -[[package]] -name = "ascii-canvas" -version = "3.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8824ecca2e851cec16968d54a01dd372ef8f95b244fb84b84e70128be347c3c6" -dependencies = [ - "term", -] - -[[package]] -name = "async-trait" -version = "0.1.81" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e0c28dcc82d7c8ead5cb13beb15405b57b8546e93215673ff8ca0349a028107" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.68", -] - -[[package]] -name = "async_io_stream" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6d7b9decdf35d8908a7e3ef02f64c5e9b1695e230154c0e8de3969142d9b94c" -dependencies = [ - "futures", - "pharos", - 
"rustc_version 0.4.0", -] - -[[package]] -name = "atomic-waker" -version = "1.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" - -[[package]] -name = "auto_impl" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c87f3f15e7794432337fc718554eaa4dc8f04c9677a950ffe366f20a162ae42" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.68", -] - -[[package]] -name = "autocfg" -version = "1.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c4b4d0bd25bd0b74681c0ad21497610ce1b7c91b1022cd21c80c6fbdd9476b0" - -[[package]] -name = "backtrace" -version = "0.3.73" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5cc23269a4f8976d0a4d2e7109211a419fe30e8d88d677cd60b6bc79c5732e0a" -dependencies = [ - "addr2line", - "cc", - "cfg-if", - "libc", - "miniz_oxide", - "object", - "rustc-demangle", -] - -[[package]] -name = "base16ct" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c7f02d4ea65f2c1853089ffd8d2787bdbc63de2f0d29dedbcf8ccdfa0ccd4cf" - -[[package]] -name = "base64" -version = "0.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8" - -[[package]] -name = "base64" -version = "0.21.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567" - -[[package]] -name = "base64" -version = "0.22.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" - -[[package]] -name = "base64ct" -version = "1.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c3c1a368f70d6cf7302d78f8f7093da241fb8e8807c05cc9e51a125895a6d5b" - 
-[[package]] -name = "bech32" -version = "0.9.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d86b93f97252c47b41663388e6d155714a9d0c398b99f1005cbc5f978b29f445" - -[[package]] -name = "bit-set" -version = "0.5.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0700ddab506f33b20a03b13996eccd309a48e5ff77d0d95926aa0210fb4e95f1" -dependencies = [ - "bit-vec", -] - -[[package]] -name = "bit-vec" -version = "0.6.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "349f9b6a179ed607305526ca489b34ad0a41aed5f7980fa90eb03160b69598fb" - -[[package]] -name = "bitflags" -version = "1.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" - -[[package]] -name = "bitflags" -version = "2.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b048fb63fd8b5923fc5aa7b340d8e156aec7ec02f0c78fa8a6ddc2613f6f71de" - -[[package]] -name = "bitvec" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1bc2832c24239b0141d5674bb9174f9d68a8b5b3f2753311927c172ca46f7e9c" -dependencies = [ - "funty", - "radium", - "tap", - "wyz", -] - -[[package]] -name = "block-buffer" -version = "0.10.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" -dependencies = [ - "generic-array 0.14.7", -] - -[[package]] -name = "bs58" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bf88ba1141d185c399bee5288d850d63b8369520c1eafc32a0430b5b6c287bf4" -dependencies = [ - "sha2", - "tinyvec", -] - -[[package]] -name = "bumpalo" -version = "3.16.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c" - -[[package]] -name = 
"byte-slice-cast" -version = "1.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3ac9f8b63eca6fd385229b3675f6cc0dc5c8a5c8a54a59d4f52ffd670d87b0c" - -[[package]] -name = "byteorder" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" - -[[package]] -name = "bytes" -version = "1.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "514de17de45fdb8dc022b1a7975556c53c86f9f0aa5f534b98977b171857c2c9" -dependencies = [ - "serde", -] - -[[package]] -name = "bzip2" -version = "0.4.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bdb116a6ef3f6c3698828873ad02c3014b3c85cadb88496095628e3ef1e347f8" -dependencies = [ - "bzip2-sys", - "libc", -] - -[[package]] -name = "bzip2-sys" -version = "0.1.11+1.0.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "736a955f3fa7875102d57c82b8cac37ec45224a07fd32d58f9f7a186b6cd4cdc" -dependencies = [ - "cc", - "libc", - "pkg-config", -] - -[[package]] -name = "camino" -version = "1.1.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e0ec6b951b160caa93cc0c7b209e5a3bff7aae9062213451ac99493cd844c239" -dependencies = [ - "serde", -] - -[[package]] -name = "cargo-platform" -version = "0.1.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "24b1f0365a6c6bb4020cd05806fd0d33c44d38046b8bd7f0e40814b9763cabfc" -dependencies = [ - "serde", -] - -[[package]] -name = "cargo_metadata" -version = "0.18.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2d886547e41f740c616ae73108f6eb70afe6d940c7bc697cb30f13daec073037" -dependencies = [ - "camino", - "cargo-platform", - "semver 1.0.23", - "serde", - "serde_json", - "thiserror", -] - -[[package]] -name = "cc" -version = "1.1.1" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "907d8581360765417f8f2e0e7d602733bbed60156b4465b7617243689ef9b83d" -dependencies = [ - "jobserver", - "libc", - "once_cell", -] - -[[package]] -name = "cdk" -version = "0.1.0" -dependencies = [ - "aggkit-config", - "alloy-json-rpc", - "alloy-rpc-client", - "alloy-transport-http", - "anyhow", - "clap", - "colored", - "dotenvy", - "execute", - "regex", - "reqwest 0.12.8", - "serde", - "serde_json", - "tempfile", - "tokio", - "toml", - "tracing", - "tracing-subscriber", - "url", -] - -[[package]] -name = "cesu8" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d43a04d8753f35258c91f8ec639f792891f748a1edbd759cf1dcea3382ad83c" - -[[package]] -name = "cfg-if" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" - -[[package]] -name = "chrono" -version = "0.4.38" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a21f936df1771bf62b77f047b726c4625ff2e8aa607c01ec06e5a05bd8463401" -dependencies = [ - "android-tzdata", - "iana-time-zone", - "num-traits", - "serde", - "windows-targets 0.52.6", -] - -[[package]] -name = "cipher" -version = "0.4.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "773f3b9af64447d2ce9850330c473515014aa235e6a783b02db81ff39e4a3dad" -dependencies = [ - "crypto-common", - "inout", -] - -[[package]] -name = "clap" -version = "4.5.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5db83dced34638ad474f39f250d7fea9598bdd239eaced1bdf45d597da0f433f" -dependencies = [ - "clap_builder", - "clap_derive", -] - -[[package]] -name = "clap_builder" -version = "4.5.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f7e204572485eb3fbf28f871612191521df159bc3e15a9f5064c66dba3a8c05f" -dependencies = [ - "anstream", - "anstyle", 
- "clap_lex", - "strsim", -] - -[[package]] -name = "clap_derive" -version = "4.5.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c780290ccf4fb26629baa7a1081e68ced113f1d3ec302fa5948f1c381ebf06c6" -dependencies = [ - "heck", - "proc-macro2", - "quote", - "syn 2.0.68", -] - -[[package]] -name = "clap_lex" -version = "0.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b82cf0babdbd58558212896d1a4272303a57bdb245c2bf1147185fb45640e70" - -[[package]] -name = "coins-bip32" -version = "0.8.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b6be4a5df2098cd811f3194f64ddb96c267606bffd9689ac7b0160097b01ad3" -dependencies = [ - "bs58", - "coins-core", - "digest 0.10.7", - "hmac", - "k256", - "serde", - "sha2", - "thiserror", -] - -[[package]] -name = "coins-bip39" -version = "0.8.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3db8fba409ce3dc04f7d804074039eb68b960b0829161f8e06c95fea3f122528" -dependencies = [ - "bitvec", - "coins-bip32", - "hmac", - "once_cell", - "pbkdf2 0.12.2", - "rand", - "sha2", - "thiserror", -] - -[[package]] -name = "coins-core" -version = "0.8.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5286a0843c21f8367f7be734f89df9b822e0321d8bcce8d6e735aadff7d74979" -dependencies = [ - "base64 0.21.7", - "bech32", - "bs58", - "digest 0.10.7", - "generic-array 0.14.7", - "hex", - "ripemd", - "serde", - "serde_derive", - "sha2", - "sha3", - "thiserror", -] - -[[package]] -name = "colorchoice" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b6a852b24ab71dffc585bcb46eaf7959d175cb865a7152e35b348d1b2960422" - -[[package]] -name = "colored" -version = "2.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cbf2150cce219b664a8a70df7a1f933836724b503f8a413af9365b4dcc4d90b8" -dependencies = [ - "lazy_static", - "windows-sys 
0.48.0", -] - -[[package]] -name = "combine" -version = "4.6.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba5a308b75df32fe02788e748662718f03fde005016435c444eea572398219fd" -dependencies = [ - "bytes", - "memchr", -] - -[[package]] -name = "const-hex" -version = "1.12.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "94fb8a24a26d37e1ffd45343323dc9fe6654ceea44c12f2fcb3d7ac29e610bc6" -dependencies = [ - "cfg-if", - "cpufeatures", - "hex", - "proptest", - "serde", -] - -[[package]] -name = "const-oid" -version = "0.9.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8" - -[[package]] -name = "constant_time_eq" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "245097e9a4535ee1e3e3931fcfcd55a796a44c643e8596ff6566d68f09b87bbc" - -[[package]] -name = "core-foundation" -version = "0.9.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f" -dependencies = [ - "core-foundation-sys", - "libc", -] - -[[package]] -name = "core-foundation-sys" -version = "0.8.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "06ea2b9bc92be3c2baa9334a323ebca2d6f074ff852cd1d7b11064035cd3868f" - -[[package]] -name = "cpufeatures" -version = "0.2.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "53fe5e26ff1b7aef8bca9c6080520cfb8d9333c7568e1829cef191a9723e5504" -dependencies = [ - "libc", -] - -[[package]] -name = "crc32fast" -version = "1.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a97769d94ddab943e4510d138150169a2758b5ef3eb191a9ee688de3e23ef7b3" -dependencies = [ - "cfg-if", -] - -[[package]] -name = "crossbeam-channel" -version = "0.5.13" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "33480d6946193aa8033910124896ca395333cae7e2d1113d1fef6c3272217df2" -dependencies = [ - "crossbeam-utils", -] - -[[package]] -name = "crossbeam-deque" -version = "0.8.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "613f8cc01fe9cf1a3eb3d7f488fd2fa8388403e97039e2f73692932e291a770d" -dependencies = [ - "crossbeam-epoch", - "crossbeam-utils", -] - -[[package]] -name = "crossbeam-epoch" -version = "0.9.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b82ac4a3c2ca9c3460964f020e1402edd5753411d7737aa39c3714ad1b5420e" -dependencies = [ - "crossbeam-utils", -] - -[[package]] -name = "crossbeam-utils" -version = "0.8.20" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22ec99545bb0ed0ea7bb9b8e1e9122ea386ff8a48c0922e43f36d45ab09e0e80" - -[[package]] -name = "crunchy" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a81dae078cea95a014a339291cec439d2f232ebe854a9d672b796c6afafa9b7" - -[[package]] -name = "crypto-bigint" -version = "0.5.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0dc92fb57ca44df6db8059111ab3af99a63d5d0f8375d9972e319a379c6bab76" -dependencies = [ - "generic-array 0.14.7", - "rand_core", - "subtle", - "zeroize", -] - -[[package]] -name = "crypto-common" -version = "0.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" -dependencies = [ - "generic-array 0.14.7", - "typenum", -] - -[[package]] -name = "ctr" -version = "0.9.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0369ee1ad671834580515889b80f2ea915f23b8be8d0daa4bbaf2ac5c7590835" -dependencies = [ - "cipher", -] - -[[package]] -name = "darling" -version = "0.20.10" -source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "6f63b86c8a8826a49b8c21f08a2d07338eec8d900540f8630dc76284be802989" -dependencies = [ - "darling_core", - "darling_macro", -] - -[[package]] -name = "darling_core" -version = "0.20.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95133861a8032aaea082871032f5815eb9e98cef03fa916ab4500513994df9e5" -dependencies = [ - "fnv", - "ident_case", - "proc-macro2", - "quote", - "strsim", - "syn 2.0.68", -] - -[[package]] -name = "darling_macro" -version = "0.20.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d336a2a514f6ccccaa3e09b02d41d35330c07ddf03a62165fcec10bb561c7806" -dependencies = [ - "darling_core", - "quote", - "syn 2.0.68", -] - -[[package]] -name = "data-encoding" -version = "2.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e8566979429cf69b49a5c740c60791108e86440e8be149bbea4fe54d2c32d6e2" - -[[package]] -name = "der" -version = "0.7.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f55bf8e7b65898637379c1b74eb1551107c8294ed26d855ceb9fd1a09cfc9bc0" -dependencies = [ - "const-oid", - "zeroize", -] - -[[package]] -name = "deranged" -version = "0.3.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b42b6fa04a440b495c8b04d0e71b707c585f83cb9cb28cf8cd0d976c315e31b4" -dependencies = [ - "powerfmt", - "serde", -] - -[[package]] -name = "derivative" -version = "2.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "derive_more" -version = "0.99.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f33878137e4dafd7fa914ad4e259e18a4e8e532b9617a2d0150262bf53abfce" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.68", -] - -[[package]] -name = "derive_more" -version = "1.0.0" 
-source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4a9b99b9cbbe49445b21764dc0625032a89b145a2642e67603e1c936f5458d05" -dependencies = [ - "derive_more-impl", -] - -[[package]] -name = "derive_more-impl" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb7330aeadfbe296029522e6c40f315320aba36fc43a5b3632f3795348f3bd22" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.68", - "unicode-xid", -] - -[[package]] -name = "digest" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d3dd60d1080a57a05ab032377049e0591415d2b31afd7028356dbf3cc6dcb066" -dependencies = [ - "generic-array 0.14.7", -] - -[[package]] -name = "digest" -version = "0.10.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" -dependencies = [ - "block-buffer", - "const-oid", - "crypto-common", - "subtle", -] - -[[package]] -name = "dirs" -version = "5.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "44c45a9d03d6676652bcb5e724c7e988de1acad23a711b5217ab9cbecbec2225" -dependencies = [ - "dirs-sys", -] - -[[package]] -name = "dirs-next" -version = "2.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b98cf8ebf19c3d1b223e151f99a4f9f0690dca41414773390fc824184ac833e1" -dependencies = [ - "cfg-if", - "dirs-sys-next", -] - -[[package]] -name = "dirs-sys" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "520f05a5cbd335fae5a99ff7a6ab8627577660ee5cfd6a94a6a929b52ff0321c" -dependencies = [ - "libc", - "option-ext", - "redox_users", - "windows-sys 0.48.0", -] - -[[package]] -name = "dirs-sys-next" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ebda144c4fe02d1f7ea1a7d9641b6fc6b580adcfa024ae48797ecdeb6825b4d" -dependencies = [ - "libc", - 
"redox_users", - "winapi", -] - -[[package]] -name = "dotenvy" -version = "0.15.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1aaf95b3e5c8f23aa320147307562d361db0ae0d51242340f558153b4eb2439b" - -[[package]] -name = "dunce" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56ce8c6da7551ec6c462cbaf3bfbc75131ebbfa1c944aeaa9dab51ca1c5f0c3b" - -[[package]] -name = "ecdsa" -version = "0.16.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ee27f32b5c5292967d2d4a9d7f1e0b0aed2c15daded5a60300e4abb9d8020bca" -dependencies = [ - "der", - "digest 0.10.7", - "elliptic-curve", - "rfc6979", - "signature", - "spki", -] - -[[package]] -name = "either" -version = "1.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "60b1af1c220855b6ceac025d3f6ecdd2b7c4894bfe9cd9bda4fbb4bc7c0d4cf0" - -[[package]] -name = "elliptic-curve" -version = "0.13.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5e6043086bf7973472e0c7dff2142ea0b680d30e18d9cc40f267efbf222bd47" -dependencies = [ - "base16ct", - "crypto-bigint", - "digest 0.10.7", - "ff", - "generic-array 0.14.7", - "group", - "pkcs8", - "rand_core", - "sec1", - "subtle", - "zeroize", -] - -[[package]] -name = "ena" -version = "0.14.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d248bdd43ce613d87415282f69b9bb99d947d290b10962dd6c56233312c2ad5" -dependencies = [ - "log", -] - -[[package]] -name = "encoding_rs" -version = "0.8.34" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b45de904aa0b010bce2ab45264d0631681847fa7b6f2eaa7dab7619943bc4f59" -dependencies = [ - "cfg-if", -] - -[[package]] -name = "enr" -version = "0.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a3d8dc56e02f954cac8eb489772c552c473346fc34f67412bb6244fd647f7e4" -dependencies = [ - "base64 0.21.7", - 
"bytes", - "hex", - "k256", - "log", - "rand", - "rlp", - "serde", - "sha3", - "zeroize", -] - -[[package]] -name = "equivalent" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" - -[[package]] -name = "errno" -version = "0.3.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "534c5cf6194dfab3db3242765c03bbe257cf92f22b38f6bc0c58d59108a820ba" -dependencies = [ - "libc", - "windows-sys 0.52.0", -] - -[[package]] -name = "eth-keystore" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fda3bf123be441da5260717e0661c25a2fd9cb2b2c1d20bf2e05580047158ab" -dependencies = [ - "aes", - "ctr", - "digest 0.10.7", - "hex", - "hmac", - "pbkdf2 0.11.0", - "rand", - "scrypt", - "serde", - "serde_json", - "sha2", - "sha3", - "thiserror", - "uuid", -] - -[[package]] -name = "ethabi" -version = "18.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7413c5f74cc903ea37386a8965a936cbeb334bd270862fdece542c1b2dcbc898" -dependencies = [ - "ethereum-types", - "hex", - "once_cell", - "regex", - "serde", - "serde_json", - "sha3", - "thiserror", - "uint", -] - -[[package]] -name = "ethbloom" -version = "0.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c22d4b5885b6aa2fe5e8b9329fb8d232bf739e434e6b87347c63bdd00c120f60" -dependencies = [ - "crunchy", - "fixed-hash", - "impl-codec", - "impl-rlp", - "impl-serde", - "scale-info", - "tiny-keccak", -] - -[[package]] -name = "ethereum-types" -version = "0.14.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "02d215cbf040552efcbe99a38372fe80ab9d00268e20012b79fcd0f073edd8ee" -dependencies = [ - "ethbloom", - "fixed-hash", - "impl-codec", - "impl-rlp", - "impl-serde", - "primitive-types", - "scale-info", - "uint", -] - -[[package]] -name = "ethers" -version = "2.0.14" 
-source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "816841ea989f0c69e459af1cf23a6b0033b19a55424a1ea3a30099becdb8dec0" -dependencies = [ - "ethers-addressbook", - "ethers-contract", - "ethers-core", - "ethers-etherscan", - "ethers-middleware", - "ethers-providers", - "ethers-signers", - "ethers-solc", -] - -[[package]] -name = "ethers-addressbook" -version = "2.0.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5495afd16b4faa556c3bba1f21b98b4983e53c1755022377051a975c3b021759" -dependencies = [ - "ethers-core", - "once_cell", - "serde", - "serde_json", -] - -[[package]] -name = "ethers-contract" -version = "2.0.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6fceafa3578c836eeb874af87abacfb041f92b4da0a78a5edd042564b8ecdaaa" -dependencies = [ - "const-hex", - "ethers-contract-abigen", - "ethers-contract-derive", - "ethers-core", - "ethers-providers", - "futures-util", - "once_cell", - "pin-project", - "serde", - "serde_json", - "thiserror", -] - -[[package]] -name = "ethers-contract-abigen" -version = "2.0.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "04ba01fbc2331a38c429eb95d4a570166781f14290ef9fdb144278a90b5a739b" -dependencies = [ - "Inflector", - "const-hex", - "dunce", - "ethers-core", - "ethers-etherscan", - "eyre", - "prettyplease", - "proc-macro2", - "quote", - "regex", - "reqwest 0.11.27", - "serde", - "serde_json", - "syn 2.0.68", - "toml", - "walkdir", -] - -[[package]] -name = "ethers-contract-derive" -version = "2.0.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87689dcabc0051cde10caaade298f9e9093d65f6125c14575db3fd8c669a168f" -dependencies = [ - "Inflector", - "const-hex", - "ethers-contract-abigen", - "ethers-core", - "proc-macro2", - "quote", - "serde_json", - "syn 2.0.68", -] - -[[package]] -name = "ethers-core" -version = "2.0.14" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "82d80cc6ad30b14a48ab786523af33b37f28a8623fc06afd55324816ef18fb1f" -dependencies = [ - "arrayvec", - "bytes", - "cargo_metadata", - "chrono", - "const-hex", - "elliptic-curve", - "ethabi", - "generic-array 0.14.7", - "k256", - "num_enum", - "once_cell", - "open-fastrlp", - "rand", - "rlp", - "serde", - "serde_json", - "strum", - "syn 2.0.68", - "tempfile", - "thiserror", - "tiny-keccak", - "unicode-xid", -] - -[[package]] -name = "ethers-etherscan" -version = "2.0.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e79e5973c26d4baf0ce55520bd732314328cabe53193286671b47144145b9649" -dependencies = [ - "chrono", - "ethers-core", - "reqwest 0.11.27", - "semver 1.0.23", - "serde", - "serde_json", - "thiserror", - "tracing", -] - -[[package]] -name = "ethers-middleware" -version = "2.0.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "48f9fdf09aec667c099909d91908d5eaf9be1bd0e2500ba4172c1d28bfaa43de" -dependencies = [ - "async-trait", - "auto_impl", - "ethers-contract", - "ethers-core", - "ethers-etherscan", - "ethers-providers", - "ethers-signers", - "futures-channel", - "futures-locks", - "futures-util", - "instant", - "reqwest 0.11.27", - "serde", - "serde_json", - "thiserror", - "tokio", - "tracing", - "tracing-futures", - "url", -] - -[[package]] -name = "ethers-providers" -version = "2.0.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6434c9a33891f1effc9c75472e12666db2fa5a0fec4b29af6221680a6fe83ab2" -dependencies = [ - "async-trait", - "auto_impl", - "base64 0.21.7", - "bytes", - "const-hex", - "enr", - "ethers-core", - "futures-core", - "futures-timer", - "futures-util", - "hashers", - "http 0.2.12", - "instant", - "jsonwebtoken", - "once_cell", - "pin-project", - "reqwest 0.11.27", - "serde", - "serde_json", - "thiserror", - "tokio", - "tokio-tungstenite", - "tracing", - "tracing-futures", - "url", - 
"wasm-bindgen", - "wasm-bindgen-futures", - "web-sys", - "ws_stream_wasm", -] - -[[package]] -name = "ethers-signers" -version = "2.0.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "228875491c782ad851773b652dd8ecac62cda8571d3bc32a5853644dd26766c2" -dependencies = [ - "async-trait", - "coins-bip32", - "coins-bip39", - "const-hex", - "elliptic-curve", - "eth-keystore", - "ethers-core", - "rand", - "sha2", - "thiserror", - "tracing", -] - -[[package]] -name = "ethers-solc" -version = "2.0.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "66244a771d9163282646dbeffe0e6eca4dda4146b6498644e678ac6089b11edd" -dependencies = [ - "cfg-if", - "const-hex", - "dirs", - "dunce", - "ethers-core", - "glob", - "home", - "md-5", - "num_cpus", - "once_cell", - "path-slash", - "rayon", - "regex", - "semver 1.0.23", - "serde", - "serde_json", - "solang-parser", - "svm-rs", - "thiserror", - "tiny-keccak", - "tokio", - "tracing", - "walkdir", - "yansi", -] - -[[package]] -name = "execute" -version = "0.2.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3a82608ee96ce76aeab659e9b8d3c2b787bffd223199af88c674923d861ada10" -dependencies = [ - "execute-command-macro", - "execute-command-tokens", - "generic-array 1.0.0", -] - -[[package]] -name = "execute-command-macro" -version = "0.1.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90dec53d547564e911dc4ff3ecb726a64cf41a6fa01a2370ebc0d95175dd08bd" -dependencies = [ - "execute-command-macro-impl", -] - -[[package]] -name = "execute-command-macro-impl" -version = "0.1.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce8cd46a041ad005ab9c71263f9a0ff5b529eac0fe4cc9b4a20f4f0765d8cf4b" -dependencies = [ - "execute-command-tokens", - "quote", - "syn 2.0.68", -] - -[[package]] -name = "execute-command-tokens" -version = "0.1.7" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "69dc321eb6be977f44674620ca3aa21703cb20ffbe560e1ae97da08401ffbcad" - -[[package]] -name = "eyre" -version = "0.6.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7cd915d99f24784cdc19fd37ef22b97e3ff0ae756c7e492e9fbfe897d61e2aec" -dependencies = [ - "indenter", - "once_cell", -] - -[[package]] -name = "fastrand" -version = "2.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9fc0510504f03c51ada170672ac806f1f105a88aa97a5281117e1ddc3368e51a" - -[[package]] -name = "fastrlp" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "139834ddba373bbdd213dffe02c8d110508dcf1726c2be27e8d1f7d7e1856418" -dependencies = [ - "arrayvec", - "auto_impl", - "bytes", -] - -[[package]] -name = "ff" -version = "0.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ded41244b729663b1e574f1b4fb731469f69f79c17667b5d776b16cda0479449" -dependencies = [ - "rand_core", - "subtle", -] - -[[package]] -name = "fixed-hash" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "835c052cb0c08c1acf6ffd71c022172e18723949c8282f2b9f27efbc51e64534" -dependencies = [ - "byteorder", - "rand", - "rustc-hex", - "static_assertions", -] - -[[package]] -name = "fixedbitset" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" - -[[package]] -name = "flate2" -version = "1.0.30" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f54427cfd1c7829e2a139fcefea601bf088ebca651d2bf53ebc600eac295dae" -dependencies = [ - "crc32fast", - "miniz_oxide", -] - -[[package]] -name = "fnv" -version = "1.0.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" - -[[package]] -name = "foreign-types" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1" -dependencies = [ - "foreign-types-shared", -] - -[[package]] -name = "foreign-types-shared" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" - -[[package]] -name = "form_urlencoded" -version = "1.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456" -dependencies = [ - "percent-encoding", -] - -[[package]] -name = "fs2" -version = "0.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9564fc758e15025b46aa6643b1b77d047d1a56a1aea6e01002ac0c7026876213" -dependencies = [ - "libc", - "winapi", -] - -[[package]] -name = "funty" -version = "2.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c" - -[[package]] -name = "futures" -version = "0.3.30" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "645c6916888f6cb6350d2550b80fb63e734897a8498abe35cfb732b6487804b0" -dependencies = [ - "futures-channel", - "futures-core", - "futures-executor", - "futures-io", - "futures-sink", - "futures-task", - "futures-util", -] - -[[package]] -name = "futures-channel" -version = "0.3.30" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eac8f7d7865dcb88bd4373ab671c8cf4508703796caa2b1985a9ca867b3fcb78" -dependencies = [ - "futures-core", - "futures-sink", -] - -[[package]] -name = "futures-core" -version = "0.3.30" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"dfc6580bb841c5a68e9ef15c77ccc837b40a7504914d52e47b8b0e9bbda25a1d" - -[[package]] -name = "futures-executor" -version = "0.3.30" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a576fc72ae164fca6b9db127eaa9a9dda0d61316034f33a0a0d4eda41f02b01d" -dependencies = [ - "futures-core", - "futures-task", - "futures-util", -] - -[[package]] -name = "futures-io" -version = "0.3.30" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a44623e20b9681a318efdd71c299b6b222ed6f231972bfe2f224ebad6311f0c1" - -[[package]] -name = "futures-locks" -version = "0.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "45ec6fe3675af967e67c5536c0b9d44e34e6c52f86bedc4ea49c5317b8e94d06" -dependencies = [ - "futures-channel", - "futures-task", -] - -[[package]] -name = "futures-macro" -version = "0.3.30" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.68", -] - -[[package]] -name = "futures-sink" -version = "0.3.30" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9fb8e00e87438d937621c1c6269e53f536c14d3fbd6a042bb24879e57d474fb5" - -[[package]] -name = "futures-task" -version = "0.3.30" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38d84fa142264698cdce1a9f9172cf383a0c82de1bddcf3092901442c4097004" - -[[package]] -name = "futures-timer" -version = "3.0.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f288b0a4f20f9a56b5d1da57e2227c661b7b16168e2f72365f57b63326e29b24" -dependencies = [ - "gloo-timers", - "send_wrapper 0.4.0", -] - -[[package]] -name = "futures-util" -version = "0.3.30" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d6401deb83407ab3da39eba7e33987a73c3df0c82b4bb5813ee871c19c41d48" -dependencies = [ - "futures-channel", - 
"futures-core", - "futures-io", - "futures-macro", - "futures-sink", - "futures-task", - "memchr", - "pin-project-lite", - "pin-utils", - "slab", -] - -[[package]] -name = "futures-utils-wasm" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "42012b0f064e01aa58b545fe3727f90f7dd4020f4a3ea735b50344965f5a57e9" - -[[package]] -name = "fxhash" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c31b6d751ae2c7f11320402d34e41349dd1016f8d5d45e48c4312bc8625af50c" -dependencies = [ - "byteorder", -] - -[[package]] -name = "generic-array" -version = "0.14.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" -dependencies = [ - "typenum", - "version_check", - "zeroize", -] - -[[package]] -name = "generic-array" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fe739944a5406424e080edccb6add95685130b9f160d5407c639c7df0c5836b0" -dependencies = [ - "typenum", -] - -[[package]] -name = "getrandom" -version = "0.2.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7" -dependencies = [ - "cfg-if", - "libc", - "wasi", -] - -[[package]] -name = "gimli" -version = "0.29.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "40ecd4077b5ae9fd2e9e169b102c6c330d0605168eb0e8bf79952b256dbefffd" - -[[package]] -name = "glob" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b" - -[[package]] -name = "gloo-net" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c06f627b1a58ca3d42b45d6104bf1e1a03799df472df00988b6ba21accc10580" -dependencies = [ - "futures-channel", - "futures-core", - 
"futures-sink", - "gloo-utils", - "http 1.1.0", - "js-sys", - "pin-project", - "serde", - "serde_json", - "thiserror", - "wasm-bindgen", - "wasm-bindgen-futures", - "web-sys", -] - -[[package]] -name = "gloo-timers" -version = "0.2.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b995a66bb87bebce9a0f4a95aed01daca4872c050bfcb21653361c03bc35e5c" -dependencies = [ - "futures-channel", - "futures-core", - "js-sys", - "wasm-bindgen", -] - -[[package]] -name = "gloo-utils" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b5555354113b18c547c1d3a98fbf7fb32a9ff4f6fa112ce823a21641a0ba3aa" -dependencies = [ - "js-sys", - "serde", - "serde_json", - "wasm-bindgen", - "web-sys", -] - -[[package]] -name = "group" -version = "0.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f0f9ef7462f7c099f518d754361858f86d8a07af53ba9af0fe635bbccb151a63" -dependencies = [ - "ff", - "rand_core", - "subtle", -] - -[[package]] -name = "h2" -version = "0.3.26" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81fe527a889e1532da5c525686d96d4c2e74cdd345badf8dfef9f6b39dd5f5e8" -dependencies = [ - "bytes", - "fnv", - "futures-core", - "futures-sink", - "futures-util", - "http 0.2.12", - "indexmap 2.6.0", - "slab", - "tokio", - "tokio-util", - "tracing", -] - -[[package]] -name = "h2" -version = "0.4.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa82e28a107a8cc405f0839610bdc9b15f1e25ec7d696aa5cf173edbcb1486ab" -dependencies = [ - "atomic-waker", - "bytes", - "fnv", - "futures-core", - "futures-sink", - "http 1.1.0", - "indexmap 2.6.0", - "slab", - "tokio", - "tokio-util", - "tracing", -] - -[[package]] -name = "hashbrown" -version = "0.12.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" - -[[package]] -name = "hashbrown" 
-version = "0.14.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" -dependencies = [ - "ahash", - "serde", -] - -[[package]] -name = "hashbrown" -version = "0.15.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e087f84d4f86bf4b218b927129862374b72199ae7d8657835f1e89000eea4fb" - -[[package]] -name = "hashers" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2bca93b15ea5a746f220e56587f71e73c6165eab783df9e26590069953e3c30" -dependencies = [ - "fxhash", -] - -[[package]] -name = "heck" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" - -[[package]] -name = "hermit-abi" -version = "0.3.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d231dfb89cfffdbc30e7fc41579ed6066ad03abda9e567ccafae602b97ec5024" - -[[package]] -name = "hex" -version = "0.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" -dependencies = [ - "serde", -] - -[[package]] -name = "hex-literal" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6fe2267d4ed49bc07b63801559be28c718ea06c4738b7a03c94df7386d2cde46" - -[[package]] -name = "hmac" -version = "0.12.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c49c37c09c17a53d937dfbb742eb3a961d65a994e6bcdcf37e7399d0cc8ab5e" -dependencies = [ - "digest 0.10.7", -] - -[[package]] -name = "home" -version = "0.5.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3d1354bf6b7235cb4a0576c2619fd4ed18183f689b12b006a0ee7329eeff9a5" -dependencies = [ - "windows-sys 0.52.0", -] - -[[package]] -name = "http" -version = "0.2.12" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "601cbb57e577e2f5ef5be8e7b83f0f63994f25aa94d673e54a92d5c516d101f1" -dependencies = [ - "bytes", - "fnv", - "itoa", -] - -[[package]] -name = "http" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "21b9ddb458710bc376481b842f5da65cdf31522de232c1ca8146abce2a358258" -dependencies = [ - "bytes", - "fnv", - "itoa", -] - -[[package]] -name = "http-body" -version = "0.4.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ceab25649e9960c0311ea418d17bee82c0dcec1bd053b5f9a66e265a693bed2" -dependencies = [ - "bytes", - "http 0.2.12", - "pin-project-lite", -] - -[[package]] -name = "http-body" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1cac85db508abc24a2e48553ba12a996e87244a0395ce011e62b37158745d643" -dependencies = [ - "bytes", - "http 1.1.0", -] - -[[package]] -name = "http-body-util" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "793429d76616a256bcb62c2a2ec2bed781c8307e797e2598c50010f2bee2544f" -dependencies = [ - "bytes", - "futures-util", - "http 1.1.0", - "http-body 1.0.0", - "pin-project-lite", -] - -[[package]] -name = "httparse" -version = "1.9.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fcc0b4a115bf80b728eb8ea024ad5bd707b615bfed49e0665b6e0f86fd082d9" - -[[package]] -name = "httpdate" -version = "1.0.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" - -[[package]] -name = "hyper" -version = "0.14.30" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a152ddd61dfaec7273fe8419ab357f33aee0d914c5f4efbf0d96fa749eea5ec9" -dependencies = [ - "bytes", - "futures-channel", - "futures-core", - "futures-util", - "h2 0.3.26", - "http 0.2.12", - "http-body 0.4.6", - 
"httparse", - "httpdate", - "itoa", - "pin-project-lite", - "socket2", - "tokio", - "tower-service", - "tracing", - "want", -] - -[[package]] -name = "hyper" -version = "1.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50dfd22e0e76d0f662d429a5f80fcaf3855009297eab6a0a9f8543834744ba05" -dependencies = [ - "bytes", - "futures-channel", - "futures-util", - "h2 0.4.5", - "http 1.1.0", - "http-body 1.0.0", - "httparse", - "httpdate", - "itoa", - "pin-project-lite", - "smallvec", - "tokio", - "want", -] - -[[package]] -name = "hyper-rustls" -version = "0.24.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec3efd23720e2049821a693cbc7e65ea87c72f1c58ff2f9522ff332b1491e590" -dependencies = [ - "futures-util", - "http 0.2.12", - "hyper 0.14.30", - "rustls 0.21.12", - "tokio", - "tokio-rustls 0.24.1", -] - -[[package]] -name = "hyper-rustls" -version = "0.27.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ee4be2c948921a1a5320b629c4193916ed787a7f7f293fd3f7f5a6c9de74155" -dependencies = [ - "futures-util", - "http 1.1.0", - "hyper 1.4.1", - "hyper-util", - "log", - "rustls 0.23.11", - "rustls-pki-types", - "tokio", - "tokio-rustls 0.26.0", - "tower-service", -] - -[[package]] -name = "hyper-tls" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "70206fc6890eaca9fde8a0bf71caa2ddfc9fe045ac9e5c70df101a7dbde866e0" -dependencies = [ - "bytes", - "http-body-util", - "hyper 1.4.1", - "hyper-util", - "native-tls", - "tokio", - "tokio-native-tls", - "tower-service", -] - -[[package]] -name = "hyper-util" -version = "0.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3ab92f4f49ee4fb4f997c784b7a2e0fa70050211e0b6a287f898c3c9785ca956" -dependencies = [ - "bytes", - "futures-channel", - "futures-util", - "http 1.1.0", - "http-body 1.0.0", - "hyper 1.4.1", - "pin-project-lite", - "socket2", - "tokio", - "tower 
0.4.13", - "tower-service", - "tracing", -] - -[[package]] -name = "iana-time-zone" -version = "0.1.60" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e7ffbb5a1b541ea2561f8c41c087286cc091e21e556a4f09a8f6cbf17b69b141" -dependencies = [ - "android_system_properties", - "core-foundation-sys", - "iana-time-zone-haiku", - "js-sys", - "wasm-bindgen", - "windows-core", -] - -[[package]] -name = "iana-time-zone-haiku" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f" -dependencies = [ - "cc", -] - -[[package]] -name = "ident_case" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" - -[[package]] -name = "idna" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "634d9b1461af396cad843f47fdba5597a4f9e6ddd4bfb6ff5d85028c25cb12f6" -dependencies = [ - "unicode-bidi", - "unicode-normalization", -] - -[[package]] -name = "impl-codec" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba6a270039626615617f3f36d15fc827041df3b78c439da2cadfa47455a77f2f" -dependencies = [ - "parity-scale-codec", -] - -[[package]] -name = "impl-rlp" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f28220f89297a075ddc7245cd538076ee98b01f2a9c23a53a4f1105d5a322808" -dependencies = [ - "rlp", -] - -[[package]] -name = "impl-serde" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ebc88fc67028ae3db0c853baa36269d398d5f45b6982f95549ff5def78c935cd" -dependencies = [ - "serde", -] - -[[package]] -name = "impl-trait-for-tuples" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"11d7a9f6330b71fea57921c9b61c47ee6e84f72d394754eff6163ae67e7395eb" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "indenter" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce23b50ad8242c51a442f3ff322d56b02f08852c77e4c0b4d3fd684abc89c683" - -[[package]] -name = "indexmap" -version = "1.9.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" -dependencies = [ - "autocfg", - "hashbrown 0.12.3", - "serde", -] - -[[package]] -name = "indexmap" -version = "2.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "707907fe3c25f5424cce2cb7e1cbcafee6bdbe735ca90ef77c29e84591e5b9da" -dependencies = [ - "equivalent", - "hashbrown 0.15.0", - "serde", -] - -[[package]] -name = "inout" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a0c10553d664a4d0bcff9f4215d0aac67a639cc68ef660840afe309b807bc9f5" -dependencies = [ - "generic-array 0.14.7", -] - -[[package]] -name = "instant" -version = "0.1.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e0242819d153cba4b4b05a5a8f2a7e9bbf97b6055b2a002b395c96b5ff3c0222" -dependencies = [ - "cfg-if", -] - -[[package]] -name = "ipnet" -version = "2.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f518f335dce6725a761382244631d86cf0ccb2863413590b31338feb467f9c3" - -[[package]] -name = "is_terminal_polyfill" -version = "1.70.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8478577c03552c21db0e2724ffb8986a5ce7af88107e6be5d2ee6e158c12800" - -[[package]] -name = "itertools" -version = "0.10.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" -dependencies = [ - "either", -] - -[[package]] -name = 
"itertools" -version = "0.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1c173a5686ce8bfa551b3563d0c2170bf24ca44da99c7ca4bfdab5418c3fe57" -dependencies = [ - "either", -] - -[[package]] -name = "itoa" -version = "1.0.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b" - -[[package]] -name = "jni" -version = "0.19.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c6df18c2e3db7e453d3c6ac5b3e9d5182664d28788126d39b91f2d1e22b017ec" -dependencies = [ - "cesu8", - "combine", - "jni-sys", - "log", - "thiserror", - "walkdir", -] - -[[package]] -name = "jni-sys" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8eaf4bc02d17cbdd7ff4c7438cafcdf7fb9a4613313ad11b4f8fefe7d3fa0130" - -[[package]] -name = "jobserver" -version = "0.1.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2b099aaa34a9751c5bf0878add70444e1ed2dd73f347be99003d4577277de6e" -dependencies = [ - "libc", -] - -[[package]] -name = "js-sys" -version = "0.3.69" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "29c15563dc2726973df627357ce0c9ddddbea194836909d655df6a75d2cf296d" -dependencies = [ - "wasm-bindgen", -] - -[[package]] -name = "jsonrpsee" -version = "0.24.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "126b48a5acc3c52fbd5381a77898cb60e145123179588a29e7ac48f9c06e401b" -dependencies = [ - "jsonrpsee-client-transport", - "jsonrpsee-core", - "jsonrpsee-http-client", - "jsonrpsee-proc-macros", - "jsonrpsee-server", - "jsonrpsee-types", - "jsonrpsee-wasm-client", - "jsonrpsee-ws-client", - "tokio", - "tracing", -] - -[[package]] -name = "jsonrpsee-client-transport" -version = "0.24.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"bf679a8e0e083c77997f7c4bb4ca826577105906027ae462aac70ff348d02c6a" -dependencies = [ - "base64 0.22.1", - "futures-channel", - "futures-util", - "gloo-net", - "http 1.1.0", - "jsonrpsee-core", - "pin-project", - "rustls 0.23.11", - "rustls-pki-types", - "rustls-platform-verifier", - "soketto", - "thiserror", - "tokio", - "tokio-rustls 0.26.0", - "tokio-util", - "tracing", - "url", -] - -[[package]] -name = "jsonrpsee-core" -version = "0.24.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b0e503369a76e195b65af35058add0e6900b794a4e9a9316900ddd3a87a80477" -dependencies = [ - "async-trait", - "bytes", - "futures-timer", - "futures-util", - "http 1.1.0", - "http-body 1.0.0", - "http-body-util", - "jsonrpsee-types", - "parking_lot", - "pin-project", - "rand", - "rustc-hash", - "serde", - "serde_json", - "thiserror", - "tokio", - "tokio-stream", - "tracing", - "wasm-bindgen-futures", -] - -[[package]] -name = "jsonrpsee-http-client" -version = "0.24.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2c0caba4a6a8efbafeec9baa986aa22a75a96c29d3e4b0091b0098d6470efb5" -dependencies = [ - "async-trait", - "base64 0.22.1", - "http-body 1.0.0", - "hyper 1.4.1", - "hyper-rustls 0.27.2", - "hyper-util", - "jsonrpsee-core", - "jsonrpsee-types", - "rustls 0.23.11", - "rustls-platform-verifier", - "serde", - "serde_json", - "thiserror", - "tokio", - "tower 0.4.13", - "tracing", - "url", -] - -[[package]] -name = "jsonrpsee-proc-macros" -version = "0.24.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc660a9389e2748e794a40673a4155d501f32db667757cdb80edeff0306b489b" -dependencies = [ - "heck", - "proc-macro-crate", - "proc-macro2", - "quote", - "syn 2.0.68", -] - -[[package]] -name = "jsonrpsee-server" -version = "0.24.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af6e6c9b6d975edcb443565d648b605f3e85a04ec63aa6941811a8894cc9cded" -dependencies = [ - 
"futures-util", - "http 1.1.0", - "http-body 1.0.0", - "http-body-util", - "hyper 1.4.1", - "hyper-util", - "jsonrpsee-core", - "jsonrpsee-types", - "pin-project", - "route-recognizer", - "serde", - "serde_json", - "soketto", - "thiserror", - "tokio", - "tokio-stream", - "tokio-util", - "tower 0.4.13", - "tracing", -] - -[[package]] -name = "jsonrpsee-types" -version = "0.24.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d8fb16314327cbc94fdf7965ef7e4422509cd5597f76d137bd104eb34aeede67" -dependencies = [ - "http 1.1.0", - "serde", - "serde_json", - "thiserror", -] - -[[package]] -name = "jsonrpsee-wasm-client" -version = "0.24.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e0da62b43702bd5640ea305d35df95da30abc878e79a7b4b01feda3beaf35d3c" -dependencies = [ - "jsonrpsee-client-transport", - "jsonrpsee-core", - "jsonrpsee-types", -] - -[[package]] -name = "jsonrpsee-ws-client" -version = "0.24.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "39aabf5d6c6f22da8d5b808eea1fab0736059f11fb42f71f141b14f404e5046a" -dependencies = [ - "http 1.1.0", - "jsonrpsee-client-transport", - "jsonrpsee-core", - "jsonrpsee-types", - "url", -] - -[[package]] -name = "jsonwebtoken" -version = "8.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6971da4d9c3aa03c3d8f3ff0f4155b534aad021292003895a469716b2a230378" -dependencies = [ - "base64 0.21.7", - "pem", - "ring 0.16.20", - "serde", - "serde_json", - "simple_asn1", -] - -[[package]] -name = "k256" -version = "0.13.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "956ff9b67e26e1a6a866cb758f12c6f8746208489e3e4a4b5580802f2f0a587b" -dependencies = [ - "cfg-if", - "ecdsa", - "elliptic-curve", - "once_cell", - "sha2", - "signature", -] - -[[package]] -name = "keccak" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"ecc2af9a1119c51f12a14607e783cb977bde58bc069ff0c3da1095e635d70654" -dependencies = [ - "cpufeatures", -] - -[[package]] -name = "keccak-asm" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "505d1856a39b200489082f90d897c3f07c455563880bc5952e38eabf731c83b6" -dependencies = [ - "digest 0.10.7", - "sha3-asm", -] - -[[package]] -name = "lalrpop" -version = "0.20.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "55cb077ad656299f160924eb2912aa147d7339ea7d69e1b5517326fdcec3c1ca" -dependencies = [ - "ascii-canvas", - "bit-set", - "ena", - "itertools 0.11.0", - "lalrpop-util", - "petgraph", - "regex", - "regex-syntax 0.8.5", - "string_cache", - "term", - "tiny-keccak", - "unicode-xid", - "walkdir", -] - -[[package]] -name = "lalrpop-util" -version = "0.20.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "507460a910eb7b32ee961886ff48539633b788a36b65692b95f225b844c82553" -dependencies = [ - "regex-automata 0.4.8", -] - -[[package]] -name = "lazy_static" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" - -[[package]] -name = "libc" -version = "0.2.155" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97b3888a4aecf77e811145cadf6eef5901f4782c53886191b2f693f24761847c" - -[[package]] -name = "libm" -version = "0.2.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ec2a862134d2a7d32d7983ddcdd1c4923530833c9f2ea1a44fc5fa473989058" - -[[package]] -name = "libredox" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d" -dependencies = [ - "bitflags 2.6.0", - "libc", -] - -[[package]] -name = "linux-raw-sys" -version = "0.4.14" -source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "78b3ae25bc7c8c38cec158d1f2757ee79e9b3740fbc7ccf0e59e4b08d793fa89" - -[[package]] -name = "lock_api" -version = "0.4.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07af8b9cdd281b7915f413fa73f29ebd5d55d0d3f0155584dade1ff18cea1b17" -dependencies = [ - "autocfg", - "scopeguard", -] - -[[package]] -name = "log" -version = "0.4.22" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24" - -[[package]] -name = "matchers" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558" -dependencies = [ - "regex-automata 0.1.10", -] - -[[package]] -name = "md-5" -version = "0.10.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d89e7ee0cfbedfc4da3340218492196241d89eefb6dab27de5df917a6d2e78cf" -dependencies = [ - "cfg-if", - "digest 0.10.7", -] - -[[package]] -name = "memchr" -version = "2.7.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" - -[[package]] -name = "mime" -version = "0.3.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" - -[[package]] -name = "miniz_oxide" -version = "0.7.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b8a240ddb74feaf34a79a7add65a741f3167852fba007066dcac1ca548d89c08" -dependencies = [ - "adler", -] - -[[package]] -name = "mio" -version = "1.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "80e04d1dcff3aae0704555fe5fee3bcfaf3d1fdf8a7e521d5b9d2b42acb52cec" -dependencies = [ - "hermit-abi", - "libc", - "wasi", - "windows-sys 0.52.0", -] - -[[package]] -name = "native-tls" -version = "0.2.12" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "a8614eb2c83d59d1c8cc974dd3f920198647674a0a035e1af1fa58707e317466" -dependencies = [ - "libc", - "log", - "openssl", - "openssl-probe", - "openssl-sys", - "schannel", - "security-framework", - "security-framework-sys", - "tempfile", -] - -[[package]] -name = "new_debug_unreachable" -version = "1.0.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "650eef8c711430f1a879fdd01d4745a7deea475becfb90269c06775983bbf086" - -[[package]] -name = "nu-ansi-term" -version = "0.46.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84" -dependencies = [ - "overload", - "winapi", -] - -[[package]] -name = "num-bigint" -version = "0.4.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a5e44f723f1133c9deac646763579fdb3ac745e418f2a7af9cd0c431da1f20b9" -dependencies = [ - "num-integer", - "num-traits", -] - -[[package]] -name = "num-conv" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9" - -[[package]] -name = "num-integer" -version = "0.1.46" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7969661fd2958a5cb096e56c8e1ad0444ac2bbcd0061bd28660485a44879858f" -dependencies = [ - "num-traits", -] - -[[package]] -name = "num-traits" -version = "0.2.19" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" -dependencies = [ - "autocfg", - "libm", -] - -[[package]] -name = "num_cpus" -version = "1.16.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43" -dependencies = [ - "hermit-abi", - "libc", -] - -[[package]] -name = "num_enum" -version = "0.7.2" 
-source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "02339744ee7253741199f897151b38e72257d13802d4ee837285cc2990a90845" -dependencies = [ - "num_enum_derive", -] - -[[package]] -name = "num_enum_derive" -version = "0.7.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "681030a937600a36906c185595136d26abfebb4aa9c65701cefcaf8578bb982b" -dependencies = [ - "proc-macro-crate", - "proc-macro2", - "quote", - "syn 2.0.68", -] - -[[package]] -name = "object" -version = "0.36.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "081b846d1d56ddfc18fdf1a922e4f6e07a11768ea1b92dec44e42b72712ccfce" -dependencies = [ - "memchr", -] - -[[package]] -name = "once_cell" -version = "1.19.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" - -[[package]] -name = "open-fastrlp" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "786393f80485445794f6043fd3138854dd109cc6c4bd1a6383db304c9ce9b9ce" -dependencies = [ - "arrayvec", - "auto_impl", - "bytes", - "ethereum-types", - "open-fastrlp-derive", -] - -[[package]] -name = "open-fastrlp-derive" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "003b2be5c6c53c1cfeb0a238b8a1c3915cd410feb684457a36c10038f764bb1c" -dependencies = [ - "bytes", - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "openssl" -version = "0.10.66" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9529f4786b70a3e8c61e11179af17ab6188ad8d0ded78c5529441ed39d4bd9c1" -dependencies = [ - "bitflags 2.6.0", - "cfg-if", - "foreign-types", - "libc", - "once_cell", - "openssl-macros", - "openssl-sys", -] - -[[package]] -name = "openssl-macros" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.68", -] - -[[package]] -name = "openssl-probe" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" - -[[package]] -name = "openssl-sys" -version = "0.9.103" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f9e8deee91df40a943c71b917e5874b951d32a802526c85721ce3b776c929d6" -dependencies = [ - "cc", - "libc", - "pkg-config", - "vcpkg", -] - -[[package]] -name = "option-ext" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d" - -[[package]] -name = "overload" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" - -[[package]] -name = "parity-scale-codec" -version = "3.6.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "306800abfa29c7f16596b5970a588435e3d5b3149683d00c12b699cc19f895ee" -dependencies = [ - "arrayvec", - "bitvec", - "byte-slice-cast", - "impl-trait-for-tuples", - "parity-scale-codec-derive", - "serde", -] - -[[package]] -name = "parity-scale-codec-derive" -version = "3.6.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d830939c76d294956402033aee57a6da7b438f2294eb94864c37b0569053a42c" -dependencies = [ - "proc-macro-crate", - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "parking_lot" -version = "0.12.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f1bf18183cf54e8d6059647fc3063646a1801cf30896933ec2311622cc4b9a27" -dependencies = [ - "lock_api", - "parking_lot_core", -] - -[[package]] -name = "parking_lot_core" -version = "0.9.10" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8" -dependencies = [ - "cfg-if", - "libc", - "redox_syscall", - "smallvec", - "windows-targets 0.52.6", -] - -[[package]] -name = "password-hash" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7676374caaee8a325c9e7a2ae557f216c5563a171d6997b0ef8a65af35147700" -dependencies = [ - "base64ct", - "rand_core", - "subtle", -] - -[[package]] -name = "paste" -version = "1.0.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" - -[[package]] -name = "path-slash" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e91099d4268b0e11973f036e885d652fb0b21fedcf69738c627f94db6a44f42" - -[[package]] -name = "pbkdf2" -version = "0.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "83a0692ec44e4cf1ef28ca317f14f8f07da2d95ec3fa01f86e4467b725e60917" -dependencies = [ - "digest 0.10.7", - "hmac", - "password-hash", - "sha2", -] - -[[package]] -name = "pbkdf2" -version = "0.12.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8ed6a7761f76e3b9f92dfb0a60a6a6477c61024b775147ff0973a02653abaf2" -dependencies = [ - "digest 0.10.7", - "hmac", -] - -[[package]] -name = "pem" -version = "1.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a8835c273a76a90455d7344889b0964598e3316e2a79ede8e36f16bdcf2228b8" -dependencies = [ - "base64 0.13.1", -] - -[[package]] -name = "percent-encoding" -version = "2.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" - -[[package]] -name = "pest" -version = "2.7.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"fdbef9d1d47087a895abd220ed25eb4ad973a5e26f6a4367b038c25e28dfc2d9" -dependencies = [ - "memchr", - "thiserror", - "ucd-trie", -] - -[[package]] -name = "petgraph" -version = "0.6.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b4c5cc86750666a3ed20bdaf5ca2a0344f9c67674cae0515bec2da16fbaa47db" -dependencies = [ - "fixedbitset", - "indexmap 2.6.0", -] - -[[package]] -name = "pharos" -version = "0.5.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e9567389417feee6ce15dd6527a8a1ecac205ef62c2932bcf3d9f6fc5b78b414" -dependencies = [ - "futures", - "rustc_version 0.4.0", -] - -[[package]] -name = "phf" -version = "0.11.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ade2d8b8f33c7333b51bcf0428d37e217e9f32192ae4772156f65063b8ce03dc" -dependencies = [ - "phf_macros", - "phf_shared 0.11.2", -] - -[[package]] -name = "phf_generator" -version = "0.11.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "48e4cc64c2ad9ebe670cb8fd69dd50ae301650392e81c05f9bfcb2d5bdbc24b0" -dependencies = [ - "phf_shared 0.11.2", - "rand", -] - -[[package]] -name = "phf_macros" -version = "0.11.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3444646e286606587e49f3bcf1679b8cef1dc2c5ecc29ddacaffc305180d464b" -dependencies = [ - "phf_generator", - "phf_shared 0.11.2", - "proc-macro2", - "quote", - "syn 2.0.68", -] - -[[package]] -name = "phf_shared" -version = "0.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6796ad771acdc0123d2a88dc428b5e38ef24456743ddb1744ed628f9815c096" -dependencies = [ - "siphasher", -] - -[[package]] -name = "phf_shared" -version = "0.11.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90fcb95eef784c2ac79119d1dd819e162b5da872ce6f3c3abe1e8ca1c082f72b" -dependencies = [ - "siphasher", -] - -[[package]] -name = "pin-project" -version = "1.1.5" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6bf43b791c5b9e34c3d182969b4abb522f9343702850a2e57f460d00d09b4b3" -dependencies = [ - "pin-project-internal", -] - -[[package]] -name = "pin-project-internal" -version = "1.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2f38a4412a78282e09a2cf38d195ea5420d15ba0602cb375210efbc877243965" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.68", -] - -[[package]] -name = "pin-project-lite" -version = "0.2.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bda66fc9667c18cb2758a2ac84d1167245054bcf85d5d1aaa6923f45801bdd02" - -[[package]] -name = "pin-utils" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" - -[[package]] -name = "pkcs8" -version = "0.10.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f950b2377845cebe5cf8b5165cb3cc1a5e0fa5cfa3e1f7f55707d8fd82e0a7b7" -dependencies = [ - "der", - "spki", -] - -[[package]] -name = "pkg-config" -version = "0.3.30" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d231b230927b5e4ad203db57bbcbee2802f6bce620b1e4a9024a07d94e2907ec" - -[[package]] -name = "powerfmt" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" - -[[package]] -name = "ppv-lite86" -version = "0.2.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" - -[[package]] -name = "precomputed-hash" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "925383efa346730478fb4838dbe9137d2a47675ad789c546d150a6e1dd4ab31c" - -[[package]] -name = "prettyplease" -version = "0.2.20" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f12335488a2f3b0a83b14edad48dca9879ce89b2edd10e80237e4e852dd645e" -dependencies = [ - "proc-macro2", - "syn 2.0.68", -] - -[[package]] -name = "primitive-types" -version = "0.12.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b34d9fd68ae0b74a41b21c03c2f62847aa0ffea044eee893b4c140b37e244e2" -dependencies = [ - "fixed-hash", - "impl-codec", - "impl-rlp", - "impl-serde", - "scale-info", - "uint", -] - -[[package]] -name = "proc-macro-crate" -version = "3.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d37c51ca738a55da99dc0c4a34860fd675453b8b36209178c2249bb13651284" -dependencies = [ - "toml_edit 0.21.1", -] - -[[package]] -name = "proc-macro-error-attr2" -version = "2.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "96de42df36bb9bba5542fe9f1a054b8cc87e172759a1868aa05c1f3acc89dfc5" -dependencies = [ - "proc-macro2", - "quote", -] - -[[package]] -name = "proc-macro-error2" -version = "2.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "11ec05c52be0a07b08061f7dd003e7d7092e0472bc731b4af7bb1ef876109802" -dependencies = [ - "proc-macro-error-attr2", - "proc-macro2", - "quote", - "syn 2.0.68", -] - -[[package]] -name = "proc-macro2" -version = "1.0.86" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e719e8df665df0d1c8fbfd238015744736151d4445ec0836b8e628aae103b77" -dependencies = [ - "unicode-ident", -] - -[[package]] -name = "proptest" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b4c2511913b88df1637da85cc8d96ec8e43a3f8bb8ccb71ee1ac240d6f3df58d" -dependencies = [ - "bit-set", - "bit-vec", - "bitflags 2.6.0", - "lazy_static", - "num-traits", - "rand", - "rand_chacha", - "rand_xorshift", - "regex-syntax 0.8.5", - "rusty-fork", - "tempfile", - "unarray", -] - -[[package]] -name = 
"quick-error" -version = "1.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0" - -[[package]] -name = "quote" -version = "1.0.36" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fa76aaf39101c457836aec0ce2316dbdc3ab723cdda1c6bd4e6ad4208acaca7" -dependencies = [ - "proc-macro2", -] - -[[package]] -name = "radium" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc33ff2d4973d518d823d61aa239014831e521c75da58e3df4840d3f47749d09" - -[[package]] -name = "rand" -version = "0.8.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" -dependencies = [ - "libc", - "rand_chacha", - "rand_core", - "serde", -] - -[[package]] -name = "rand_chacha" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" -dependencies = [ - "ppv-lite86", - "rand_core", -] - -[[package]] -name = "rand_core" -version = "0.6.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" -dependencies = [ - "getrandom", -] - -[[package]] -name = "rand_xorshift" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d25bf25ec5ae4a3f1b92f929810509a2f53d7dca2f50b794ff57e3face536c8f" -dependencies = [ - "rand_core", -] - -[[package]] -name = "rayon" -version = "1.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b418a60154510ca1a002a752ca9714984e21e4241e804d32555251faf8b78ffa" -dependencies = [ - "either", - "rayon-core", -] - -[[package]] -name = "rayon-core" -version = "1.12.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"1465873a3dfdaa8ae7cb14b4383657caab0b3e8a0aa9ae8e04b044854c8dfce2" -dependencies = [ - "crossbeam-deque", - "crossbeam-utils", -] - -[[package]] -name = "redox_syscall" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c82cf8cff14456045f55ec4241383baeff27af886adb72ffb2162f99911de0fd" -dependencies = [ - "bitflags 2.6.0", -] - -[[package]] -name = "redox_users" -version = "0.4.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd283d9651eeda4b2a83a43c1c91b266c40fd76ecd39a50a8c630ae69dc72891" -dependencies = [ - "getrandom", - "libredox", - "thiserror", -] - -[[package]] -name = "regex" -version = "1.11.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191" -dependencies = [ - "aho-corasick", - "memchr", - "regex-automata 0.4.8", - "regex-syntax 0.8.5", -] - -[[package]] -name = "regex-automata" -version = "0.1.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132" -dependencies = [ - "regex-syntax 0.6.29", -] - -[[package]] -name = "regex-automata" -version = "0.4.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "368758f23274712b504848e9d5a6f010445cc8b87a7cdb4d7cbee666c1288da3" -dependencies = [ - "aho-corasick", - "memchr", - "regex-syntax 0.8.5", -] - -[[package]] -name = "regex-syntax" -version = "0.6.29" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" - -[[package]] -name = "regex-syntax" -version = "0.8.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c" - -[[package]] -name = "reqwest" -version = "0.11.27" -source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "dd67538700a17451e7cba03ac727fb961abb7607553461627b97de0b89cf4a62" -dependencies = [ - "base64 0.21.7", - "bytes", - "encoding_rs", - "futures-core", - "futures-util", - "h2 0.3.26", - "http 0.2.12", - "http-body 0.4.6", - "hyper 0.14.30", - "hyper-rustls 0.24.2", - "ipnet", - "js-sys", - "log", - "mime", - "once_cell", - "percent-encoding", - "pin-project-lite", - "rustls 0.21.12", - "rustls-pemfile 1.0.4", - "serde", - "serde_json", - "serde_urlencoded", - "sync_wrapper 0.1.2", - "system-configuration 0.5.1", - "tokio", - "tokio-rustls 0.24.1", - "tower-service", - "url", - "wasm-bindgen", - "wasm-bindgen-futures", - "web-sys", - "webpki-roots 0.25.4", - "winreg", -] - -[[package]] -name = "reqwest" -version = "0.12.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f713147fbe92361e52392c73b8c9e48c04c6625bce969ef54dc901e58e042a7b" -dependencies = [ - "base64 0.22.1", - "bytes", - "encoding_rs", - "futures-channel", - "futures-core", - "futures-util", - "h2 0.4.5", - "http 1.1.0", - "http-body 1.0.0", - "http-body-util", - "hyper 1.4.1", - "hyper-rustls 0.27.2", - "hyper-tls", - "hyper-util", - "ipnet", - "js-sys", - "log", - "mime", - "native-tls", - "once_cell", - "percent-encoding", - "pin-project-lite", - "rustls-pemfile 2.1.2", - "serde", - "serde_json", - "serde_urlencoded", - "sync_wrapper 1.0.1", - "system-configuration 0.6.1", - "tokio", - "tokio-native-tls", - "tower-service", - "url", - "wasm-bindgen", - "wasm-bindgen-futures", - "web-sys", - "windows-registry", -] - -[[package]] -name = "rfc6979" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8dd2a808d456c4a54e300a23e9f5a67e122c3024119acbfd73e3bf664491cb2" -dependencies = [ - "hmac", - "subtle", -] - -[[package]] -name = "ring" -version = "0.16.20" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3053cf52e236a3ed746dfc745aa9cacf1b791d846bdaf412f60a8d7d6e17c8fc" -dependencies = [ 
- "cc", - "libc", - "once_cell", - "spin 0.5.2", - "untrusted 0.7.1", - "web-sys", - "winapi", -] - -[[package]] -name = "ring" -version = "0.17.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c17fa4cb658e3583423e915b9f3acc01cceaee1860e33d59ebae66adc3a2dc0d" -dependencies = [ - "cc", - "cfg-if", - "getrandom", - "libc", - "spin 0.9.8", - "untrusted 0.9.0", - "windows-sys 0.52.0", -] - -[[package]] -name = "ripemd" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd124222d17ad93a644ed9d011a40f4fb64aa54275c08cc216524a9ea82fb09f" -dependencies = [ - "digest 0.10.7", -] - -[[package]] -name = "rlp" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb919243f34364b6bd2fc10ef797edbfa75f33c252e7998527479c6d6b47e1ec" -dependencies = [ - "bytes", - "rlp-derive", - "rustc-hex", -] - -[[package]] -name = "rlp-derive" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e33d7b2abe0c340d8797fe2907d3f20d3b5ea5908683618bfe80df7f621f672a" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "route-recognizer" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "afab94fb28594581f62d981211a9a4d53cc8130bbcbbb89a0440d9b8e81a7746" - -[[package]] -name = "ruint" -version = "1.12.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c3cc4c2511671f327125da14133d0c5c5d137f006a1017a16f557bc85b16286" -dependencies = [ - "alloy-rlp", - "ark-ff 0.3.0", - "ark-ff 0.4.2", - "bytes", - "fastrlp", - "num-bigint", - "num-traits", - "parity-scale-codec", - "primitive-types", - "proptest", - "rand", - "rlp", - "ruint-macro", - "serde", - "valuable", - "zeroize", -] - -[[package]] -name = "ruint-macro" -version = "1.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"48fd7bd8a6377e15ad9d42a8ec25371b94ddc67abe7c8b9127bec79bebaaae18" - -[[package]] -name = "rustc-demangle" -version = "0.1.24" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f" - -[[package]] -name = "rustc-hash" -version = "2.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "583034fd73374156e66797ed8e5b0d5690409c9226b22d87cb7f19821c05d152" - -[[package]] -name = "rustc-hex" -version = "2.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3e75f6a532d0fd9f7f13144f392b6ad56a32696bfcd9c78f797f16bbb6f072d6" - -[[package]] -name = "rustc_version" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f0dfe2087c51c460008730de8b57e6a320782fbfb312e1f4d520e6c6fae155ee" -dependencies = [ - "semver 0.11.0", -] - -[[package]] -name = "rustc_version" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366" -dependencies = [ - "semver 1.0.23", -] - -[[package]] -name = "rustix" -version = "0.38.34" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "70dc5ec042f7a43c4a73241207cecc9873a06d45debb38b329f8541d85c2730f" -dependencies = [ - "bitflags 2.6.0", - "errno", - "libc", - "linux-raw-sys", - "windows-sys 0.52.0", -] - -[[package]] -name = "rustls" -version = "0.21.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f56a14d1f48b391359b22f731fd4bd7e43c97f3c50eee276f3aa09c94784d3e" -dependencies = [ - "log", - "ring 0.17.8", - "rustls-webpki 0.101.7", - "sct", -] - -[[package]] -name = "rustls" -version = "0.23.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4828ea528154ae444e5a642dbb7d5623354030dc9822b83fd9bb79683c7399d0" -dependencies = [ - "log", - "once_cell", - "ring 0.17.8", - 
"rustls-pki-types", - "rustls-webpki 0.102.5", - "subtle", - "zeroize", -] - -[[package]] -name = "rustls-native-certs" -version = "0.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a88d6d420651b496bdd98684116959239430022a115c1240e6c3993be0b15fba" -dependencies = [ - "openssl-probe", - "rustls-pemfile 2.1.2", - "rustls-pki-types", - "schannel", - "security-framework", -] - -[[package]] -name = "rustls-pemfile" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1c74cae0a4cf6ccbbf5f359f08efdf8ee7e1dc532573bf0db71968cb56b1448c" -dependencies = [ - "base64 0.21.7", -] - -[[package]] -name = "rustls-pemfile" -version = "2.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "29993a25686778eb88d4189742cd713c9bce943bc54251a33509dc63cbacf73d" -dependencies = [ - "base64 0.22.1", - "rustls-pki-types", -] - -[[package]] -name = "rustls-pki-types" -version = "1.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "976295e77ce332211c0d24d92c0e83e50f5c5f046d11082cea19f3df13a3562d" - -[[package]] -name = "rustls-platform-verifier" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3e3beb939bcd33c269f4bf946cc829fcd336370267c4a927ac0399c84a3151a1" -dependencies = [ - "core-foundation", - "core-foundation-sys", - "jni", - "log", - "once_cell", - "rustls 0.23.11", - "rustls-native-certs", - "rustls-platform-verifier-android", - "rustls-webpki 0.102.5", - "security-framework", - "security-framework-sys", - "webpki-roots 0.26.3", - "winapi", -] - -[[package]] -name = "rustls-platform-verifier-android" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "84e217e7fdc8466b5b35d30f8c0a30febd29173df4a3a0c2115d306b9c4117ad" - -[[package]] -name = "rustls-webpki" -version = "0.101.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"8b6275d1ee7a1cd780b64aca7726599a1dbc893b1e64144529e55c3c2f745765" -dependencies = [ - "ring 0.17.8", - "untrusted 0.9.0", -] - -[[package]] -name = "rustls-webpki" -version = "0.102.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f9a6fccd794a42c2c105b513a2f62bc3fd8f3ba57a4593677ceb0bd035164d78" -dependencies = [ - "ring 0.17.8", - "rustls-pki-types", - "untrusted 0.9.0", -] - -[[package]] -name = "rustversion" -version = "1.0.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "955d28af4278de8121b7ebeb796b6a45735dc01436d898801014aced2773a3d6" - -[[package]] -name = "rusty-fork" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb3dcc6e454c328bb824492db107ab7c0ae8fcffe4ad210136ef014458c1bc4f" -dependencies = [ - "fnv", - "quick-error", - "tempfile", - "wait-timeout", -] - -[[package]] -name = "ryu" -version = "1.0.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f3cb5ba0dc43242ce17de99c180e96db90b235b8a9fdc9543c96d2209116bd9f" - -[[package]] -name = "salsa20" -version = "0.10.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97a22f5af31f73a954c10289c93e8a50cc23d971e80ee446f1f6f7137a088213" -dependencies = [ - "cipher", -] - -[[package]] -name = "same-file" -version = "1.0.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" -dependencies = [ - "winapi-util", -] - -[[package]] -name = "scale-info" -version = "2.11.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eca070c12893629e2cc820a9761bedf6ce1dcddc9852984d1dc734b8bd9bd024" -dependencies = [ - "cfg-if", - "derive_more 0.99.18", - "parity-scale-codec", - "scale-info-derive", -] - -[[package]] -name = "scale-info-derive" -version = "2.11.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum 
= "2d35494501194174bda522a32605929eefc9ecf7e0a326c26db1fdd85881eb62" -dependencies = [ - "proc-macro-crate", - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "schannel" -version = "0.1.23" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fbc91545643bcf3a0bbb6569265615222618bdf33ce4ffbbd13c4bbd4c093534" -dependencies = [ - "windows-sys 0.52.0", -] - -[[package]] -name = "scopeguard" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" - -[[package]] -name = "scrypt" -version = "0.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f9e24d2b632954ded8ab2ef9fea0a0c769ea56ea98bddbafbad22caeeadf45d" -dependencies = [ - "hmac", - "pbkdf2 0.11.0", - "salsa20", - "sha2", -] - -[[package]] -name = "sct" -version = "0.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da046153aa2352493d6cb7da4b6e5c0c057d8a1d0a9aa8560baffdd945acd414" -dependencies = [ - "ring 0.17.8", - "untrusted 0.9.0", -] - -[[package]] -name = "sec1" -version = "0.7.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d3e97a565f76233a6003f9f5c54be1d9c5bdfa3eccfb189469f11ec4901c47dc" -dependencies = [ - "base16ct", - "der", - "generic-array 0.14.7", - "pkcs8", - "subtle", - "zeroize", -] - -[[package]] -name = "security-framework" -version = "2.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c627723fd09706bacdb5cf41499e95098555af3c3c29d014dc3c458ef6be11c0" -dependencies = [ - "bitflags 2.6.0", - "core-foundation", - "core-foundation-sys", - "libc", - "num-bigint", - "security-framework-sys", -] - -[[package]] -name = "security-framework-sys" -version = "2.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "317936bbbd05227752583946b9e66d7ce3b489f84e11a94a510b4437fef407d7" -dependencies 
= [ - "core-foundation-sys", - "libc", -] - -[[package]] -name = "semver" -version = "0.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f301af10236f6df4160f7c3f04eec6dbc70ace82d23326abad5edee88801c6b6" -dependencies = [ - "semver-parser", -] - -[[package]] -name = "semver" -version = "1.0.23" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "61697e0a1c7e512e84a621326239844a24d8207b4669b41bc18b32ea5cbf988b" -dependencies = [ - "serde", -] - -[[package]] -name = "semver-parser" -version = "0.10.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "00b0bef5b7f9e0df16536d3961cfb6e84331c065b4066afb39768d0e319411f7" -dependencies = [ - "pest", -] - -[[package]] -name = "send_wrapper" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f638d531eccd6e23b980caf34876660d38e265409d8e99b397ab71eb3612fad0" - -[[package]] -name = "send_wrapper" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cd0b0ec5f1c1ca621c432a25813d8d60c88abe6d3e08a3eb9cf37d97a0fe3d73" - -[[package]] -name = "serde" -version = "1.0.210" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c8e3592472072e6e22e0a54d5904d9febf8508f65fb8552499a1abc7d1078c3a" -dependencies = [ - "serde_derive", -] - -[[package]] -name = "serde_derive" -version = "1.0.210" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "243902eda00fad750862fc144cea25caca5e20d615af0a81bee94ca738f1df1f" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.68", -] - -[[package]] -name = "serde_json" -version = "1.0.128" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ff5456707a1de34e7e37f2a6fd3d3f808c318259cbd01ab6377795054b483d8" -dependencies = [ - "itoa", - "memchr", - "ryu", - "serde", -] - -[[package]] -name = "serde_spanned" -version = "0.6.8" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "87607cb1398ed59d48732e575a4c28a7a8ebf2454b964fe3f224f2afc07909e1" -dependencies = [ - "serde", -] - -[[package]] -name = "serde_urlencoded" -version = "0.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd" -dependencies = [ - "form_urlencoded", - "itoa", - "ryu", - "serde", -] - -[[package]] -name = "serde_with" -version = "3.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9720086b3357bcb44fce40117d769a4d068c70ecfa190850a980a71755f66fcc" -dependencies = [ - "base64 0.22.1", - "chrono", - "hex", - "indexmap 1.9.3", - "indexmap 2.6.0", - "serde", - "serde_derive", - "serde_json", - "serde_with_macros", - "time", -] - -[[package]] -name = "serde_with_macros" -version = "3.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f1abbfe725f27678f4663bcacb75a83e829fd464c25d78dd038a3a29e307cec" -dependencies = [ - "darling", - "proc-macro2", - "quote", - "syn 2.0.68", -] - -[[package]] -name = "sha1" -version = "0.10.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba" -dependencies = [ - "cfg-if", - "cpufeatures", - "digest 0.10.7", -] - -[[package]] -name = "sha2" -version = "0.10.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8" -dependencies = [ - "cfg-if", - "cpufeatures", - "digest 0.10.7", -] - -[[package]] -name = "sha3" -version = "0.10.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "75872d278a8f37ef87fa0ddbda7802605cb18344497949862c0d4dcb291eba60" -dependencies = [ - "digest 0.10.7", - "keccak", -] - -[[package]] -name = "sha3-asm" -version = "0.1.4" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "c28efc5e327c837aa837c59eae585fc250715ef939ac32881bcc11677cd02d46" -dependencies = [ - "cc", - "cfg-if", -] - -[[package]] -name = "sharded-slab" -version = "0.1.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6" -dependencies = [ - "lazy_static", -] - -[[package]] -name = "signature" -version = "2.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77549399552de45a898a580c1b41d445bf730df867cc44e6c0233bbc4b8329de" -dependencies = [ - "digest 0.10.7", - "rand_core", -] - -[[package]] -name = "simple_asn1" -version = "0.6.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "adc4e5204eb1910f40f9cfa375f6f05b68c3abac4b6fd879c8ff5e7ae8a0a085" -dependencies = [ - "num-bigint", - "num-traits", - "thiserror", - "time", -] - -[[package]] -name = "siphasher" -version = "0.3.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38b58827f4464d87d377d175e90bf58eb00fd8716ff0a62f80356b5e61555d0d" - -[[package]] -name = "slab" -version = "0.4.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f92a496fb766b417c996b9c5e57daf2f7ad3b0bebe1ccfca4856390e3d3bb67" -dependencies = [ - "autocfg", -] - -[[package]] -name = "smallvec" -version = "1.13.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" - -[[package]] -name = "socket2" -version = "0.5.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce305eb0b4296696835b71df73eb912e0f1ffd2556a501fcede6e0c50349191c" -dependencies = [ - "libc", - "windows-sys 0.52.0", -] - -[[package]] -name = "soketto" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"37468c595637c10857701c990f93a40ce0e357cedb0953d1c26c8d8027f9bb53" -dependencies = [ - "base64 0.22.1", - "bytes", - "futures", - "http 1.1.0", - "httparse", - "log", - "rand", - "sha1", -] - -[[package]] -name = "solang-parser" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c425ce1c59f4b154717592f0bdf4715c3a1d55058883622d3157e1f0908a5b26" -dependencies = [ - "itertools 0.11.0", - "lalrpop", - "lalrpop-util", - "phf", - "thiserror", - "unicode-xid", -] - -[[package]] -name = "spin" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d" - -[[package]] -name = "spin" -version = "0.9.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67" - -[[package]] -name = "spki" -version = "0.7.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d91ed6c858b01f942cd56b37a94b3e0a1798290327d1236e4d9cf4eaca44d29d" -dependencies = [ - "base64ct", - "der", -] - -[[package]] -name = "static_assertions" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" - -[[package]] -name = "string_cache" -version = "0.8.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f91138e76242f575eb1d3b38b4f1362f10d3a43f47d182a5b359af488a02293b" -dependencies = [ - "new_debug_unreachable", - "once_cell", - "parking_lot", - "phf_shared 0.10.0", - "precomputed-hash", -] - -[[package]] -name = "strsim" -version = "0.11.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" - -[[package]] -name = "strum" -version = "0.26.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"8fec0f0aef304996cf250b31b5a10dee7980c85da9d759361292b8bca5a18f06" -dependencies = [ - "strum_macros", -] - -[[package]] -name = "strum_macros" -version = "0.26.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c6bee85a5a24955dc440386795aa378cd9cf82acd5f764469152d2270e581be" -dependencies = [ - "heck", - "proc-macro2", - "quote", - "rustversion", - "syn 2.0.68", -] - -[[package]] -name = "subtle" -version = "2.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" - -[[package]] -name = "svm-rs" -version = "0.3.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "11297baafe5fa0c99d5722458eac6a5e25c01eb1b8e5cd137f54079093daa7a4" -dependencies = [ - "dirs", - "fs2", - "hex", - "once_cell", - "reqwest 0.11.27", - "semver 1.0.23", - "serde", - "serde_json", - "sha2", - "thiserror", - "url", - "zip", -] - -[[package]] -name = "syn" -version = "1.0.109" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" -dependencies = [ - "proc-macro2", - "quote", - "unicode-ident", -] - -[[package]] -name = "syn" -version = "2.0.68" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "901fa70d88b9d6c98022e23b4136f9f3e54e4662c3bc1bd1d84a42a9a0f0c1e9" -dependencies = [ - "proc-macro2", - "quote", - "unicode-ident", -] - -[[package]] -name = "syn-solidity" -version = "0.8.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ab661c8148c2261222a4d641ad5477fd4bea79406a99056096a0b41b35617a5" -dependencies = [ - "paste", - "proc-macro2", - "quote", - "syn 2.0.68", -] - -[[package]] -name = "sync_wrapper" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" - -[[package]] -name = 
"sync_wrapper" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a7065abeca94b6a8a577f9bd45aa0867a2238b74e8eb67cf10d492bc39351394" -dependencies = [ - "futures-core", -] - -[[package]] -name = "system-configuration" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba3a3adc5c275d719af8cb4272ea1c4a6d668a777f37e115f6d11ddbc1c8e0e7" -dependencies = [ - "bitflags 1.3.2", - "core-foundation", - "system-configuration-sys 0.5.0", -] - -[[package]] -name = "system-configuration" -version = "0.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c879d448e9d986b661742763247d3693ed13609438cf3d006f51f5368a5ba6b" -dependencies = [ - "bitflags 2.6.0", - "core-foundation", - "system-configuration-sys 0.6.0", -] - -[[package]] -name = "system-configuration-sys" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a75fb188eb626b924683e3b95e3a48e63551fcfb51949de2f06a9d91dbee93c9" -dependencies = [ - "core-foundation-sys", - "libc", -] - -[[package]] -name = "system-configuration-sys" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e1d1b10ced5ca923a1fcb8d03e96b8d3268065d724548c0211415ff6ac6bac4" -dependencies = [ - "core-foundation-sys", - "libc", -] - -[[package]] -name = "tap" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" - -[[package]] -name = "tempfile" -version = "3.12.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "04cbcdd0c794ebb0d4cf35e88edd2f7d2c4c3e9a5a6dab322839b321c6a87a64" -dependencies = [ - "cfg-if", - "fastrand", - "once_cell", - "rustix", - "windows-sys 0.59.0", -] - -[[package]] -name = "term" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"c59df8ac95d96ff9bede18eb7300b0fda5e5d8d90960e76f8e14ae765eedbf1f" -dependencies = [ - "dirs-next", - "rustversion", - "winapi", -] - -[[package]] -name = "thiserror" -version = "1.0.64" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d50af8abc119fb8bb6dbabcfa89656f46f84aa0ac7688088608076ad2b459a84" -dependencies = [ - "thiserror-impl", -] - -[[package]] -name = "thiserror-impl" -version = "1.0.64" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08904e7672f5eb876eaaf87e0ce17857500934f4981c4a0ab2b4aa98baac7fc3" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.68", -] - -[[package]] -name = "thread_local" -version = "1.1.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b9ef9bad013ada3808854ceac7b46812a6465ba368859a37e2100283d2d719c" -dependencies = [ - "cfg-if", - "once_cell", -] - -[[package]] -name = "time" -version = "0.3.36" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5dfd88e563464686c916c7e46e623e520ddc6d79fa6641390f2e3fa86e83e885" -dependencies = [ - "deranged", - "itoa", - "num-conv", - "powerfmt", - "serde", - "time-core", - "time-macros", -] - -[[package]] -name = "time-core" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3" - -[[package]] -name = "time-macros" -version = "0.2.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f252a68540fde3a3877aeea552b832b40ab9a69e318efd078774a01ddee1ccf" -dependencies = [ - "num-conv", - "time-core", -] - -[[package]] -name = "tiny-keccak" -version = "2.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c9d3793400a45f954c52e73d068316d76b6f4e36977e3fcebb13a2721e80237" -dependencies = [ - "crunchy", -] - -[[package]] -name = "tinyvec" -version = "1.8.0" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "445e881f4f6d382d5f27c034e25eb92edd7c784ceab92a0937db7f2e9471b938" -dependencies = [ - "tinyvec_macros", -] - -[[package]] -name = "tinyvec_macros" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" - -[[package]] -name = "tokio" -version = "1.40.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2b070231665d27ad9ec9b8df639893f46727666c6767db40317fbe920a5d998" -dependencies = [ - "backtrace", - "bytes", - "libc", - "mio", - "pin-project-lite", - "socket2", - "tokio-macros", - "windows-sys 0.52.0", -] - -[[package]] -name = "tokio-macros" -version = "2.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "693d596312e88961bc67d7f1f97af8a70227d9f90c31bba5806eec004978d752" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.68", -] - -[[package]] -name = "tokio-native-tls" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbae76ab933c85776efabc971569dd6119c580d8f5d448769dec1764bf796ef2" -dependencies = [ - "native-tls", - "tokio", -] - -[[package]] -name = "tokio-rustls" -version = "0.24.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c28327cf380ac148141087fbfb9de9d7bd4e84ab5d2c28fbc911d753de8a7081" -dependencies = [ - "rustls 0.21.12", - "tokio", -] - -[[package]] -name = "tokio-rustls" -version = "0.26.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c7bc40d0e5a97695bb96e27995cd3a08538541b0a846f65bba7a359f36700d4" -dependencies = [ - "rustls 0.23.11", - "rustls-pki-types", - "tokio", -] - -[[package]] -name = "tokio-stream" -version = "0.1.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "267ac89e0bec6e691e5813911606935d77c476ff49024f98abcea3e7b15e37af" -dependencies = [ - 
"futures-core", - "pin-project-lite", - "tokio", - "tokio-util", -] - -[[package]] -name = "tokio-tungstenite" -version = "0.20.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "212d5dcb2a1ce06d81107c3d0ffa3121fe974b73f068c8282cb1c32328113b6c" -dependencies = [ - "futures-util", - "log", - "rustls 0.21.12", - "tokio", - "tokio-rustls 0.24.1", - "tungstenite", - "webpki-roots 0.25.4", -] - -[[package]] -name = "tokio-util" -version = "0.7.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9cf6b47b3771c49ac75ad09a6162f53ad4b8088b76ac60e8ec1455b31a189fe1" -dependencies = [ - "bytes", - "futures-core", - "futures-io", - "futures-sink", - "pin-project-lite", - "tokio", -] - -[[package]] -name = "toml" -version = "0.8.19" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1ed1f98e3fdc28d6d910e6737ae6ab1a93bf1985935a1193e68f93eeb68d24e" -dependencies = [ - "serde", - "serde_spanned", - "toml_datetime", - "toml_edit 0.22.22", -] - -[[package]] -name = "toml_datetime" -version = "0.6.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0dd7358ecb8fc2f8d014bf86f6f638ce72ba252a2c3a2572f2a795f1d23efb41" -dependencies = [ - "serde", -] - -[[package]] -name = "toml_edit" -version = "0.21.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a8534fd7f78b5405e860340ad6575217ce99f38d4d5c8f2442cb5ecb50090e1" -dependencies = [ - "indexmap 2.6.0", - "toml_datetime", - "winnow 0.5.40", -] - -[[package]] -name = "toml_edit" -version = "0.22.22" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ae48d6208a266e853d946088ed816055e556cc6028c5e8e2b84d9fa5dd7c7f5" -dependencies = [ - "indexmap 2.6.0", - "serde", - "serde_spanned", - "toml_datetime", - "winnow 0.6.20", -] - -[[package]] -name = "tower" -version = "0.4.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"b8fa9be0de6cf49e536ce1851f987bd21a43b771b09473c3549a6c853db37c1c" -dependencies = [ - "futures-core", - "futures-util", - "pin-project", - "pin-project-lite", - "tokio", - "tower-layer", - "tower-service", - "tracing", -] - -[[package]] -name = "tower" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2873938d487c3cfb9aed7546dc9f2711d867c9f90c46b889989a2cb84eba6b4f" -dependencies = [ - "futures-core", - "futures-util", - "pin-project-lite", - "sync_wrapper 0.1.2", - "tower-layer", - "tower-service", -] - -[[package]] -name = "tower-layer" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e" - -[[package]] -name = "tower-service" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" - -[[package]] -name = "tracing" -version = "0.1.40" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef" -dependencies = [ - "log", - "pin-project-lite", - "tracing-attributes", - "tracing-core", -] - -[[package]] -name = "tracing-appender" -version = "0.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3566e8ce28cc0a3fe42519fc80e6b4c943cc4c8cef275620eb8dac2d3d4e06cf" -dependencies = [ - "crossbeam-channel", - "thiserror", - "time", - "tracing-subscriber", -] - -[[package]] -name = "tracing-attributes" -version = "0.1.27" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.68", -] - -[[package]] -name = "tracing-core" -version = "0.1.32" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54" -dependencies = [ - "once_cell", - "valuable", -] - -[[package]] -name = "tracing-futures" -version = "0.2.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97d095ae15e245a057c8e8451bab9b3ee1e1f68e9ba2b4fbc18d0ac5237835f2" -dependencies = [ - "pin-project", - "tracing", -] - -[[package]] -name = "tracing-log" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3" -dependencies = [ - "log", - "once_cell", - "tracing-core", -] - -[[package]] -name = "tracing-serde" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bc6b213177105856957181934e4920de57730fc69bf42c37ee5bb664d406d9e1" -dependencies = [ - "serde", - "tracing-core", -] - -[[package]] -name = "tracing-subscriber" -version = "0.3.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ad0f048c97dbd9faa9b7df56362b8ebcaa52adb06b498c050d2f4e32f90a7a8b" -dependencies = [ - "matchers", - "nu-ansi-term", - "once_cell", - "regex", - "serde", - "serde_json", - "sharded-slab", - "smallvec", - "thread_local", - "tracing", - "tracing-core", - "tracing-log", - "tracing-serde", -] - -[[package]] -name = "try-lock" -version = "0.2.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" - -[[package]] -name = "tungstenite" -version = "0.20.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e3dac10fd62eaf6617d3a904ae222845979aec67c615d1c842b4002c7666fb9" -dependencies = [ - "byteorder", - "bytes", - "data-encoding", - "http 0.2.12", - "httparse", - "log", - "rand", - "rustls 0.21.12", - "sha1", - "thiserror", - "url", - "utf-8", -] - -[[package]] -name = "typenum" -version = "1.17.0" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" - -[[package]] -name = "ucd-trie" -version = "0.1.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2896d95c02a80c6d6a5d6e953d479f5ddf2dfdb6a244441010e373ac0fb88971" - -[[package]] -name = "uint" -version = "0.9.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "76f64bba2c53b04fcab63c01a7d7427eadc821e3bc48c34dc9ba29c501164b52" -dependencies = [ - "byteorder", - "crunchy", - "hex", - "static_assertions", -] - -[[package]] -name = "unarray" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eaea85b334db583fe3274d12b4cd1880032beab409c0d774be044d4480ab9a94" - -[[package]] -name = "unicode-bidi" -version = "0.3.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08f95100a766bf4f8f28f90d77e0a5461bbdb219042e7679bebe79004fed8d75" - -[[package]] -name = "unicode-ident" -version = "1.0.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" - -[[package]] -name = "unicode-normalization" -version = "0.1.23" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a56d1686db2308d901306f92a263857ef59ea39678a5458e7cb17f01415101f5" -dependencies = [ - "tinyvec", -] - -[[package]] -name = "unicode-xid" -version = "0.2.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c" - -[[package]] -name = "untrusted" -version = "0.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a" - -[[package]] -name = "untrusted" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" - -[[package]] -name = "url" -version = "2.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22784dbdf76fdde8af1aeda5622b546b422b6fc585325248a2bf9f5e41e94d6c" -dependencies = [ - "form_urlencoded", - "idna", - "percent-encoding", - "serde", -] - -[[package]] -name = "utf-8" -version = "0.7.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9" - -[[package]] -name = "utf8parse" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" - -[[package]] -name = "uuid" -version = "0.8.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bc5cf98d8186244414c848017f0e2676b3fcb46807f6668a97dfe67359a3c4b7" -dependencies = [ - "getrandom", - "serde", -] - -[[package]] -name = "valuable" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d" - -[[package]] -name = "vcpkg" -version = "0.2.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" - -[[package]] -name = "version_check" -version = "0.9.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" - -[[package]] -name = "wait-timeout" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f200f5b12eb75f8c1ed65abd4b2db8a6e1b138a20de009dacee265a2498f3f6" -dependencies = [ - "libc", -] - -[[package]] -name = "walkdir" -version = "2.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b" -dependencies = [ - "same-file", - "winapi-util", -] - -[[package]] -name = "want" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e" -dependencies = [ - "try-lock", -] - -[[package]] -name = "wasi" -version = "0.11.0+wasi-snapshot-preview1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" - -[[package]] -name = "wasm-bindgen" -version = "0.2.92" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4be2531df63900aeb2bca0daaaddec08491ee64ceecbee5076636a3b026795a8" -dependencies = [ - "cfg-if", - "wasm-bindgen-macro", -] - -[[package]] -name = "wasm-bindgen-backend" -version = "0.2.92" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "614d787b966d3989fa7bb98a654e369c762374fd3213d212cfc0251257e747da" -dependencies = [ - "bumpalo", - "log", - "once_cell", - "proc-macro2", - "quote", - "syn 2.0.68", - "wasm-bindgen-shared", -] - -[[package]] -name = "wasm-bindgen-futures" -version = "0.4.42" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "76bc14366121efc8dbb487ab05bcc9d346b3b5ec0eaa76e46594cabbe51762c0" -dependencies = [ - "cfg-if", - "js-sys", - "wasm-bindgen", - "web-sys", -] - -[[package]] -name = "wasm-bindgen-macro" -version = "0.2.92" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1f8823de937b71b9460c0c34e25f3da88250760bec0ebac694b49997550d726" -dependencies = [ - "quote", - "wasm-bindgen-macro-support", -] - -[[package]] -name = "wasm-bindgen-macro-support" -version = "0.2.92" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e94f17b526d0a461a191c78ea52bbce64071ed5c04c9ffe424dcb38f74171bb7" -dependencies = [ - "proc-macro2", - "quote", - "syn 
2.0.68", - "wasm-bindgen-backend", - "wasm-bindgen-shared", -] - -[[package]] -name = "wasm-bindgen-shared" -version = "0.2.92" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af190c94f2773fdb3729c55b007a722abb5384da03bc0986df4c289bf5567e96" - -[[package]] -name = "wasmtimer" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7ed9d8b15c7fb594d72bfb4b5a276f3d2029333cd93a932f376f5937f6f80ee" -dependencies = [ - "futures", - "js-sys", - "parking_lot", - "pin-utils", - "slab", - "wasm-bindgen", -] - -[[package]] -name = "web-sys" -version = "0.3.69" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77afa9a11836342370f4817622a2f0f418b134426d91a82dfb48f532d2ec13ef" -dependencies = [ - "js-sys", - "wasm-bindgen", -] - -[[package]] -name = "webpki-roots" -version = "0.25.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f20c57d8d7db6d3b86154206ae5d8fba62dd39573114de97c2cb0578251f8e1" - -[[package]] -name = "webpki-roots" -version = "0.26.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd7c23921eeb1713a4e851530e9b9756e4fb0e89978582942612524cf09f01cd" -dependencies = [ - "rustls-pki-types", -] - -[[package]] -name = "winapi" -version = "0.3.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" -dependencies = [ - "winapi-i686-pc-windows-gnu", - "winapi-x86_64-pc-windows-gnu", -] - -[[package]] -name = "winapi-i686-pc-windows-gnu" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" - -[[package]] -name = "winapi-util" -version = "0.1.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4d4cc384e1e73b93bafa6fb4f1df8c41695c8a91cf9c4c64358067d15a7b6c6b" -dependencies = [ - 
"windows-sys 0.52.0", -] - -[[package]] -name = "winapi-x86_64-pc-windows-gnu" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" - -[[package]] -name = "windows-core" -version = "0.52.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "33ab640c8d7e35bf8ba19b884ba838ceb4fba93a4e8c65a9059d08afcfc683d9" -dependencies = [ - "windows-targets 0.52.6", -] - -[[package]] -name = "windows-registry" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e400001bb720a623c1c69032f8e3e4cf09984deec740f007dd2b03ec864804b0" -dependencies = [ - "windows-result", - "windows-strings", - "windows-targets 0.52.6", -] - -[[package]] -name = "windows-result" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d1043d8214f791817bab27572aaa8af63732e11bf84aa21a45a78d6c317ae0e" -dependencies = [ - "windows-targets 0.52.6", -] - -[[package]] -name = "windows-strings" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4cd9b125c486025df0eabcb585e62173c6c9eddcec5d117d3b6e8c30e2ee4d10" -dependencies = [ - "windows-result", - "windows-targets 0.52.6", -] - -[[package]] -name = "windows-sys" -version = "0.48.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" -dependencies = [ - "windows-targets 0.48.5", -] - -[[package]] -name = "windows-sys" -version = "0.52.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" -dependencies = [ - "windows-targets 0.52.6", -] - -[[package]] -name = "windows-sys" -version = "0.59.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" -dependencies = [ - "windows-targets 0.52.6", -] - -[[package]] -name = "windows-targets" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" -dependencies = [ - "windows_aarch64_gnullvm 0.48.5", - "windows_aarch64_msvc 0.48.5", - "windows_i686_gnu 0.48.5", - "windows_i686_msvc 0.48.5", - "windows_x86_64_gnu 0.48.5", - "windows_x86_64_gnullvm 0.48.5", - "windows_x86_64_msvc 0.48.5", -] - -[[package]] -name = "windows-targets" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" -dependencies = [ - "windows_aarch64_gnullvm 0.52.6", - "windows_aarch64_msvc 0.52.6", - "windows_i686_gnu 0.52.6", - "windows_i686_gnullvm", - "windows_i686_msvc 0.52.6", - "windows_x86_64_gnu 0.52.6", - "windows_x86_64_gnullvm 0.52.6", - "windows_x86_64_msvc 0.52.6", -] - -[[package]] -name = "windows_aarch64_gnullvm" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" - -[[package]] -name = "windows_aarch64_gnullvm" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" - -[[package]] -name = "windows_aarch64_msvc" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" - -[[package]] -name = "windows_aarch64_msvc" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" - -[[package]] -name = "windows_i686_gnu" -version = "0.48.5" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" - -[[package]] -name = "windows_i686_gnu" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" - -[[package]] -name = "windows_i686_gnullvm" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" - -[[package]] -name = "windows_i686_msvc" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" - -[[package]] -name = "windows_i686_msvc" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" - -[[package]] -name = "windows_x86_64_gnu" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" - -[[package]] -name = "windows_x86_64_gnu" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" - -[[package]] -name = "windows_x86_64_gnullvm" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" - -[[package]] -name = "windows_x86_64_gnullvm" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" - -[[package]] -name = "windows_x86_64_msvc" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" - -[[package]] -name = "windows_x86_64_msvc" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" - -[[package]] -name = "winnow" -version = "0.5.40" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f593a95398737aeed53e489c785df13f3618e41dbcd6718c6addbf1395aa6876" -dependencies = [ - "memchr", -] - -[[package]] -name = "winnow" -version = "0.6.20" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "36c1fec1a2bb5866f07c25f68c26e565c4c200aebb96d7e55710c19d3e8ac49b" -dependencies = [ - "memchr", -] - -[[package]] -name = "winreg" -version = "0.50.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "524e57b2c537c0f9b1e69f1965311ec12182b4122e45035b1508cd24d2adadb1" -dependencies = [ - "cfg-if", - "windows-sys 0.48.0", -] - -[[package]] -name = "ws_stream_wasm" -version = "0.7.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7999f5f4217fe3818726b66257a4475f71e74ffd190776ad053fa159e50737f5" -dependencies = [ - "async_io_stream", - "futures", - "js-sys", - "log", - "pharos", - "rustc_version 0.4.0", - "send_wrapper 0.6.0", - "thiserror", - "wasm-bindgen", - "wasm-bindgen-futures", - "web-sys", -] - -[[package]] -name = "wyz" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05f360fc0b24296329c78fda852a1e9ae82de9cf7b27dae4b7f62f118f77b9ed" -dependencies = [ - "tap", -] - -[[package]] -name = "yansi" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09041cd90cf85f7f8b2df60c646f853b7f535ce68f85244eb6731cf89fa498ec" - -[[package]] -name = "zerocopy" -version = "0.7.35" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"1b9b4fd18abc82b8136838da5d50bae7bdea537c574d8dc1a34ed098d6c166f0" -dependencies = [ - "zerocopy-derive", -] - -[[package]] -name = "zerocopy-derive" -version = "0.7.35" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.68", -] - -[[package]] -name = "zeroize" -version = "1.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ced3678a2879b30306d323f4542626697a464a97c0a07c9aebf7ebca65cd4dde" -dependencies = [ - "zeroize_derive", -] - -[[package]] -name = "zeroize_derive" -version = "1.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.68", -] - -[[package]] -name = "zip" -version = "0.6.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "760394e246e4c28189f19d488c058bf16f564016aefac5d32bb1f3b51d5e9261" -dependencies = [ - "aes", - "byteorder", - "bzip2", - "constant_time_eq", - "crc32fast", - "crossbeam-utils", - "flate2", - "hmac", - "pbkdf2 0.11.0", - "sha1", - "time", - "zstd", -] - -[[package]] -name = "zstd" -version = "0.11.2+zstd.1.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "20cc960326ece64f010d2d2107537f26dc589a6573a316bd5b1dba685fa5fde4" -dependencies = [ - "zstd-safe", -] - -[[package]] -name = "zstd-safe" -version = "5.0.2+zstd.1.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d2a5585e04f9eea4b2a3d1eca508c4dee9592a89ef6f450c11719da0726f4db" -dependencies = [ - "libc", - "zstd-sys", -] - -[[package]] -name = "zstd-sys" -version = "2.0.12+zstd.1.5.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0a4e40c320c3cb459d9a9ff6de98cff88f4751ee9275d140e2be94a2b74e4c13" -dependencies = [ - "cc", - 
"pkg-config", -] diff --git a/Cargo.toml b/Cargo.toml deleted file mode 100644 index 9d60c985..00000000 --- a/Cargo.toml +++ /dev/null @@ -1,22 +0,0 @@ -[workspace] -resolver = "2" -default-members = ["crates/aggkit"] -members = ["crates/*"] - -[workspace.package] -version = "0.1.0" -edition = "2021" - -[workspace.dependencies] -serde = { version = "1.0.210", features = ["derive"] } -serde_json = "1.0.128" -serde_with = "3.10.0" -thiserror = "1.0.64" -toml = "0.8.19" -tracing = "0.1.40" -tracing-appender = "0.2.3" -tracing-subscriber = { version = "0.3.18", features = ["env-filter"] } -url = { version = "2.5.2", features = ["serde"] } -ethers = "2.0.14" -jsonrpsee = { version = "0.24.5", features = ["full"] } - diff --git a/Dockerfile b/Dockerfile index 3ac06bde..11fd8ae7 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,7 +1,7 @@ # CONTAINER FOR BUILDING BINARY FROM --platform=${BUILDPLATFORM} golang:1.22.4 AS build -WORKDIR $GOPATH/src/github.com/agglayer/aggkit +WORKDIR /app # INSTALL DEPENDENCIES COPY go.mod go.sum ./ @@ -9,43 +9,23 @@ RUN go mod download # BUILD BINARY COPY . . -RUN make build-go build-tools - -# BUILD RUST BIN -FROM --platform=${BUILDPLATFORM} rust:slim-bookworm AS chef -USER root -RUN apt-get update && apt-get install -y openssl pkg-config libssl-dev -RUN cargo install cargo-chef -WORKDIR /app - -FROM chef AS planner - -COPY --link crates crates -COPY --link Cargo.toml Cargo.toml -COPY --link Cargo.lock Cargo.lock - -RUN cargo chef prepare --recipe-path recipe.json --bin cdk - -FROM chef AS builder - -COPY --from=planner /app/recipe.json recipe.json -# Notice that we are specifying the --target flag! 
-RUN cargo chef cook --release --recipe-path recipe.json - -COPY --link crates crates -COPY --link Cargo.toml Cargo.toml -COPY --link Cargo.lock Cargo.lock - -ENV BUILD_SCRIPT_DISABLED=1 -RUN cargo build --release --bin cdk +RUN make build-aggkit build-tools # CONTAINER FOR RUNNING BINARY -FROM --platform=${BUILDPLATFORM} debian:bookworm-slim - -RUN apt-get update && apt-get install -y ca-certificates sqlite3 procps libssl-dev && rm -rf /var/lib/apt/lists/* -COPY --from=builder /app/target/release/cdk /usr/local/bin/ -COPY --from=build /go/src/github.com/agglayer/aggkit/target/cdk-node /usr/local/bin/ +FROM debian:bookworm-slim +RUN apt-get update && \ + apt-get install -y --no-install-recommends \ + ca-certificates \ + sqlite3 \ + procps \ + libssl-dev && \ + rm -rf /var/lib/apt/lists/* +COPY --from=build /app/target/aggkit /usr/local/bin/ + +# ADD NON-ROOT USER +RUN addgroup --system appgroup && adduser --system --ingroup appgroup appuser +USER appuser EXPOSE 5576/tcp -CMD ["/bin/sh", "-c", "cdk"] +CMD ["/bin/sh", "-c", "aggkit"] diff --git a/LICENSE b/LICENSE deleted file mode 100644 index ca9b0551..00000000 --- a/LICENSE +++ /dev/null @@ -1,619 +0,0 @@ - GNU AFFERO GENERAL PUBLIC LICENSE - Version 3, 19 November 2007 - - Copyright (C) 2007 Free Software Foundation, Inc. - Everyone is permitted to copy and distribute verbatim copies - of this license document, but changing it is not allowed. - - Preamble - - The GNU Affero General Public License is a free, copyleft license for -software and other kinds of works, specifically designed to ensure -cooperation with the community in the case of network server software. - - The licenses for most software and other practical works are designed -to take away your freedom to share and change the works. By contrast, -our General Public Licenses are intended to guarantee your freedom to -share and change all versions of a program--to make sure it remains free -software for all its users. 
- - When we speak of free software, we are referring to freedom, not -price. Our General Public Licenses are designed to make sure that you -have the freedom to distribute copies of free software (and charge for -them if you wish), that you receive source code or can get it if you -want it, that you can change the software or use pieces of it in new -free programs, and that you know you can do these things. - - Developers that use our General Public Licenses protect your rights -with two steps: (1) assert copyright on the software, and (2) offer -you this License which gives you legal permission to copy, distribute -and/or modify the software. - - A secondary benefit of defending all users' freedom is that -improvements made in alternate versions of the program, if they -receive widespread use, become available for other developers to -incorporate. Many developers of free software are heartened and -encouraged by the resulting cooperation. However, in the case of -software used on network servers, this result may fail to come about. -The GNU General Public License permits making a modified version and -letting the public access it on a server without ever releasing its -source code to the public. - - The GNU Affero General Public License is designed specifically to -ensure that, in such cases, the modified source code becomes available -to the community. It requires the operator of a network server to -provide the source code of the modified version running there to the -users of that server. Therefore, public use of a modified version, on -a publicly accessible server, gives the public access to the source -code of the modified version. - - An older license, called the Affero General Public License and -published by Affero, was designed to accomplish similar goals. This is -a different license, not a version of the Affero GPL, but Affero has -released a new version of the Affero GPL which permits relicensing under -this license. 
- - The precise terms and conditions for copying, distribution and -modification follow. - - TERMS AND CONDITIONS - - 0. Definitions. - - "This License" refers to version 3 of the GNU Affero General Public License. - - "Copyright" also means copyright-like laws that apply to other kinds of -works, such as semiconductor masks. - - "The Program" refers to any copyrightable work licensed under this -License. Each licensee is addressed as "you". "Licensees" and -"recipients" may be individuals or organizations. - - To "modify" a work means to copy from or adapt all or part of the work -in a fashion requiring copyright permission, other than the making of an -exact copy. The resulting work is called a "modified version" of the -earlier work or a work "based on" the earlier work. - - A "covered work" means either the unmodified Program or a work based -on the Program. - - To "propagate" a work means to do anything with it that, without -permission, would make you directly or secondarily liable for -infringement under applicable copyright law, except executing it on a -computer or modifying a private copy. Propagation includes copying, -distribution (with or without modification), making available to the -public, and in some countries other activities as well. - - To "convey" a work means any kind of propagation that enables other -parties to make or receive copies. Mere interaction with a user through -a computer network, with no transfer of a copy, is not conveying. - - An interactive user interface displays "Appropriate Legal Notices" -to the extent that it includes a convenient and prominently visible -feature that (1) displays an appropriate copyright notice, and (2) -tells the user that there is no warranty for the work (except to the -extent that warranties are provided), that licensees may convey the -work under this License, and how to view a copy of this License. 
If -the interface presents a list of user commands or options, such as a -menu, a prominent item in the list meets this criterion. - - 1. Source Code. - - The "source code" for a work means the preferred form of the work -for making modifications to it. "Object code" means any non-source -form of a work. - - A "Standard Interface" means an interface that either is an official -standard defined by a recognized standards body, or, in the case of -interfaces specified for a particular programming language, one that -is widely used among developers working in that language. - - The "System Libraries" of an executable work include anything, other -than the work as a whole, that (a) is included in the normal form of -packaging a Major Component, but which is not part of that Major -Component, and (b) serves only to enable use of the work with that -Major Component, or to implement a Standard Interface for which an -implementation is available to the public in source code form. A -"Major Component", in this context, means a major essential component -(kernel, window system, and so on) of the specific operating system -(if any) on which the executable work runs, or a compiler used to -produce the work, or an object code interpreter used to run it. - - The "Corresponding Source" for a work in object code form means all -the source code needed to generate, install, and (for an executable -work) run the object code and to modify the work, including scripts to -control those activities. However, it does not include the work's -System Libraries, or general-purpose tools or generally available free -programs which are used unmodified in performing those activities but -which are not part of the work. 
For example, Corresponding Source -includes interface definition files associated with source files for -the work, and the source code for shared libraries and dynamically -linked subprograms that the work is specifically designed to require, -such as by intimate data communication or control flow between those -subprograms and other parts of the work. - - The Corresponding Source need not include anything that users -can regenerate automatically from other parts of the Corresponding -Source. - - The Corresponding Source for a work in source code form is that -same work. - - 2. Basic Permissions. - - All rights granted under this License are granted for the term of -copyright on the Program, and are irrevocable provided the stated -conditions are met. This License explicitly affirms your unlimited -permission to run the unmodified Program. The output from running a -covered work is covered by this License only if the output, given its -content, constitutes a covered work. This License acknowledges your -rights of fair use or other equivalent, as provided by copyright law. - - You may make, run and propagate covered works that you do not -convey, without conditions so long as your license otherwise remains -in force. You may convey covered works to others for the sole purpose -of having them make modifications exclusively for you, or provide you -with facilities for running those works, provided that you comply with -the terms of this License in conveying all material for which you do -not control copyright. Those thus making or running the covered works -for you must do so exclusively on your behalf, under your direction -and control, on terms that prohibit them from making any copies of -your copyrighted material outside their relationship with you. - - Conveying under any other circumstances is permitted solely under -the conditions stated below. Sublicensing is not allowed; section 10 -makes it unnecessary. - - 3. 
Protecting Users' Legal Rights From Anti-Circumvention Law. - - No covered work shall be deemed part of an effective technological -measure under any applicable law fulfilling obligations under article -11 of the WIPO copyright treaty adopted on 20 December 1996, or -similar laws prohibiting or restricting circumvention of such -measures. - - When you convey a covered work, you waive any legal power to forbid -circumvention of technological measures to the extent such circumvention -is effected by exercising rights under this License with respect to -the covered work, and you disclaim any intention to limit operation or -modification of the work as a means of enforcing, against the work's -users, your or third parties' legal rights to forbid circumvention of -technological measures. - - 4. Conveying Verbatim Copies. - - You may convey verbatim copies of the Program's source code as you -receive it, in any medium, provided that you conspicuously and -appropriately publish on each copy an appropriate copyright notice; -keep intact all notices stating that this License and any -non-permissive terms added in accord with section 7 apply to the code; -keep intact all notices of the absence of any warranty; and give all -recipients a copy of this License along with the Program. - - You may charge any price or no price for each copy that you convey, -and you may offer support or warranty protection for a fee. - - 5. Conveying Modified Source Versions. - - You may convey a work based on the Program, or the modifications to -produce it from the Program, in the form of source code under the -terms of section 4, provided that you also meet all of these conditions: - - a) The work must carry prominent notices stating that you modified - it, and giving a relevant date. - - b) The work must carry prominent notices stating that it is - released under this License and any conditions added under section - 7. 
This requirement modifies the requirement in section 4 to - "keep intact all notices". - - c) You must license the entire work, as a whole, under this - License to anyone who comes into possession of a copy. This - License will therefore apply, along with any applicable section 7 - additional terms, to the whole of the work, and all its parts, - regardless of how they are packaged. This License gives no - permission to license the work in any other way, but it does not - invalidate such permission if you have separately received it. - - d) If the work has interactive user interfaces, each must display - Appropriate Legal Notices; however, if the Program has interactive - interfaces that do not display Appropriate Legal Notices, your - work need not make them do so. - - A compilation of a covered work with other separate and independent -works, which are not by their nature extensions of the covered work, -and which are not combined with it such as to form a larger program, -in or on a volume of a storage or distribution medium, is called an -"aggregate" if the compilation and its resulting copyright are not -used to limit the access or legal rights of the compilation's users -beyond what the individual works permit. Inclusion of a covered work -in an aggregate does not cause this License to apply to the other -parts of the aggregate. - - 6. Conveying Non-Source Forms. - - You may convey a covered work in object code form under the terms -of sections 4 and 5, provided that you also convey the -machine-readable Corresponding Source under the terms of this License, -in one of these ways: - - a) Convey the object code in, or embodied in, a physical product - (including a physical distribution medium), accompanied by the - Corresponding Source fixed on a durable physical medium - customarily used for software interchange. 
- - b) Convey the object code in, or embodied in, a physical product - (including a physical distribution medium), accompanied by a - written offer, valid for at least three years and valid for as - long as you offer spare parts or customer support for that product - model, to give anyone who possesses the object code either (1) a - copy of the Corresponding Source for all the software in the - product that is covered by this License, on a durable physical - medium customarily used for software interchange, for a price no - more than your reasonable cost of physically performing this - conveying of source, or (2) access to copy the - Corresponding Source from a network server at no charge. - - c) Convey individual copies of the object code with a copy of the - written offer to provide the Corresponding Source. This - alternative is allowed only occasionally and noncommercially, and - only if you received the object code with such an offer, in accord - with subsection 6b. - - d) Convey the object code by offering access from a designated - place (gratis or for a charge), and offer equivalent access to the - Corresponding Source in the same way through the same place at no - further charge. You need not require recipients to copy the - Corresponding Source along with the object code. If the place to - copy the object code is a network server, the Corresponding Source - may be on a different server (operated by you or a third party) - that supports equivalent copying facilities, provided you maintain - clear directions next to the object code saying where to find the - Corresponding Source. Regardless of what server hosts the - Corresponding Source, you remain obligated to ensure that it is - available for as long as needed to satisfy these requirements. 
- - e) Convey the object code using peer-to-peer transmission, provided - you inform other peers where the object code and Corresponding - Source of the work are being offered to the general public at no - charge under subsection 6d. - - A separable portion of the object code, whose source code is excluded -from the Corresponding Source as a System Library, need not be -included in conveying the object code work. - - A "User Product" is either (1) a "consumer product", which means any -tangible personal property which is normally used for personal, family, -or household purposes, or (2) anything designed or sold for incorporation -into a dwelling. In determining whether a product is a consumer product, -doubtful cases shall be resolved in favor of coverage. For a particular -product received by a particular user, "normally used" refers to a -typical or common use of that class of product, regardless of the status -of the particular user or of the way in which the particular user -actually uses, or expects or is expected to use, the product. A product -is a consumer product regardless of whether the product has substantial -commercial, industrial or non-consumer uses, unless such uses represent -the only significant mode of use of the product. - - "Installation Information" for a User Product means any methods, -procedures, authorization keys, or other information required to install -and execute modified versions of a covered work in that User Product from -a modified version of its Corresponding Source. The information must -suffice to ensure that the continued functioning of the modified object -code is in no case prevented or interfered with solely because -modification has been made. 
- - If you convey an object code work under this section in, or with, or -specifically for use in, a User Product, and the conveying occurs as -part of a transaction in which the right of possession and use of the -User Product is transferred to the recipient in perpetuity or for a -fixed term (regardless of how the transaction is characterized), the -Corresponding Source conveyed under this section must be accompanied -by the Installation Information. But this requirement does not apply -if neither you nor any third party retains the ability to install -modified object code on the User Product (for example, the work has -been installed in ROM). - - The requirement to provide Installation Information does not include a -requirement to continue to provide support service, warranty, or updates -for a work that has been modified or installed by the recipient, or for -the User Product in which it has been modified or installed. Access to a -network may be denied when the modification itself materially and -adversely affects the operation of the network or violates the rules and -protocols for communication across the network. - - Corresponding Source conveyed, and Installation Information provided, -in accord with this section must be in a format that is publicly -documented (and with an implementation available to the public in -source code form), and must require no special password or key for -unpacking, reading or copying. - - 7. Additional Terms. - - "Additional permissions" are terms that supplement the terms of this -License by making exceptions from one or more of its conditions. -Additional permissions that are applicable to the entire Program shall -be treated as though they were included in this License, to the extent -that they are valid under applicable law. 
If additional permissions -apply only to part of the Program, that part may be used separately -under those permissions, but the entire Program remains governed by -this License without regard to the additional permissions. - - When you convey a copy of a covered work, you may at your option -remove any additional permissions from that copy, or from any part of -it. (Additional permissions may be written to require their own -removal in certain cases when you modify the work.) You may place -additional permissions on material, added by you to a covered work, -for which you have or can give appropriate copyright permission. - - Notwithstanding any other provision of this License, for material you -add to a covered work, you may (if authorized by the copyright holders of -that material) supplement the terms of this License with terms: - - a) Disclaiming warranty or limiting liability differently from the - terms of sections 15 and 16 of this License; or - - b) Requiring preservation of specified reasonable legal notices or - author attributions in that material or in the Appropriate Legal - Notices displayed by works containing it; or - - c) Prohibiting misrepresentation of the origin of that material, or - requiring that modified versions of such material be marked in - reasonable ways as different from the original version; or - - d) Limiting the use for publicity purposes of names of licensors or - authors of the material; or - - e) Declining to grant rights under trademark law for use of some - trade names, trademarks, or service marks; or - - f) Requiring indemnification of licensors and authors of that - material by anyone who conveys the material (or modified versions of - it) with contractual assumptions of liability to the recipient, for - any liability that these contractual assumptions directly impose on - those licensors and authors. - - All other non-permissive additional terms are considered "further -restrictions" within the meaning of section 10. 
If the Program as you -received it, or any part of it, contains a notice stating that it is -governed by this License along with a term that is a further -restriction, you may remove that term. If a license document contains -a further restriction but permits relicensing or conveying under this -License, you may add to a covered work material governed by the terms -of that license document, provided that the further restriction does -not survive such relicensing or conveying. - - If you add terms to a covered work in accord with this section, you -must place, in the relevant source files, a statement of the -additional terms that apply to those files, or a notice indicating -where to find the applicable terms. - - Additional terms, permissive or non-permissive, may be stated in the -form of a separately written license, or stated as exceptions; -the above requirements apply either way. - - 8. Termination. - - You may not propagate or modify a covered work except as expressly -provided under this License. Any attempt otherwise to propagate or -modify it is void, and will automatically terminate your rights under -this License (including any patent licenses granted under the third -paragraph of section 11). - - However, if you cease all violation of this License, then your -license from a particular copyright holder is reinstated (a) -provisionally, unless and until the copyright holder explicitly and -finally terminates your license, and (b) permanently, if the copyright -holder fails to notify you of the violation by some reasonable means -prior to 60 days after the cessation. - - Moreover, your license from a particular copyright holder is -reinstated permanently if the copyright holder notifies you of the -violation by some reasonable means, this is the first time you have -received notice of violation of this License (for any work) from that -copyright holder, and you cure the violation prior to 30 days after -your receipt of the notice. 
- - Termination of your rights under this section does not terminate the -licenses of parties who have received copies or rights from you under -this License. If your rights have been terminated and not permanently -reinstated, you do not qualify to receive new licenses for the same -material under section 10. - - 9. Acceptance Not Required for Having Copies. - - You are not required to accept this License in order to receive or -run a copy of the Program. Ancillary propagation of a covered work -occurring solely as a consequence of using peer-to-peer transmission -to receive a copy likewise does not require acceptance. However, -nothing other than this License grants you permission to propagate or -modify any covered work. These actions infringe copyright if you do -not accept this License. Therefore, by modifying or propagating a -covered work, you indicate your acceptance of this License to do so. - - 10. Automatic Licensing of Downstream Recipients. - - Each time you convey a covered work, the recipient automatically -receives a license from the original licensors, to run, modify and -propagate that work, subject to this License. You are not responsible -for enforcing compliance by third parties with this License. - - An "entity transaction" is a transaction transferring control of an -organization, or substantially all assets of one, or subdividing an -organization, or merging organizations. If propagation of a covered -work results from an entity transaction, each party to that -transaction who receives a copy of the work also receives whatever -licenses to the work the party's predecessor in interest had or could -give under the previous paragraph, plus a right to possession of the -Corresponding Source of the work from the predecessor in interest, if -the predecessor has it or can get it with reasonable efforts. - - You may not impose any further restrictions on the exercise of the -rights granted or affirmed under this License. 
For example, you may -not impose a license fee, royalty, or other charge for exercise of -rights granted under this License, and you may not initiate litigation -(including a cross-claim or counterclaim in a lawsuit) alleging that -any patent claim is infringed by making, using, selling, offering for -sale, or importing the Program or any portion of it. - - 11. Patents. - - A "contributor" is a copyright holder who authorizes use under this -License of the Program or a work on which the Program is based. The -work thus licensed is called the contributor's "contributor version". - - A contributor's "essential patent claims" are all patent claims -owned or controlled by the contributor, whether already acquired or -hereafter acquired, that would be infringed by some manner, permitted -by this License, of making, using, or selling its contributor version, -but do not include claims that would be infringed only as a -consequence of further modification of the contributor version. For -purposes of this definition, "control" includes the right to grant -patent sublicenses in a manner consistent with the requirements of -this License. - - Each contributor grants you a non-exclusive, worldwide, royalty-free -patent license under the contributor's essential patent claims, to -make, use, sell, offer for sale, import and otherwise run, modify and -propagate the contents of its contributor version. - - In the following three paragraphs, a "patent license" is any express -agreement or commitment, however denominated, not to enforce a patent -(such as an express permission to practice a patent or covenant not to -sue for patent infringement). To "grant" such a patent license to a -party means to make such an agreement or commitment not to enforce a -patent against the party. 
- - If you convey a covered work, knowingly relying on a patent license, -and the Corresponding Source of the work is not available for anyone -to copy, free of charge and under the terms of this License, through a -publicly available network server or other readily accessible means, -then you must either (1) cause the Corresponding Source to be so -available, or (2) arrange to deprive yourself of the benefit of the -patent license for this particular work, or (3) arrange, in a manner -consistent with the requirements of this License, to extend the patent -license to downstream recipients. "Knowingly relying" means you have -actual knowledge that, but for the patent license, your conveying the -covered work in a country, or your recipient's use of the covered work -in a country, would infringe one or more identifiable patents in that -country that you have reason to believe are valid. - - If, pursuant to or in connection with a single transaction or -arrangement, you convey, or propagate by procuring conveyance of, a -covered work, and grant a patent license to some of the parties -receiving the covered work authorizing them to use, propagate, modify -or convey a specific copy of the covered work, then the patent license -you grant is automatically extended to all recipients of the covered -work and works based on it. - - A patent license is "discriminatory" if it does not include within -the scope of its coverage, prohibits the exercise of, or is -conditioned on the non-exercise of one or more of the rights that are -specifically granted under this License. 
You may not convey a covered -work if you are a party to an arrangement with a third party that is -in the business of distributing software, under which you make payment -to the third party based on the extent of your activity of conveying -the work, and under which the third party grants, to any of the -parties who would receive the covered work from you, a discriminatory -patent license (a) in connection with copies of the covered work -conveyed by you (or copies made from those copies), or (b) primarily -for and in connection with specific products or compilations that -contain the covered work, unless you entered into that arrangement, -or that patent license was granted, prior to 28 March 2007. - - Nothing in this License shall be construed as excluding or limiting -any implied license or other defenses to infringement that may -otherwise be available to you under applicable patent law. - - 12. No Surrender of Others' Freedom. - - If conditions are imposed on you (whether by court order, agreement or -otherwise) that contradict the conditions of this License, they do not -excuse you from the conditions of this License. If you cannot convey a -covered work so as to satisfy simultaneously your obligations under this -License and any other pertinent obligations, then as a consequence you may -not convey it at all. For example, if you agree to terms that obligate you -to collect a royalty for further conveying from those to whom you convey -the Program, the only way you could satisfy both those terms and this -License would be to refrain entirely from conveying the Program. - - 13. Remote Network Interaction; Use with the GNU General Public License. 
- - Notwithstanding any other provision of this License, if you modify the -Program, your modified version must prominently offer all users -interacting with it remotely through a computer network (if your version -supports such interaction) an opportunity to receive the Corresponding -Source of your version by providing access to the Corresponding Source -from a network server at no charge, through some standard or customary -means of facilitating copying of software. This Corresponding Source -shall include the Corresponding Source for any work covered by version 3 -of the GNU General Public License that is incorporated pursuant to the -following paragraph. - - Notwithstanding any other provision of this License, you have -permission to link or combine any covered work with a work licensed -under version 3 of the GNU General Public License into a single -combined work, and to convey the resulting work. The terms of this -License will continue to apply to the part which is the covered work, -but the work with which it is combined will remain governed by version -3 of the GNU General Public License. - - 14. Revised Versions of this License. - - The Free Software Foundation may publish revised and/or new versions of -the GNU Affero General Public License from time to time. Such new versions -will be similar in spirit to the present version, but may differ in detail to -address new problems or concerns. - - Each version is given a distinguishing version number. If the -Program specifies that a certain numbered version of the GNU Affero General -Public License "or any later version" applies to it, you have the -option of following the terms and conditions either of that numbered -version or of any later version published by the Free Software -Foundation. If the Program does not specify a version number of the -GNU Affero General Public License, you may choose any version ever published -by the Free Software Foundation. 
- - If the Program specifies that a proxy can decide which future -versions of the GNU Affero General Public License can be used, that proxy's -public statement of acceptance of a version permanently authorizes you -to choose that version for the Program. - - Later license versions may give you additional or different -permissions. However, no additional obligations are imposed on any -author or copyright holder as a result of your choosing to follow a -later version. - - 15. Disclaimer of Warranty. - - THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY -APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT -HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY -OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, -THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR -PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM -IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF -ALL NECESSARY SERVICING, REPAIR OR CORRECTION. - - 16. Limitation of Liability. - - IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING -WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS -THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY -GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE -USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF -DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD -PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), -EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF -SUCH DAMAGES. - - 17. Interpretation of Sections 15 and 16. 
- - If the disclaimer of warranty and limitation of liability provided -above cannot be given local legal effect according to their terms, -reviewing courts shall apply local law that most closely approximates -an absolute waiver of all civil liability in connection with the -Program, unless a warranty or assumption of liability accompanies a -copy of the Program in return for a fee. - - END OF TERMS AND CONDITIONS diff --git a/LICENSE.Apache-2.0 b/LICENSE.Apache-2.0 new file mode 100644 index 00000000..261eeb9e --- /dev/null +++ b/LICENSE.Apache-2.0 @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/LICENSE.MIT b/LICENSE.MIT new file mode 100644 index 00000000..72dc60d8 --- /dev/null +++ b/LICENSE.MIT @@ -0,0 +1,19 @@ +The MIT License (MIT) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/Makefile b/Makefile index a0e14560..8892cb15 100644 --- a/Makefile +++ b/Makefile @@ -12,7 +12,7 @@ endif GOBASE := $(shell pwd) GOBIN := $(GOBASE)/target GOENVVARS := GOBIN=$(GOBIN) CGO_ENABLED=1 GOARCH=$(ARCH) -GOBINARY := cdk-node +GOBINARY := aggkit GOCMD := $(GOBASE)/cmd LDFLAGS += -X 'github.com/agglayer/aggkit.Version=$(VERSION)' @@ -31,11 +31,6 @@ check-go: check-docker: @which docker > /dev/null || (echo "Error: docker is not installed" && exit 1) -# Check for Docker-compose -.PHONY: check-docker-compose -check-docker-compose: - @which docker-compose > /dev/null || (echo "Error: docker-compose is not installed" && exit 1) - # Check for Protoc .PHONY: check-protoc check-protoc: @@ -51,19 +46,14 @@ build: check-go lint: check-go build-docker: check-docker build-docker-nc: check-docker -stop: check-docker check-docker-compose install-linter: check-go check-curl generate-code-from-proto: check-protoc .PHONY: build ## Builds the binaries locally into ./target -build: build-rust build-go build-tools - -.PHONY: build-rust -build-rust: - export BUILD_SCRIPT_DISABLED=1 && cargo build --release +build: build-aggkit build-tools -.PHONY: build-go -build-go: +.PHONY: build-aggkit +build-aggkit: $(GOENVVARS) go build -ldflags "all=$(LDFLAGS)" -o $(GOBIN)/$(GOBINARY) $(GOCMD) .PHONY: build-tools @@ -71,16 +61,12 @@ build-tools: ## Builds the tools $(GOENVVARS) go build -o $(GOBIN)/aggsender_find_imported_bridge ./tools/aggsender_find_imported_bridge .PHONY: build-docker -build-docker: ## Builds a docker image with the cdk binary - docker build -t cdk -f ./Dockerfile . +build-docker: ## Builds a docker image with the aggkit binary + docker build -t aggkit -f ./Dockerfile . .PHONY: build-docker-nc -build-docker-nc: ## Builds a docker image with the cdk binary - but without build cache - docker build --no-cache=true -t cdk -f ./Dockerfile . 
- -.PHONY: stop -stop: ## Stops all services - docker-compose down +build-docker-nc: ## Builds a docker image with the aggkit binary - but without build cache + docker build --no-cache=true -t aggkit -f ./Dockerfile . .PHONY: test-unit test-unit: diff --git a/README.md b/README.md index a93bd6f1..52e60f2c 100644 --- a/README.md +++ b/README.md @@ -3,8 +3,8 @@
-Logo -Logo +Logo +Logo ## AggKit @@ -34,19 +34,24 @@ make build ### Run locally -You can build and run a debug release locally using: +You can build and run a debug binary locally using: +1. build the `aggkit` binary +```bash +make build-aggkit ``` -cargo run + +2. run the `aggkit` binary +```bash +cd target/ +aggkit run --cfg --components ``` -It will build and run both binaries. ### Running with Kurtosis 1. Run your kurtosis environment 2. build `cdk-erigon` and make it available in your system's PATH 3. Run `scripts/local_config` -4. cargo run -- --config ./tmp/aggkit/local_config/test.kurtosis.toml --chain ./tmp/aggkit/local_config/genesis.json erigon ## Contributing diff --git a/SECURITY.md b/SECURITY.md deleted file mode 100644 index e5d17798..00000000 --- a/SECURITY.md +++ /dev/null @@ -1,17 +0,0 @@ -# Polygon Technology Security Information - -## Link to vulnerability disclosure details (Bug Bounty). -- Websites and Applications: https://hackerone.com/polygon-technology -- Smart Contracts & Blockchain: https://immunefi.com/bounty/polygon - -## Languages that our team speaks and understands. -Preferred-Languages: en - -## Security-related job openings at Polygon. -https://polygon.technology/careers - -## Polygon security contact details. -security@polygon.technology - -## The URL for accessing the security.txt file. 
-Canonical: https://polygon.technology/security.txt diff --git a/book.toml b/book.toml deleted file mode 100644 index d396c6f2..00000000 --- a/book.toml +++ /dev/null @@ -1,14 +0,0 @@ -[book] -authors = ["Aggkit Team - Polygon Labs"] -language = "en" -multilingual = false -src = "docs" -title = "Aggkit Docs" - -[preprocessor.mermaid] -command = "mdbook-mermaid" - -[output.html] -additional-js = ["docs/assets/mermaid.min.js", "docs/assets/mermaid-init.js"] - -[preprocessor.alerts] diff --git a/cmd/main.go b/cmd/main.go index 41b36409..b2680104 100644 --- a/cmd/main.go +++ b/cmd/main.go @@ -24,18 +24,6 @@ var ( Usage: "Configuration file(s)", Required: true, } - customNetworkFlag = cli.StringFlag{ - Name: config.FlagCustomNetwork, - Aliases: []string{"net-file"}, - Usage: "Load the network configuration file if --network=custom", - Required: false, - } - yesFlag = cli.BoolFlag{ - Name: config.FlagYes, - Aliases: []string{"y"}, - Usage: "Automatically accepts any confirmation to execute the command", - Required: false, - } componentsFlag = cli.StringSliceFlag{ Name: config.FlagComponents, Aliases: []string{"co"}, @@ -55,7 +43,6 @@ var ( Usage: "Disable default configuration variables, all of them must be defined on config files", Required: false, } - allowDeprecatedFields = cli.BoolFlag{ Name: config.FlagAllowDeprecatedFields, Usage: "Allow that config-files contains deprecated fields", @@ -69,7 +56,6 @@ func main() { app.Version = aggkit.Version flags := []cli.Flag{ &configFileFlag, - &yesFlag, &componentsFlag, &saveConfigFlag, &disableDefaultConfigVars, @@ -87,7 +73,7 @@ func main() { Aliases: []string{}, Usage: "Run the aggkit client", Action: start, - Flags: append(flags, &customNetworkFlag), + Flags: flags, }, } diff --git a/cmd/run.go b/cmd/run.go index a8de796c..49e51fa5 100644 --- a/cmd/run.go +++ b/cmd/run.go @@ -530,7 +530,7 @@ func runBridgeSyncL2IfNeeded( func createBridgeRPC( cfg jRPC.Config, - cdkNetworkID uint32, + l2NetworkID uint32, sponsor 
*claimsponsor.ClaimSponsor, l1InfoTree *l1infotreesync.L1InfoTreeSync, injectedGERs *lastgersync.LastGERSync, @@ -545,7 +545,7 @@ func createBridgeRPC( logger, cfg.WriteTimeout.Duration, cfg.ReadTimeout.Duration, - cdkNetworkID, + l2NetworkID, sponsor, l1InfoTree, injectedGERs, diff --git a/config/config.go b/config/config.go index 643a8f3e..ecdc902c 100644 --- a/config/config.go +++ b/config/config.go @@ -25,31 +25,10 @@ import ( ) const ( - // FlagYes is the flag for yes. - FlagYes = "yes" // FlagCfg is the flag for cfg. FlagCfg = "cfg" - // FlagCustomNetwork is the flag for the custom network file. - FlagCustomNetwork = "custom-network-file" - // FlagAmount is the flag for amount. - FlagAmount = "amount" - // FlagRemoteMT is the flag for remote-merkletree. - FlagRemoteMT = "remote-merkletree" // FlagComponents is the flag for components. FlagComponents = "components" - // FlagHTTPAPI is the flag for http.api. - FlagHTTPAPI = "http.api" - // FlagKeyStorePath is the path of the key store file containing the private key - // of the account going to sing and approve the tokens. - FlagKeyStorePath = "key-store-path" - // FlagPassword is the password needed to decrypt the key store - FlagPassword = "password" - // FlagMigrations is the flag for migrations. 
- FlagMigrations = "migrations" - // FlagOutputFile is the flag for the output file - FlagOutputFile = "output" - // FlagMaxAmount is the flag to avoid to use the flag FlagAmount - FlagMaxAmount = "max-amount" // FlagSaveConfigPath is the flag to save the final configuration file FlagSaveConfigPath = "save-config-path" // FlagDisableDefaultConfigVars is the flag to force all variables to be set on config-files diff --git a/crates/aggkit-config/Cargo.toml b/crates/aggkit-config/Cargo.toml deleted file mode 100644 index d93a2895..00000000 --- a/crates/aggkit-config/Cargo.toml +++ /dev/null @@ -1,23 +0,0 @@ -[package] -name = "aggkit-config" -version.workspace = true -edition.workspace = true - -[dependencies] -serde = { workspace = true, features = ["derive"] } -serde_with.workspace = true -thiserror.workspace = true -tracing-appender.workspace = true -tracing-subscriber = { workspace = true, features = ["env-filter"] } -tracing.workspace = true -url = { workspace = true, features = ["serde"] } -ethers.workspace = true -jsonrpsee.workspace = true - -[dev-dependencies] -serde_json = { workspace = true } -toml.workspace = true - -[features] -default = [] -testutils = [] diff --git a/crates/aggkit-config/src/l1.rs b/crates/aggkit-config/src/l1.rs deleted file mode 100644 index 4c339b98..00000000 --- a/crates/aggkit-config/src/l1.rs +++ /dev/null @@ -1,38 +0,0 @@ -use ethers::types::Address; -use serde::Deserialize; - -/// The L1 configuration. 
-#[derive(Deserialize, Debug, Clone)] -pub struct L1 { - #[serde(rename = "L1ChainID", alias = "ChainID", default)] - pub l1_chain_id: String, - #[serde(rename = "PolAddr", default)] - pub pol_addr: Address, - #[serde(rename = "ZkEVMAddr", default)] - pub zk_evm_addr: Address, - #[serde(rename = "RollupManagerAddr", default)] - pub rollup_manager_addr: Address, - #[serde(rename = "GlobalExitRootManagerAddr", default)] - pub global_exit_root_manager_addr: Address, -} - -impl Default for L1 { - fn default() -> Self { - // Values are coming from https://github.com/0xPolygon/agglayer/blob/main/config/default.go#L11 - Self { - l1_chain_id: "1337".to_string(), - pol_addr: "0x5b06837A43bdC3dD9F114558DAf4B26ed49842Ed" - .parse() - .unwrap(), - zk_evm_addr: "0x2F50ef6b8e8Ee4E579B17619A92dE3E2ffbD8AD2" - .parse() - .unwrap(), - rollup_manager_addr: "0x1Fe038B54aeBf558638CA51C91bC8cCa06609e91" - .parse() - .unwrap(), - global_exit_root_manager_addr: "0x1f7ad7caA53e35b4f0D138dC5CBF91aC108a2674" - .parse() - .unwrap(), - } - } -} diff --git a/crates/aggkit-config/src/lib.rs b/crates/aggkit-config/src/lib.rs deleted file mode 100644 index 97901ed0..00000000 --- a/crates/aggkit-config/src/lib.rs +++ /dev/null @@ -1,29 +0,0 @@ -//! Aggkit configuration. -//! -//! The Aggkit is configured via its TOML configuration file, `aggkit.toml` -//! by default, which is deserialized into the [`Config`] struct. -use serde::Deserialize; - -pub(crate) const DEFAULT_IP: std::net::Ipv4Addr = std::net::Ipv4Addr::new(0, 0, 0, 0); - -pub(crate) mod l1; -pub mod log; -pub(crate) mod network_config; -pub(crate) mod telemetry; - -pub use log::Log; - -/// The Agglayer configuration. -#[derive(Deserialize, Debug)] -#[cfg_attr(any(test, feature = "testutils"), derive(Default))] -pub struct Config { - /// The log configuration. 
- #[serde(rename = "Log", default)] - pub log: Log, - - #[serde(rename = "ForkUpgradeBatchNumber")] - pub fork_upgrade_batch_number: Option, - - #[serde(rename = "NetworkConfig", default)] - pub network_config: network_config::NetworkConfig, -} diff --git a/crates/aggkit-config/src/log.rs b/crates/aggkit-config/src/log.rs deleted file mode 100644 index fc902fb6..00000000 --- a/crates/aggkit-config/src/log.rs +++ /dev/null @@ -1,108 +0,0 @@ -use std::{fmt::Display, path::PathBuf}; - -use serde::{Deserialize, Deserializer}; -use tracing_subscriber::{fmt::writer::BoxMakeWriter, EnvFilter}; - -/// The log configuration. -#[derive(Deserialize, Debug, Clone, Default)] -#[serde(rename_all = "PascalCase")] -pub struct Log { - /// The `RUST_LOG` environment variable will take precedence over the - /// configuration log level. - #[serde(default)] - pub level: LogLevel, - pub outputs: Vec, - #[serde(default)] - pub format: LogFormat, -} - -/// The log format. -#[derive(Deserialize, Debug, Default, Clone, Copy)] -#[serde(rename_all = "lowercase")] -pub enum LogFormat { - #[default] - Pretty, - Json, -} - -/// The log level. -#[derive(Deserialize, Debug, Default, Clone, Copy)] -#[serde(rename_all = "lowercase")] -pub enum LogLevel { - Trace, - Debug, - #[default] - Info, - Warn, - Error, - Fatal, -} - -impl Display for LogLevel { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - let level = match self { - LogLevel::Trace => "trace", - LogLevel::Debug => "debug", - LogLevel::Info => "info", - LogLevel::Warn => "warn", - LogLevel::Error => "error", - LogLevel::Fatal => "fatal", - }; - - write!(f, "{}", level) - } -} - -impl From for EnvFilter { - fn from(value: LogLevel) -> Self { - EnvFilter::new(format!("warn,agglayer={}", value)) - } -} - -/// The log output. -/// -/// This can be either `stdout`, `stderr`, or a file path. 
-/// -/// The [`Deserialize`] implementation allows for the configuration file to -/// specify the output location as a string, which is then parsed into the -/// appropriate enum variant. If the string is not recognized to be either -/// `stdout` or `stderr`, it is assumed to be a file path. -#[derive(Debug, Clone, Default)] -pub enum LogOutput { - #[default] - Stdout, - Stderr, - File(PathBuf), -} - -impl<'de> Deserialize<'de> for LogOutput { - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - let s = String::deserialize(deserializer)?; - // If the string is not recognized to be either `stdout` or `stderr`, - // it is assumed to be a file path. - match s.as_str() { - "stdout" => Ok(LogOutput::Stdout), - "stderr" => Ok(LogOutput::Stderr), - _ => Ok(LogOutput::File(PathBuf::from(s))), - } - } -} - -impl LogOutput { - /// Get a [`BoxMakeWriter`] for the log output. - /// - /// This can be used to plug the log output into the tracing subscriber. - pub fn as_make_writer(&self) -> BoxMakeWriter { - match self { - LogOutput::Stdout => BoxMakeWriter::new(std::io::stdout), - LogOutput::Stderr => BoxMakeWriter::new(std::io::stderr), - LogOutput::File(path) => { - let appender = tracing_appender::rolling::never(".", path); - BoxMakeWriter::new(appender) - } - } - } -} diff --git a/crates/aggkit-config/src/network_config.rs b/crates/aggkit-config/src/network_config.rs deleted file mode 100644 index 3f49b786..00000000 --- a/crates/aggkit-config/src/network_config.rs +++ /dev/null @@ -1,15 +0,0 @@ -use crate::l1::L1; -use serde::Deserialize; - -/// The L1 configuration. 
-#[derive(Deserialize, Debug, Clone)] -pub struct NetworkConfig { - #[serde(rename = "L1", default)] - pub l1: L1, -} - -impl Default for NetworkConfig { - fn default() -> Self { - Self { l1: L1::default() } - } -} diff --git a/crates/aggkit-config/src/telemetry.rs b/crates/aggkit-config/src/telemetry.rs deleted file mode 100644 index 728611ce..00000000 --- a/crates/aggkit-config/src/telemetry.rs +++ /dev/null @@ -1,23 +0,0 @@ -use super::DEFAULT_IP; -use serde::Deserialize; -use std::net::SocketAddr; - -#[derive(Deserialize, Debug, Clone, Copy)] -#[serde(rename_all = "PascalCase")] -#[allow(dead_code)] -pub struct TelemetryConfig { - #[serde(rename = "PrometheusAddr", default = "default_metrics_api_addr")] - pub addr: SocketAddr, -} - -impl Default for TelemetryConfig { - fn default() -> Self { - Self { - addr: default_metrics_api_addr(), - } - } -} - -const fn default_metrics_api_addr() -> SocketAddr { - SocketAddr::V4(std::net::SocketAddrV4::new(DEFAULT_IP, 3000)) -} diff --git a/crates/aggkit/Cargo.toml b/crates/aggkit/Cargo.toml deleted file mode 100644 index f57cab2f..00000000 --- a/crates/aggkit/Cargo.toml +++ /dev/null @@ -1,30 +0,0 @@ -[package] -name = "cdk" -version.workspace = true -edition.workspace = true - -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html - -[dependencies] -anyhow = "1.0.86" -clap = { version = "4.5.7", features = ["derive", "env"] } -dotenvy = "0.15.7" -execute = "0.2.13" -toml = "0.8.14" -tracing.workspace = true -tracing-subscriber = { workspace = true, features = ["env-filter", "json"] } -url = { workspace = true, features = ["serde"] } -colored = "2.0" -aggkit-config = { path = "../aggkit-config" } -serde.workspace = true -serde_json.workspace = true -tempfile = "3.12.0" -alloy-rpc-client = "0.5.4" -alloy-transport-http = "0.5.4" -tokio = "1.40.0" -alloy-json-rpc = "0.5.4" - -[build-dependencies] -reqwest = {version = "0.12.8", features = ["blocking"]} -serde_json.workspace = true 
-regex = "1.11.1" diff --git a/crates/aggkit/build.rs b/crates/aggkit/build.rs deleted file mode 100644 index 3eabedb2..00000000 --- a/crates/aggkit/build.rs +++ /dev/null @@ -1,103 +0,0 @@ -use regex::Regex; -use reqwest::blocking::get; -use std::env; -use std::fs::File; -use std::io::Write; -use std::path::Path; -use std::path::PathBuf; -use std::process::Command; - -fn main() { - let _ = build_versions(); - - let build_script_disabled = env::var("BUILD_SCRIPT_DISABLED") - .map(|v| v == "1") - .unwrap_or(false); // run by default - - if build_script_disabled { - println!("cargo:warning=Build script is disabled. Skipping build."); - return; - } - - // Determine the directory where the build script is located - let dir = env::var("CARGO_MANIFEST_DIR").unwrap(); - let build_path = PathBuf::from(dir + "/../.."); - println!("cargo:rerun-if-changed=*.go"); - - // Optionally, specify the directory where your Makefile is located - // For this example, it's assumed to be the same as the build script's directory - // If your Makefile is in a different directory, adjust `build_path` accordingly - - // Call the make command - let output = Command::new("make") - .arg("build-go") // Create a new make command - .current_dir(build_path) // Set the current directory for the command - .output() // Execute the command and capture the output - .expect("Failed to execute make command"); - - // Check the output and react accordingly - if !output.status.success() { - // If the make command failed, print the error and exit - let error_message = String::from_utf8_lossy(&output.stderr); - panic!("Make command failed with error: {}", error_message); - } - - // Optionally, print the output of the make command - println!( - "Make command output: {}", - String::from_utf8_lossy(&output.stdout) - ); - - // Here you can also add additional commands to inform Cargo about - // how to rerun the build script. 
For example, to rerun this script - // only when a specific file changes: - // println!("cargo:rerun-if-changed=path/to/file"); -} - -// build_versions retrieves the versions from the Starlark file and embeds them in the binary. -fn build_versions() -> std::io::Result<()> { - // Retrieve the contents of the file from the URL - let url = "https://raw.githubusercontent.com/0xPolygon/kurtosis-cdk/refs/heads/main/input_parser.star"; - let response = get(url).expect("Failed to send request"); - let content = response.text().expect("Failed to read response text"); - - // Write the contents to a file - let out_dir = std::env::var("OUT_DIR").unwrap(); - let dest_path = Path::new(&out_dir).join("input_parser.star"); - let mut file = File::create(&dest_path)?; - file.write_all(content.as_bytes())?; - - // Get the corresponding lines from the contents of the starlark file - let versions = content - .lines() - .skip(34) - .take(15) - .collect::>() - .join("\n"); - - // Replace the string DEFAULT_IMAGES = from the versions string - let versions = versions.replace("DEFAULT_IMAGES = ", ""); - - // Remove all comments to the end of the line using a regexp - let re = Regex::new(r"\s#\s.*\n").unwrap(); - let versions = re.replace_all(&versions, ""); - - // Replace the trailing comma on the last line - let versions = versions.replace(", }", " }"); - - // The versions string is a JSON object we can parse - let versions_json: serde_json::Value = serde_json::from_str(&versions).unwrap(); - - // Write the versions to a file - let dest_path = Path::new(".").join("versions.json"); - let mut file = File::create(&dest_path)?; - file.write_all( - format!( - "{}\n", - serde_json::to_string_pretty(&versions_json).unwrap() - ) - .as_bytes(), - )?; - - Ok(()) -} diff --git a/crates/aggkit/src/allocs_render.rs b/crates/aggkit/src/allocs_render.rs deleted file mode 100644 index 3b881149..00000000 --- a/crates/aggkit/src/allocs_render.rs +++ /dev/null @@ -1,99 +0,0 @@ -use anyhow::{Context, Result}; 
-use serde::{Deserialize, Serialize}; -use serde_json::{self, Value}; -use std::collections::HashMap; -use std::fs::File; -use std::io::Read; -use std::path::Path; - -#[derive(Serialize, Deserialize, Debug, Clone)] -struct Input { - #[serde(rename = "contractName", skip_serializing_if = "Option::is_none")] - contract_name: Option, - #[serde(rename = "accountName", skip_serializing_if = "Option::is_none")] - account_name: Option, - balance: String, - nonce: String, - address: String, - #[serde(skip_serializing_if = "Option::is_none")] - bytecode: Option, - #[serde(skip_serializing_if = "Option::is_none")] - storage: Option>, -} - -#[derive(Serialize, Deserialize, Debug)] -pub struct Wrapper { - pub root: String, - #[serde(rename = "L1Config")] - pub l1_config: L1Config, - genesis: Vec, - #[serde(rename = "rollupCreationBlockNumber")] - pub rollup_creation_block_number: u64, -} - -#[derive(Serialize, Deserialize, Debug)] -pub struct L1Config { - #[serde(rename = "chainId")] - pub chain_id: u64, - #[serde(rename = "polygonZkEVMGlobalExitRootAddress")] - pub zkevm_global_exit_root_address: String, - #[serde(rename = "polygonRollupManagerAddress")] - pub rollup_manager_address: String, - #[serde(rename = "polTokenAddress")] - pub pol_token_address: String, - #[serde(rename = "polygonZkEVMAddress")] - pub zkevm_address: String, -} - -#[derive(Serialize, Deserialize, Debug)] -struct Output { - #[serde(rename = "contractName", skip_serializing_if = "Option::is_none")] - contract_name: Option, - #[serde(rename = "accountName", skip_serializing_if = "Option::is_none")] - account_name: Option, - balance: Option, - nonce: Option, - code: Option, - storage: Option, -} - -pub struct Rendered { - pub output: String, - pub wrapper: Wrapper, -} - -pub fn render_allocs(genesis_file_path: &str) -> Result { - let path = Path::new(genesis_file_path); - let display = path.display(); - - let mut file = File::open(&path).with_context(|| format!("couldn't open {}", display))?; - - let mut 
data = String::new(); - file.read_to_string(&mut data) - .with_context(|| format!("couldn't read {}", display))?; - - let wrapper: Wrapper = serde_json::from_str(&data) - .with_context(|| format!("couldn't parse JSON from {}", display))?; - - let mut outputs: HashMap = HashMap::new(); - - for input in wrapper.genesis.clone() { - let output = Output { - contract_name: input.contract_name, - account_name: input.account_name, - balance: Some(input.balance), - nonce: Some(input.nonce), - code: input.bytecode, - storage: input.storage.map(|s| serde_json::to_value(s).unwrap()), - }; - outputs.insert(input.address, output); - } - - // outputs.sort_by(|a, b| a.contract_name.cmp(&b.contract_name)); - - Ok(Rendered { - output: serde_json::to_string_pretty(&outputs) - .with_context(|| "couldn't serialize outputs to JSON")?, - wrapper, - }) -} diff --git a/crates/aggkit/src/cli.rs b/crates/aggkit/src/cli.rs deleted file mode 100644 index f94ee063..00000000 --- a/crates/aggkit/src/cli.rs +++ /dev/null @@ -1,38 +0,0 @@ -//! Command line interface. -use std::path::PathBuf; - -use clap::{Parser, Subcommand, ValueHint}; - -/// Command line interface. -#[derive(Parser)] -#[command(author, version, about, long_about = None)] -pub(crate) struct Cli { - #[command(subcommand)] - pub(crate) cmd: Commands, -} - -#[derive(Subcommand)] -pub(crate) enum Commands { - /// Run the aggkit with the provided configuration - Node { - /// The path to the configuration file - #[arg( - long, - short = 'C', - value_hint = ValueHint::FilePath, - env = "AGGKIT_CONFIG_PATH" - )] - config: PathBuf, - - /// Components to run. 
- #[arg( - long, - short, - value_hint = ValueHint::CommandString, - env = "AGGKIT_COMPONENTS", - )] - components: Option, - }, - /// Output the corresponding versions of the components - Versions, -} diff --git a/crates/aggkit/src/config_render.rs b/crates/aggkit/src/config_render.rs deleted file mode 100644 index 0abfffec..00000000 --- a/crates/aggkit/src/config_render.rs +++ /dev/null @@ -1,125 +0,0 @@ -use crate::allocs_render::Rendered; -use anyhow::Error; -use aggkit_config::Config; -use std::fs; -use std::path::PathBuf; -use tempfile::{tempdir, TempDir}; - -pub fn render(config: &Config, genesis_file: PathBuf, timestamp: u64) -> Result { - // Create a temporary directory - let tmp_dir = tempdir()?; - let chain_id = "0"; // TODO - this needs to be chainged to the chain_id from the config - let res = crate::allocs_render::render_allocs(genesis_file.to_str().unwrap())?; - // Write the three files to disk - fs::write( - tmp_dir - .path() - .join(format!("dynamic-{}-allocs.json", chain_id.clone())), - res.output.clone(), - )?; - fs::write( - tmp_dir - .path() - .join(format!("dynamic-{}-chainspec.json", chain_id.clone())), - render_chainspec(chain_id.clone()), - )?; - fs::write( - tmp_dir - .path() - .join(format!("dynamic-{}-conf.json", chain_id.clone())), - render_conf(res.wrapper.root.clone(), timestamp), - )?; - - let contents = render_yaml(config, res); - fs::write( - tmp_dir - .path() - .join(format!("dynamic-{}.yaml", chain_id.clone())), - contents, - )?; - - Ok(tmp_dir) -} - -fn render_chainspec(chain_id: String) -> String { - format!( - r#" -{{ - "ChainName": "dynamic-{chain_id}", - "chainId": {chain_id}, - "consensus": "ethash", - "homesteadBlock": 0, - "daoForkBlock": 0, - "eip150Block": 0, - "eip155Block": 0, - "byzantiumBlock": 0, - "constantinopleBlock": 0, - "petersburgBlock": 0, - "istanbulBlock": 0, - "muirGlacierBlock": 0, - "berlinBlock": 0, - "londonBlock": 9999999999999999999999999999999999999999999999999, - "arrowGlacierBlock": 
9999999999999999999999999999999999999999999999999, - "grayGlacierBlock": 9999999999999999999999999999999999999999999999999, - "terminalTotalDifficulty": 58750000000000000000000, - "terminalTotalDifficultyPassed": false, - "shanghaiTime": 9999999999999999999999999999999999999999999999999, - "cancunTime": 9999999999999999999999999999999999999999999999999, - "pragueTime": 9999999999999999999999999999999999999999999999999, - "ethash": {{}} -}} - "# - ) -} - -fn render_conf(root: String, timestamp: u64) -> String { - format!( - r#" -{{ - "root": {:?}, - "timestamp": {:?}, - "gasLimit": 0, - "difficulty": 0 -}} - "#, - root, timestamp - ) -} - -// render_config renders the configuration file for the Erigon node. -fn render_yaml(config: &Config, res: Rendered) -> String { - format!( - r#" -chain: dynamic-{chain_id} -zkevm.l2-chain-id: {chain_id} -zkevm.l2-sequencer-rpc-url: {l2_sequencer_rpc_url} -zkevm.l1-chain-id: {l1_chain_id} -zkevm.l1-rpc-url: {l1_rpc_url} -zkevm.address-sequencer: {sequencer_address} -zkevm.address-zkevm: {zkevm_address} -zkevm.address-rollup: {rollup_address} -zkevm.address-ger-manager: {ger_manager_address} -zkevm.l1-matic-contract-address: {pol_token_address} -zkevm.l1-first-block: {l1_first_block} -datadir: ./data/dynamic-{chain_id} - -externalcl: true -http: true -private.api.addr: "localhost:9092" -zkevm.rpc-ratelimit: 250 -zkevm.datastream-version: 3 -http.api: [eth, debug,net,trace,web3,erigon,zkevm] -http.addr: "0.0.0.0" -http.vhosts: any -http.corsdomain: any -ws: true -"#, - l1_chain_id = config.network_config.l1.l1_chain_id, - sequencer_address = config.sequence_sender.l2_coinbase, - zkevm_address = res.wrapper.l1_config.zkevm_address, - rollup_address = res.wrapper.l1_config.rollup_manager_address, - ger_manager_address = res.wrapper.l1_config.zkevm_global_exit_root_address, - pol_token_address = res.wrapper.l1_config.pol_token_address, - l1_first_block = res.wrapper.rollup_creation_block_number - ) -} diff --git 
a/crates/aggkit/src/helpers.rs b/crates/aggkit/src/helpers.rs deleted file mode 100644 index 0004cac2..00000000 --- a/crates/aggkit/src/helpers.rs +++ /dev/null @@ -1,13 +0,0 @@ -use std::env; - -const AGGKIT_CLIENT_BIN: &str = "aggkit"; - -pub(crate) fn get_bin_path() -> String { - // This is to find the binary when running in development mode - // otherwise it will use system path - let mut bin_path = env::var("CARGO_MANIFEST_DIR").unwrap_or(AGGKIT_CLIENT_BIN.into()); - if bin_path != AGGKIT_CLIENT_BIN { - bin_path = format!("{}/../../target/{}", bin_path, AGGKIT_CLIENT_BIN); - } - bin_path -} diff --git a/crates/aggkit/src/logging.rs b/crates/aggkit/src/logging.rs deleted file mode 100644 index 8a02fc60..00000000 --- a/crates/aggkit/src/logging.rs +++ /dev/null @@ -1,23 +0,0 @@ -use aggkit_config::log::LogFormat; -use tracing_subscriber::{prelude::*, util::SubscriberInitExt, EnvFilter}; - -pub(crate) fn tracing(config: &aggkit_config::Log) { - // TODO: Support multiple outputs. - let writer = config.outputs.first().cloned().unwrap_or_default(); - - let layer = match config.format { - LogFormat::Pretty => tracing_subscriber::fmt::layer() - .pretty() - .with_writer(writer.as_make_writer()) - .with_filter(EnvFilter::try_from_default_env().unwrap_or_else(|_| config.level.into())) - .boxed(), - - LogFormat::Json => tracing_subscriber::fmt::layer() - .json() - .with_writer(writer.as_make_writer()) - .with_filter(EnvFilter::try_from_default_env().unwrap_or_else(|_| config.level.into())) - .boxed(), - }; - - tracing_subscriber::Registry::default().with(layer).init(); -} diff --git a/crates/aggkit/src/main.rs b/crates/aggkit/src/main.rs deleted file mode 100644 index 181c381a..00000000 --- a/crates/aggkit/src/main.rs +++ /dev/null @@ -1,110 +0,0 @@ -//! Command line interface. 
-use aggkit_config::Config; -use clap::Parser; -use cli::Cli; -use colored::*; -use execute::Execute; -use std::path::PathBuf; -use std::process::Command; - -pub mod allocs_render; -mod cli; -mod helpers; -mod logging; -mod versions; - -#[tokio::main] -async fn main() -> anyhow::Result<()> { - dotenvy::dotenv().ok(); - - let cli = Cli::parse(); - - println!( - "{}", - r#"🐼 - _____ _ _____ _____ _ __ - | __ \ | | / ____| __ \| |/ / - | |__) |__ | |_ _ __ _ ___ _ __ | | | | | | ' / - | ___/ _ \| | | | |/ _` |/ _ \| '_ \ | | | | | | < - | | | (_) | | |_| | (_| | (_) | | | | | |____| |__| | . \ - |_| \___/|_|\__, |\__, |\___/|_| |_| \_____|_____/|_|\_\ - __/ | __/ | - |___/ |___/ -"# - .purple() - ); - - match cli.cmd { - cli::Commands::Node { config, components } => node(config, components)?, - cli::Commands::Versions {} => versions::versions(), - } - - Ok(()) -} - -// read_config reads the configuration file and returns the configuration. -fn read_config(config_path: PathBuf) -> anyhow::Result { - let config = std::fs::read_to_string(config_path) - .map_err(|e| anyhow::anyhow!("Failed to read configuration file: {}", e))?; - let config: Config = toml::from_str(&config)?; - - Ok(config) -} - -/// This is the main node entrypoint. -/// -/// This function starts everything needed to run an Agglayer node. -/// Starting by a Tokio runtime which can be used by the different components. -/// The configuration file is parsed and used to configure the node. -/// -/// This function returns on fatal error or after graceful shutdown has -/// completed. 
-pub fn node(config_path: PathBuf, components: Option) -> anyhow::Result<()> { - // Read the config - let config = read_config(config_path.clone())?; - - // Initialize the logger - logging::tracing(&config.log); - - // This is to find the binary when running in development mode - // otherwise it will use system path - let bin_path = helpers::get_bin_path(); - - let components_param = match components { - Some(components) => format!("-components={}", components), - None => "".to_string(), - }; - - // Run the node passing the config file path as argument - let mut command = Command::new(bin_path.clone()); - command.args(&[ - "run", - "-cfg", - config_path.canonicalize()?.to_str().unwrap(), - components_param.as_str(), - ]); - - let output_result = command.execute_output(); - let output = match output_result { - Ok(output) => output, - Err(e) => { - eprintln!( - "Failed to execute command, trying to find executable in path: {}", - bin_path - ); - return Err(e.into()); - } - }; - - if let Some(exit_code) = output.status.code() { - if exit_code == 0 { - println!("Ok."); - } else { - eprintln!("Failed."); - } - } else { - eprintln!("Interrupted!"); - } - - Ok(()) -} diff --git a/crates/aggkit/src/versions.rs b/crates/aggkit/src/versions.rs deleted file mode 100644 index 8ddca296..00000000 --- a/crates/aggkit/src/versions.rs +++ /dev/null @@ -1,39 +0,0 @@ -use colored::*; -use execute::Execute; -use std::io; -use std::process::{Command, Output}; - -fn version() -> Result { - let bin_path = crate::helpers::get_bin_path(); - - // Run the node passing the config file path as argument - let mut command = Command::new(bin_path.clone()); - command.args(&["version"]); - - command.execute_output() -} - -pub(crate) fn versions() { - // Load the versions from the versions.json file in the crate directory - // and parse it using serde_json. 
- let versions = include_str!("../versions.json"); - let versions_json: serde_json::Value = serde_json::from_str(versions).unwrap(); - - // Convert the JSON object to a HashMap. - let versions_map = versions_json.as_object().unwrap(); - - // Get the version of the aggkit binary. - let output = version().unwrap(); - let version = String::from_utf8(output.stdout).unwrap(); - - println!("{}", format!("{}", version.trim()).green()); - - // Multi-line string to print the versions with colors. - let formatted_versions: Vec = versions_map - .iter() - .map(|(key, value)| format!("{}: {}", key.green(), value.to_string().blue())) - .collect(); - - println!("{}", "Supported up to fork12".yellow()); - println!("{}", formatted_versions.join("\n")); -} diff --git a/crates/aggkit/versions.json b/crates/aggkit/versions.json deleted file mode 100644 index fe782279..00000000 --- a/crates/aggkit/versions.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "agglayer_image": "ghcr.io/agglayer/agglayer:0.2.0-rc.17", - "cdk_erigon_node_image": "hermeznetwork/cdk-erigon:v2.60.0", - "cdk_node_image": "ghcr.io/0xpolygon/cdk:0.5.0-beta10", - "cdk_validium_node_image": "0xpolygon/cdk-validium-node:0.7.0-cdk", - "zkevm_bridge_proxy_image": "haproxy:3.1-bookworm", - "zkevm_bridge_service_image": "hermeznetwork/zkevm-bridge-service:v0.6.0-RC3", - "zkevm_bridge_ui_image": "leovct/zkevm-bridge-ui:multi-network", - "zkevm_contracts_image": "leovct/zkevm-contracts:v8.0.0-fork.12-patch.1", - "zkevm_da_image": "0xpolygon/cdk-data-availability:0.0.11", - "zkevm_node_image": "hermeznetwork/zkevm-node:v0.7.3", - "zkevm_pool_manager_image": "hermeznetwork/zkevm-pool-manager:v0.1.2", - "zkevm_prover_image": "hermeznetwork/zkevm-prover:v8.0.0-RC14-fork.12", - "zkevm_sequence_sender_image": "hermeznetwork/zkevm-sequence-sender:v0.2.4" -} diff --git a/docs/SUMMARY.md b/docs/SUMMARY.md index d9d8cddf..ba1e3d61 100644 --- a/docs/SUMMARY.md +++ b/docs/SUMMARY.md @@ -2,6 +2,5 @@ - [Getting Started](./getting_started.md) - 
[Local Debug](./local_debug.md) -- [DA Integration](./da_integration.md) - [Non-EVM integrations](./non_evm_integration.md) - [AggOracle](./aggoracle.md) diff --git a/docs/da_integration.md b/docs/da_integration.md deleted file mode 100644 index cae9c036..00000000 --- a/docs/da_integration.md +++ /dev/null @@ -1,82 +0,0 @@ -# CDK DA Integration - -The purpose of this document is to explain how a 3rd Party Data Availability (DA) solution can integrate with CDK. - -## Considerations - -The code outlined in this document is under development, and while we’re confident that it will be ready for production in a few weeks, it is currently under heavy development. - -For the first iteration of integrations, on-chain verification is not expected. Although this document shows how this could be done at the contract level (doing such a thing on the ZKPs is out of the scope right now). In any case, Agglayer will assert that the data is actually available before settling ZKPs. - -## Smart Contracts - -The versions of the smart contracts that are being targeted for the DA integrations are found in [zkevm-contracts @ feature/banana](https://github.com/0xPolygonHermez/zkevm-contracts/tree/feature/banana). This new version of the contracts allow for multiple “consensus” implementations but there are two that are included by default: - -- zkEVM to implement a rollup. -- Validium to implement a validium. -- Adding a custom solution. - -This document only considers the first approach, reusing the `PolygonValidium` consensus. That being said, the `PolygonValidium` implementation allows a custom smart contract to be used in the relevant interaction. This could be used by DAs to add custom on-chain verification logic. 
While verifying the DA integrity is optional, any new protocol will need to develop a custom smart contract in order to be successfully integrated (more details bellow) - -This is by far the [most relevant part of the contract for DAs](https://github.com/0xPolygonHermez/zkevm-contracts/blob/533641301223a1e413b2e8f0323354671f310922/contracts/v2/consensus/validium/PolygonValidiumEtrog.sol#L91C5-L98C36): - -```javascript - function sequenceBatchesValidium( - ValidiumBatchData[] calldata batches, - uint32 indexL1InfoRoot, - uint64 maxSequenceTimestamp, - bytes32 expectedFinalAccInputHash, - address l2Coinbase, - bytes calldata dataAvailabilityMessage - ) external onlyTrustedSequencer { -``` - -And in particular this [piece of code](https://github.com/0xPolygonHermez/zkevm-contracts/blob/feature/banana/contracts/v2/consensus/validium/PolygonValidiumEtrog.sol#L228C13-L230): - -```javascript - // Validate that the data availability protocol accepts the dataAvailabilityMessage - // note This is a view function, so there's not much risk even if this contract was vulnerable to reentrant attacks - dataAvailabilityProtocol.verifyMessage( - accumulatedNonForcedTransactionsHash, - dataAvailabilityMessage - ); -``` - -It's expected that any protocol build their own contract that follows [this interface](https://github.com/0xPolygonHermez/zkevm-contracts/blob/feature/banana/contracts/v2/interfaces/IDataAvailabilityProtocol.sol#L5), in the same way that the `PolygonDataCommittee` does. The implementation of `verifyMessage` is dependant on each protocol, and in a first iteration could be "dummy", since the AggLayer will ensure that the DA is actually available anyway. That being said we expect protocol integrations to evolve towards "trustless verification" - -## Setup the Node - -In order to integrate a DA solution into CDK, the most fundamental part is for the node to be able to post and retrieve data from the DA backend. 
- -Up until now, DAs would fork the `cdk-validium-node` repo to make such an integration. But maintaining forks can be really painful, so the team is proposing this solution that will allow the different DAs to be 1st class citizens and live on the official `cdk` repo. - -These items would need to be implemented to have a successful integration: - -1. Create a repository that will host the package that implements [this interface](https://github.com/agglayer/aggkit/blob/develop/dataavailability/interfaces.go#L11-L16). You can check how is done for the [DAC case](https://github.com/agglayer/aggkit/blob/develop/dataavailability/datacommittee/datacommittee.go) as an example. -2. Add a new entry on the [supported backend strings](https://github.com/agglayer/aggkit/blob/develop/dataavailability/config.go) -3. [OPTIONAL] Add a config struct in the new package, and add the struct inside the main data availability config struct, this way your package will be able to receive custom configuration using the main config file of the node. -4. `go get` and instantiate your package and use it to create the main data availability instance, as done in the Polygon implementation. - -> [!TIP] -> By default all E2E tests will run using the DAC. It’s possible to run the E2E test using other DA backends changing the test config file. - -## Test the integration - -1. Create an E2E test that uses your protocol by following the [test/e2e/datacommittee_test.go](https://github.com/0xPolygon/cdk-validium-node/blob/develop/test/e2e/datacommittee_test.go) example. -2. Follow the instructions on [Local Debug](local_debug.md) to run Kurtosis enviroment for local testing -4. Deploy the new contract contract to L1 running in Kurtosis -4. Call `setDataAvailabilityProtocol` in validium consensus contract to use the newly deployed contract. -5. Modify the `Makefile` to be able to run your test, take the case of the DAC test as an example here - -### Example flow - -1. 
Sequencer groups N batches of arbitrary size into a sequence -2. Sequencer calls `PostSequence` -3. The DA BAckend implementation decides to split the N batches into M chunks, so they fit as good as possible to the size of the DA blobs of the protocol (or other considerations that the protocol may have) -4. The DA BAckend crafts the `dataAvailabilityMessage`, this is optional but could be used to: - - Verify the existance of the data on the DA backend on L1 (this message will be passed down to the DA smart contract, and it could include merkle proofs, ...). Realisitcally speaking, we don't expect to be implemented on a first iteration - - Help the data retrival process, for instance by including the block height or root of the blobs used to store the data. If many DA blobs are used to store a single sequence, one interesting trick would be to post some metadata in another blob, or the lates used blob, that points to the other used blobs. This way only the pointer to the metadata is needed to include into the `dataAvailabilityMessage` (since this message will be posted as part of the calldata, it's interesting to minimize it's size) -5. The sequencer [posts the sequence on L1](https://github.com/0xPolygonHermez/zkevm-contracts/blob/develop/contracts/v2/consensus/validium/PolygonValidiumEtrog.sol#L85), including the `dataAvailabilityMessage`. On that call, [the DA smart contract will be called](https://github.com/0xPolygonHermez/zkevm-contracts/blob/develop/contracts/v2/consensus/validium/PolygonValidiumEtrog.sol#L217). This can be used to validate that the DA protocol has been used as expected (optional) -6. After that happens, any node synchronizing the network will realise of it through an event of the smart contract, and will be able to retrieve the hashes of each batch and the `dataAvailabilityMessage` -7. And so it will be able to call `GetSequence(hashes common.Hash, dataAvailabilityMessage []byte)` to the DA Backend -8. 
The DA BAckend will then retrieve the data, and return it diff --git a/rpc/openrpc.json b/rpc/openrpc.json index 4e3a2518..056f1f1c 100644 --- a/rpc/openrpc.json +++ b/rpc/openrpc.json @@ -1,7 +1,7 @@ { "openrpc": "1.0.0", "info": { - "title": "CDK Endpoints", + "title": "Aggkit Endpoints", "version": "0.0.1" }, "methods": [ diff --git a/scripts/local_config b/scripts/local_config index 87fe60df..ad8df0bd 100755 --- a/scripts/local_config +++ b/scripts/local_config @@ -15,14 +15,14 @@ function log_fatal() { exit 1 } ############################################################################### -function ok_or_fatal(){ +function ok_or_fatal() { if [ $? -ne 0 ]; then log_fatal $* fi } ############################################################################### -function get_value_from_toml_file(){ +function get_value_from_toml_file() { local _FILE="$1" # KEY = . local _SECTION="$2" @@ -35,9 +35,9 @@ function get_value_from_toml_file(){ fi local _return_next_line=0 local _TMP_FILE=$(mktemp) - cat $_FILE > $_TMP_FILE + cat $_FILE >$_TMP_FILE # Maybe the file doesnt end with a new line so we added just in case - echo >> $_TMP_FILE + echo >>$_TMP_FILE while read -r _LINE; do # Clean up line from spaces and tabs _LINE=$(echo $_LINE | tr -d '[:space:]') @@ -46,114 +46,113 @@ function get_value_from_toml_file(){ return 1 fi if [ $_return_next_line -eq 1 ]; then - # sed sentence remove quotes - echo $_LINE | sed 's/^[[:space:]]*"//;s/"$//' - + # sed sentence remove quotes + echo $_LINE | sed 's/^[[:space:]]*"//;s/"$//' + return 0 fi - #local _key_splitted=(${_LINE//=/ }) + #local _key_splitted=(${_LINE//=/ }) local _key_name=$(echo $_LINE | cut -f 1 -d "=") local _key_value=$(echo $_LINE | cut -f 2- -d "=") if [ "$_key_name" == "$_KEY" ]; then if [ $_key_value == "[" ]; then _return_next_line=1 else - rm $_TMP_FILE - # sed sentence remove quotes - echo $_key_value | sed 's/^[[:space:]]*"//;s/"$//' - return 0 + rm $_TMP_FILE + # sed sentence remove quotes + echo 
$_key_value | sed 's/^[[:space:]]*"//;s/"$//' + return 0 fi fi elif [ "$_LINE" == "[${_SECTION}]" ]; then _inside_section=1 fi - - - done < "$_TMP_FILE" + + done <"$_TMP_FILE" rm $_TMP_FILE return 2 } ############################################################################### -function export_key_from_toml_file_or_fatal(){ +function export_key_from_toml_file_or_fatal() { export_key_from_toml_file "$1" "$2" "$3" "$4" - if [ $? -ne 0 ]; then + if [ $? -ne 0 ]; then local _EXPORTED_VAR_NAME="$1" local _FILE="$2" local _SECTION="$3" local _KEY="$4" - log_fatal "$FUNCNAME: key [$_KEY] not found in section [$_SECTION] in file [$_FILE]" + log_fatal "$FUNCNAME: key [$_KEY] not found in section [$_SECTION] in file [$_FILE]" fi } ############################################################################### -function export_key_from_toml_file(){ +function export_key_from_toml_file() { local _EXPORTED_VAR_NAME="$1" local _FILE="$2" local _SECTION="$3" local _KEY="$4" local _VALUE=$(get_value_from_toml_file $_FILE $_SECTION $_KEY) if [ -z "$_VALUE" ]; then - log_debug "$FUNCNAME: key [$_KEY] not found in section [$_SECTION] in file [$_FILE]" - return 1 + log_debug "$FUNCNAME: key [$_KEY] not found in section [$_SECTION] in file [$_FILE]" + return 1 fi export $_EXPORTED_VAR_NAME="$_VALUE" log_debug "$_EXPORTED_VAR_NAME=${!_EXPORTED_VAR_NAME} \t\t\t# file:$_FILE section:$_SECTION key:$_KEY" return 0 } ############################################################################### -function export_obj_key_from_toml_file_or_fatal(){ - export_obj_key_from_toml_file $* +function export_obj_key_from_toml_file_or_fatal() { + export_obj_key_from_toml_file $* if [ $? 
-ne 0 ]; then local _EXPORTED_VAR_NAME="$1" local _FILE="$2" local _SECTION="$3" local _KEY="$4" - log_fatal "$FUNCNAME: obj_key [$_KEY] not found in section [$_SECTION] in file [$_FILE]" + log_fatal "$FUNCNAME: obj_key [$_KEY] not found in section [$_SECTION] in file [$_FILE]" fi } ############################################################################### -function export_obj_key_from_toml_file(){ +function export_obj_key_from_toml_file() { local _EXPORTED_VAR_NAME="$1" local _FILE="$2" local _SECTION="$3" local _KEY="$4" local _OBJ_KEY="$5" log_debug "export_obj_key_from_toml_file: $_EXPORTED_VAR_NAME $_FILE $_SECTION $_KEY $_OBJ_KEY" - local _VALUE=$(get_value_from_toml_file $_FILE $_SECTION $_KEY) + local _VALUE=$(get_value_from_toml_file $_FILE $_SECTION $_KEY) if [ -z "$_VALUE" ]; then - log_debug "export_obj_key_from_toml_file: obj_key $_KEY not found in section [$_SECTION]" - return 1 + log_debug "export_obj_key_from_toml_file: obj_key $_KEY not found in section [$_SECTION]" + return 1 fi - local _CLEAN_VALUE=$(echo $_VALUE | tr -d '{' | tr -d '}' | tr ',' '\n') + local _CLEAN_VALUE=$(echo $_VALUE | tr -d '{' | tr -d '}' | tr ',' '\n') while read -r _LINE; do - local _key_splitted=(${_LINE//=/ }) - + local _key_splitted=(${_LINE//=/ }) + if [ "${_key_splitted[0]}" == "$_OBJ_KEY" ]; then local _KEY_VALUE=${_key_splitted[1]} - if [ "$_KEY_VALUE" == "[" ]; then + if [ "$_KEY_VALUE" == "[" ]; then read -r _LINE - _KEY_VALUE=$LINE + _KEY_VALUE=$LINE echo "zzz $_KEY_VALUE" fi - local _RES=$(echo $_KEY_VALUE | sed 's/^[[:space:]]*"//;s/"$//') + local _RES=$(echo $_KEY_VALUE | sed 's/^[[:space:]]*"//;s/"$//') export $_EXPORTED_VAR_NAME="${_RES}" log_debug "$_EXPORTED_VAR_NAME=${!_EXPORTED_VAR_NAME} \t\t\t# file:$_FILE section:$_SECTION key:$_KEY obj_key:$_OBJ_KEY" return 0 fi - done <<< "$_CLEAN_VALUE" + done <<<"$_CLEAN_VALUE" log_debug "export_obj_key_from_toml_file: obj_key $_OBJ_KEY not found in section $_SECTION/ $_KEY = $_VALUE" return 1 } 
############################################################################### -function export_values_of_genesis(){ +function export_values_of_genesis() { local _GENESIS_FILE=$1 if [ ! -f $_GENESIS_FILE ]; then log_fatal "Error: genesis file not found: $_GENESIS_FILE" fi - export l1_chain_id=$(jq -r '.L1Config.chainId' $_GENESIS_FILE | tr -d '"') + export l1_chain_id=$(jq -r '.L1Config.chainId' $_GENESIS_FILE | tr -d '"') export pol_token_address=$(jq -r '.L1Config.polTokenAddress' $_GENESIS_FILE) export zkevm_rollup_address=$(jq -r '.L1Config.polygonZkEVMAddress' $_GENESIS_FILE) export zkevm_rollup_manager_address=$(jq -r '.L1Config.polygonRollupManagerAddress' $_GENESIS_FILE) @@ -162,33 +161,33 @@ function export_values_of_genesis(){ } ############################################################################### -function export_values_of_cdk_node_config(){ - local _CDK_CONFIG_FILE=$1 - export_key_from_toml_file_or_fatal zkevm_l2_sequencer_address $_CDK_CONFIG_FILE "." L2Coinbase - export_key_from_toml_file_or_fatal zkevm_l2_sequencer_keystore_password $_CDK_CONFIG_FILE "." SequencerPrivateKeyPassword - export_key_from_toml_file_or_fatal l1_chain_id $_CDK_CONFIG_FILE L1Config chainId - export_key_from_toml_file zkevm_is_validium $_CDK_CONFIG_FILE Common IsValidiumMode - if [ $? -ne 0 ]; then - export_key_from_toml_file_or_fatal zkevm_is_validium $_CDK_CONFIG_FILE "." IsValidiumMode +function export_values_of_cdk_node_config() { + local _AGGKIT_CONFIG_FILE=$1 + export_key_from_toml_file_or_fatal zkevm_l2_sequencer_address $_AGGKIT_CONFIG_FILE "." L2Coinbase + export_key_from_toml_file_or_fatal zkevm_l2_sequencer_keystore_password $_AGGKIT_CONFIG_FILE "." SequencerPrivateKeyPassword + export_key_from_toml_file_or_fatal l1_chain_id $_AGGKIT_CONFIG_FILE L1Config chainId + export_key_from_toml_file zkevm_is_validium $_AGGKIT_CONFIG_FILE Common IsValidiumMode + if [ $? -ne 0 ]; then + export_key_from_toml_file_or_fatal zkevm_is_validium $_AGGKIT_CONFIG_FILE "." 
IsValidiumMode fi - export_key_from_toml_file zkevm_contract_versions $_CDK_CONFIG_FILE Common ContractVersions - if [ $? -ne 0 ]; then - export_key_from_toml_file_or_fatal zkevm_contract_versions $_CDK_CONFIG_FILE "." ContractVersions + export_key_from_toml_file zkevm_contract_versions $_AGGKIT_CONFIG_FILE Common ContractVersions + if [ $? -ne 0 ]; then + export_key_from_toml_file_or_fatal zkevm_contract_versions $_AGGKIT_CONFIG_FILE "." ContractVersions fi - export_key_from_toml_file l2_chain_id $_CDK_CONFIG_FILE Aggregator ChainID - if [ $? -ne 0 ]; then + export_key_from_toml_file l2_chain_id $_AGGKIT_CONFIG_FILE Aggregator ChainID + if [ $? -ne 0 ]; then log_debug "l2_chain_id not found in Aggregator section, using 0" export l2_chain_id="0" fi - export_key_from_toml_file_or_fatal zkevm_l2_agglayer_address $_CDK_CONFIG_FILE "." SenderProofToL1Addr - export_key_from_toml_file_or_fatal zkevm_rollup_fork_id $_CDK_CONFIG_FILE "." ForkId - export_key_from_toml_file zkevm_l2_agglayer_keystore_password $_CDK_CONFIG_FILE AggSender.SequencerPrivateKey Password + export_key_from_toml_file_or_fatal zkevm_l2_agglayer_address $_AGGKIT_CONFIG_FILE "." SenderProofToL1Addr + export_key_from_toml_file_or_fatal zkevm_rollup_fork_id $_AGGKIT_CONFIG_FILE "." ForkId + export_key_from_toml_file zkevm_l2_agglayer_keystore_password $_AGGKIT_CONFIG_FILE AggSender.SequencerPrivateKey Password if [ $? -ne 0 ]; then - export_key_from_toml_file_or_fatal zkevm_l2_agglayer_keystore_password $_CDK_CONFIG_FILE "." SequencerPrivateKeyPassword + export_key_from_toml_file_or_fatal zkevm_l2_agglayer_keystore_password $_AGGKIT_CONFIG_FILE "." SequencerPrivateKeyPassword fi - export_key_from_toml_file zkevm_bridge_address $_CDK_CONFIG_FILE BridgeL1Sync BridgeAddr + export_key_from_toml_file zkevm_bridge_address $_AGGKIT_CONFIG_FILE BridgeL1Sync BridgeAddr if [ $? -ne 0 ]; then - export_key_from_toml_file_or_fatal zkevm_bridge_address $_CDK_CONFIG_FILE "." 
polygonBridgeAddr + export_key_from_toml_file_or_fatal zkevm_bridge_address $_AGGKIT_CONFIG_FILE "." polygonBridgeAddr fi export is_cdk_validium=$zkevm_is_validium export zkevm_rollup_chain_id=$l2_chain_id @@ -200,12 +199,12 @@ function export_values_of_cdk_node_config(){ export zkevm_l2_keystore_password=$zkevm_l2_sequencer_keystore_password } ############################################################################### -# params: +# params: # $1 -> exported variable name # $2 -> service name # $3...$n -> endpoint names (will try all of them until one is found) ############################################################################### -function export_value_from_kurtosis_or_fail(){ +function export_value_from_kurtosis_or_fail() { local _EXPORTED_VAR_NAME="$1" shift local _SERVICE="$1" @@ -229,14 +228,14 @@ function export_value_from_kurtosis_or_fail(){ log_debug "$_EXPORTED_VAR_NAME=${!_EXPORTED_VAR_NAME} \t\t\t# Kurtosis $KURTOSIS_ENCLAVE $_SERVICE $_END_POINT" } ############################################################################### -function export_portnum_from_kurtosis_or_fail(){ +function export_portnum_from_kurtosis_or_fail() { local _EXPORTED_VAR_NAME="$1" - export_value_from_kurtosis_or_fail $* > /dev/null + export_value_from_kurtosis_or_fail $* >/dev/null local _VALUE eval "_VALUE=\$$1" # sed sentece eliminate protocol (xyz://) is have it - # kurtosis sometimes include protocol but not always - local _PORT=$(echo "$_VALUE" | sed -E 's|^[a-zA-Z]+://||' | cut -f 2 -d ":") + # kurtosis sometimes include protocol but not always + local _PORT=$(echo "$_VALUE" | sed -E 's|^[a-zA-Z]+://||' | cut -f 2 -d ":") if [ -z $_PORT ]; then log_fatal "Error getting port number from kurtosis: $2 $3 -> $_VALUE" fi @@ -244,12 +243,12 @@ function export_portnum_from_kurtosis_or_fail(){ log_debug "$_EXPORTED_VAR_NAME=${!_EXPORTED_VAR_NAME} \t\t\t# Kurtosis $KURTOSIS_ENCLAVE $2 $3" } ############################################################################### 
-function export_ports_from_kurtosis(){ - export_portnum_from_kurtosis_or_fail l1_rpc_port el-1-geth-lighthouse rpc - export_portnum_from_kurtosis_or_fail zkevm_rpc_http_port cdk-erigon-rpc-001 http-rpc rpc - export_portnum_from_kurtosis_or_fail zkevm_data_streamer_port cdk-erigon-sequencer-001 data-streamer - export_portnum_from_kurtosis_or_fail aggregator_db_port postgres-001 postgres - export_portnum_from_kurtosis_or_fail agglayer_port agglayer agglayer +function export_ports_from_kurtosis() { + export_portnum_from_kurtosis_or_fail l1_rpc_port el-1-geth-lighthouse rpc + export_portnum_from_kurtosis_or_fail zkevm_rpc_http_port cdk-erigon-rpc-001 http-rpc rpc + export_portnum_from_kurtosis_or_fail zkevm_data_streamer_port cdk-erigon-sequencer-001 data-streamer + export_portnum_from_kurtosis_or_fail aggregator_db_port postgres-001 postgres + export_portnum_from_kurtosis_or_fail agglayer_port agglayer agglayer export aggregator_db_hostname="127.0.0.1" export l1_rpc_url="http://localhost:${l1_rpc_port}" export l2_rpc_url="http://localhost:${zkevm_rpc_http_port}" @@ -257,32 +256,32 @@ function export_ports_from_kurtosis(){ } ############################################################################### -function export_forced_values(){ +function export_forced_values() { export global_log_level="debug" export l2_rpc_name="localhost" export sequencer_name="localhost" export deployment_suffix="" } ############################################################################### -function check_requirements(){ - which kurtosis > /dev/null +function check_requirements() { + which kurtosis >/dev/null if [ $? -ne 0 ]; then - log_error "kurtosis is not installed. Please install it:" - cat << EOF + log_error "kurtosis is not installed. Please install it:" + cat < /dev/null + kurtosis enclave inspect $KURTOSIS_ENCLAVE >/dev/null if [ $? 
-ne 0 ]; then - log_error "Error inspecting enclave $KURTOSIS_ENCLAVE" + log_error "Error inspecting enclave $KURTOSIS_ENCLAVE" echo "You must start kurtosis environment before running this script" echo "- start kurtosis:" echo " kurtosis clean --all; kurtosis run --enclave $KURTOSIS_ENCLAVE --args-file params.yml --image-download always ." @@ -291,7 +290,7 @@ EOF fi } ############################################################################### -function create_dest_folder(){ +function create_dest_folder() { export DEST=${TMP_AGGKIT_FOLDER}/local_config export zkevm_path_rw_data=${TMP_AGGKIT_FOLDER}/runtime [ ! -d ${DEST} ] && mkdir -p ${DEST} @@ -299,42 +298,37 @@ function create_dest_folder(){ mkdir $zkevm_path_rw_data } ############################################################################### -function download_kurtosis_artifacts(){ +function download_kurtosis_artifacts() { kurtosis files download $KURTOSIS_ENCLAVE genesis $DEST ok_or_fatal "Error downloading kurtosis artifact genesis to $DEST" export genesis_file=$DEST/genesis.json - + kurtosis files download $KURTOSIS_ENCLAVE sequencer-keystore $DEST ok_or_fatal "Error downloading kurtosis artifact sequencer-keystore to $DEST" export zkevm_l2_sequencer_keystore_file=$DEST/sequencer.keystore - - kurtosis files download $KURTOSIS_ENCLAVE cdk-node-config-artifact $DEST - ok_or_fatal "Error downloading kurtosis artifact cdk-node-config-artifact to $DEST" - kurtosis files download $KURTOSIS_ENCLAVE aggregator-keystore $DEST + kurtosis files download $KURTOSIS_ENCLAVE cdk-node-config-artifact $DEST ok_or_fatal "Error downloading kurtosis artifact cdk-node-config-artifact to $DEST" - export zkevm_l2_aggregator_keystore_file=$DEST/aggregator.keystore kurtosis files download $KURTOSIS_ENCLAVE agglayer-keystore $DEST ok_or_fatal "Error downloading kurtosis artifact agglayer to $DEST" export zkevm_l2_agglayer_keystore_file=$DEST/agglayer.keystore - } 
############################################################################### -function add_translation_rules_for_validium(){ +function add_translation_rules_for_validium() { if [ $is_cdk_validium != "true" ]; then return fi log_debug " For Validium mode, we need to reach the DAC SERVER: adding translation rules" - + echo "[Aggregator.Synchronizer.Etherman.Validium.Translator]" echo "FullMatchRules = [" - echo " {Old=\"http://zkevm-dac-001:8484\", New=\"http://127.0.0.1:${dac_port}\"}," + echo " {Old=\"http://zkevm-dac-001:8484\", New=\"http://127.0.0.1:${dac_port}\"}," echo " ]" } ############################################################################### -function check_generated_config_file(){ - grep "" $DEST_TEMPLATE_FILE > /dev/null +function check_generated_config_file() { + grep "" $DEST_TEMPLATE_FILE >/dev/null if [ $? -ne 1 ]; then log_error "some values are not set, check $ORIG_TEMPLATE_FILE" echo "" @@ -345,23 +339,23 @@ function check_generated_config_file(){ fi } ############################################################################### -function parse_command_line_args(){ +function parse_command_line_args() { while [[ $# -gt 0 ]]; do case $1 in - -h|--help) - echo "Usage: $0" - echo " -h: help" - exit 0 - ;; - -e|--enclave) - KURTOSIS_ENCLAVE=$2 - shift - shift - ;; - -*) - echo "Invalid Option: $1" 1>&2 - exit 1 - ;; + -h | --help) + echo "Usage: $0" + echo " -h: help" + exit 0 + ;; + -e | --enclave) + KURTOSIS_ENCLAVE=$2 + shift + shift + ;; + -*) + echo "Invalid Option: $1" 1>&2 + exit 1 + ;; esac done } @@ -384,7 +378,7 @@ ORIG_TEMPLATE_FILE=test/config/kurtosis-cdk-node-config.toml.template DEST_TEMPLATE_FILE=$DEST/test.kurtosis.toml # Generate config file -go run scripts/run_template.go $ORIG_TEMPLATE_FILE > $DEST_TEMPLATE_FILE +go run scripts/run_template.go $ORIG_TEMPLATE_FILE >$DEST_TEMPLATE_FILE ok_or_fatal "Error generating template" check_generated_config_file @@ -402,7 +396,7 @@ echo " kurtosis service stop aggkit cdk-node-001" 
echo " " echo "- Add next configuration to vscode launch.json" echo " -----------------------------------------------------------" -cat << EOF +cat <&3 local _prev=$(kurtosis service exec $enclave agglayer "grep \"2 = \" /etc/zkevm/agglayer-config.toml || true" | tail -n +2) if [ ! -z "$_prev" ]; then diff --git a/test/bats/helpers/common-setup.bash b/test/bats/helpers/common-setup.bash index 5f53cbf8..485d2337 100644 --- a/test/bats/helpers/common-setup.bash +++ b/test/bats/helpers/common-setup.bash @@ -18,7 +18,7 @@ _common_setup() { # Kurtosis enclave and service identifiers - readonly enclave=${KURTOSIS_ENCLAVE:-cdk} + readonly enclave=${KURTOSIS_ENCLAVE:-aggkit} readonly contracts_container=${KURTOSIS_CONTRACTS:-contracts-001} readonly contracts_service_wrapper=${KURTOSIS_CONTRACTS_WRAPPER:-"kurtosis service exec $enclave $contracts_container"} readonly erigon_rpc_node=${KURTOSIS_ERIGON_RPC:-cdk-erigon-rpc-001} diff --git a/test/bats/pp-multi/bridge-l2_to_l2-e2e.bats b/test/bats/pp-multi/bridge-l2_to_l2-e2e.bats index a9a904b6..69b7036e 100644 --- a/test/bats/pp-multi/bridge-l2_to_l2-e2e.bats +++ b/test/bats/pp-multi/bridge-l2_to_l2-e2e.bats @@ -12,7 +12,7 @@ setup() { return 1 fi - add_cdk_network2_to_agglayer + add_network2_to_agglayer fund_claim_tx_manager mint_pol_token diff --git a/test/combinations/fork12-pessimistic-multi-attach-second-cdk.yml b/test/combinations/fork12-pessimistic-multi-attach-second-cdk.yml index 18e555d2..8ee35fb0 100644 --- a/test/combinations/fork12-pessimistic-multi-attach-second-cdk.yml +++ b/test/combinations/fork12-pessimistic-multi-attach-second-cdk.yml @@ -26,7 +26,7 @@ args: zkevm_l2_proofsigner_private_key: "0xc7fe3a006d75ba9326d9792523385abb49057c66aee0b8b4248821a89713f975" - cdk_node_image: cdk:latest + cdk_node_image: aggkit:latest cdk_erigon_node_image: hermeznetwork/cdk-erigon:v2.61.2 zkevm_contracts_image: leovct/zkevm-contracts:v9.0.0-rc.3-pp-fork.12-patch.1 additional_services: [] diff --git 
a/test/combinations/fork12-pessimistic-multi.yml b/test/combinations/fork12-pessimistic-multi.yml index 36bb57ea..ac9247f5 100644 --- a/test/combinations/fork12-pessimistic-multi.yml +++ b/test/combinations/fork12-pessimistic-multi.yml @@ -1,5 +1,5 @@ args: - cdk_node_image: cdk:latest + cdk_node_image: aggkit:latest agglayer_image: ghcr.io/agglayer/agglayer:0.2.0-rc.20 cdk_erigon_node_image: hermeznetwork/cdk-erigon:v2.61.2 zkevm_contracts_image: leovct/zkevm-contracts:v9.0.0-rc.3-pp-fork.12-patch.1 diff --git a/test/combinations/fork12-pessimistic.yml b/test/combinations/fork12-pessimistic.yml index 96b3ec18..add84430 100644 --- a/test/combinations/fork12-pessimistic.yml +++ b/test/combinations/fork12-pessimistic.yml @@ -1,7 +1,7 @@ args: agglayer_image: ghcr.io/agglayer/agglayer:0.2.0-rc.20 cdk_erigon_node_image: hermeznetwork/cdk-erigon:v2.61.2 - cdk_node_image: cdk + cdk_node_image: aggkit:latest zkevm_bridge_proxy_image: haproxy:3.0-bookworm zkevm_bridge_service_image: hermeznetwork/zkevm-bridge-service:v0.6.0-RC1 zkevm_bridge_ui_image: leovct/zkevm-bridge-ui:multi-network diff --git a/test/config/test.config.toml b/test/config/test.config.toml deleted file mode 100644 index f21ae965..00000000 --- a/test/config/test.config.toml +++ /dev/null @@ -1,3 +0,0 @@ -[Common] -IsValidiumMode = false -ContractVersions = "banana" diff --git a/test/config/test.genesis.json b/test/config/test.genesis.json deleted file mode 100644 index 9744f7b9..00000000 --- a/test/config/test.genesis.json +++ /dev/null @@ -1,100 +0,0 @@ -{ - "l1Config": { - "chainId": 1337, - "polygonZkEVMAddress": "0x8dAF17A20c9DBA35f005b6324F493785D239719d", - "polygonRollupManagerAddress": "0xB7f8BC63BbcaD18155201308C8f3540b07f84F5e", - "polTokenAddress": "0x5FbDB2315678afecb367f032d93F642f64180aa3", - "polygonZkEVMGlobalExitRootAddress": "0x8A791620dd6260079BF849Dc5567aDC3F2FdC318" - }, - "genesisBlockNumber": 67, - "root": "0xcc9ec17819f4ac7f282949ca8c379c4d3ee1b8b7908c51b9b405b6319af67b32", - 
"genesis": [ - { - "contractName": "PolygonZkEVMDeployer", - "balance": "0", - "nonce": "4", - "address": "0x51dbd54FCCb6b3A07738fd3E156D588e71f79973", - "bytecode": "0x6080604052600436106100705760003560e01c8063715018a61161004e578063715018a6146100e65780638da5cb5b146100fb578063e11ae6cb14610126578063f2fde38b1461013957600080fd5b80632b79805a146100755780634a94d4871461008a5780636d07dbf81461009d575b600080fd5b610088610083366004610927565b610159565b005b6100886100983660046109c7565b6101cb565b3480156100a957600080fd5b506100bd6100b8366004610a1e565b61020d565b60405173ffffffffffffffffffffffffffffffffffffffff909116815260200160405180910390f35b3480156100f257600080fd5b50610088610220565b34801561010757600080fd5b5060005473ffffffffffffffffffffffffffffffffffffffff166100bd565b610088610134366004610a40565b610234565b34801561014557600080fd5b50610088610154366004610a90565b61029b565b610161610357565b600061016e8585856103d8565b905061017a8183610537565b5060405173ffffffffffffffffffffffffffffffffffffffff821681527fba82f25fed02cd2a23d9f5d11c2ef588d22af5437cbf23bfe61d87257c480e4c9060200160405180910390a15050505050565b6101d3610357565b6101de83838361057b565b506040517f25adb19089b6a549831a273acdf7908cff8b7ee5f551f8d1d37996cf01c5df5b90600090a1505050565b600061021983836105a9565b9392505050565b610228610357565b61023260006105b6565b565b61023c610357565b60006102498484846103d8565b60405173ffffffffffffffffffffffffffffffffffffffff821681529091507fba82f25fed02cd2a23d9f5d11c2ef588d22af5437cbf23bfe61d87257c480e4c9060200160405180910390a150505050565b6102a3610357565b73ffffffffffffffffffffffffffffffffffffffff811661034b576040517f08c379a000000000000000000000000000000000000000000000000000000000815260206004820152602660248201527f4f776e61626c653a206e6577206f776e657220697320746865207a65726f206160448201527f646472657373000000000000000000000000000000000000000000000000000060648201526084015b60405180910390fd5b610354816105b6565b50565b60005473ffffffffffffffffffffffffffffffffffffffff163314610232576040517f08c379a000000000000000000000000000000000000000000
000000000000000815260206004820181905260248201527f4f776e61626c653a2063616c6c6572206973206e6f7420746865206f776e65726044820152606401610342565b600083471015610444576040517f08c379a000000000000000000000000000000000000000000000000000000000815260206004820152601d60248201527f437265617465323a20696e73756666696369656e742062616c616e63650000006044820152606401610342565b81516000036104af576040517f08c379a000000000000000000000000000000000000000000000000000000000815260206004820181905260248201527f437265617465323a2062797465636f6465206c656e677468206973207a65726f6044820152606401610342565b8282516020840186f5905073ffffffffffffffffffffffffffffffffffffffff8116610219576040517f08c379a000000000000000000000000000000000000000000000000000000000815260206004820152601960248201527f437265617465323a204661696c6564206f6e206465706c6f79000000000000006044820152606401610342565b6060610219838360006040518060400160405280601e81526020017f416464726573733a206c6f772d6c6576656c2063616c6c206661696c6564000081525061062b565b60606105a1848484604051806060016040528060298152602001610b3d6029913961062b565b949350505050565b6000610219838330610744565b6000805473ffffffffffffffffffffffffffffffffffffffff8381167fffffffffffffffffffffffff0000000000000000000000000000000000000000831681178455604051919092169283917f8be0079c531659141344cd1fd0a4f28419497f9722a3daafe3b4186f6b6457e09190a35050565b6060824710156106bd576040517f08c379a000000000000000000000000000000000000000000000000000000000815260206004820152602660248201527f416464726573733a20696e73756666696369656e742062616c616e636520666f60448201527f722063616c6c00000000000000000000000000000000000000000000000000006064820152608401610342565b6000808673ffffffffffffffffffffffffffffffffffffffff1685876040516106e69190610acf565b60006040518083038185875af1925050503d8060008114610723576040519150601f19603f3d011682016040523d82523d6000602084013e610728565b606091505b50915091506107398783838761076e565b979650505050505050565b6000604051836040820152846020820152828152600b8101905060ff815360559020949350505050565b6060831561080457825160000
36107fd5773ffffffffffffffffffffffffffffffffffffffff85163b6107fd576040517f08c379a000000000000000000000000000000000000000000000000000000000815260206004820152601d60248201527f416464726573733a2063616c6c20746f206e6f6e2d636f6e74726163740000006044820152606401610342565b50816105a1565b6105a183838151156108195781518083602001fd5b806040517f08c379a00000000000000000000000000000000000000000000000000000000081526004016103429190610aeb565b7f4e487b7100000000000000000000000000000000000000000000000000000000600052604160045260246000fd5b600082601f83011261088d57600080fd5b813567ffffffffffffffff808211156108a8576108a861084d565b604051601f83017fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe0908116603f011681019082821181831017156108ee576108ee61084d565b8160405283815286602085880101111561090757600080fd5b836020870160208301376000602085830101528094505050505092915050565b6000806000806080858703121561093d57600080fd5b8435935060208501359250604085013567ffffffffffffffff8082111561096357600080fd5b61096f8883890161087c565b9350606087013591508082111561098557600080fd5b506109928782880161087c565b91505092959194509250565b803573ffffffffffffffffffffffffffffffffffffffff811681146109c257600080fd5b919050565b6000806000606084860312156109dc57600080fd5b6109e58461099e565b9250602084013567ffffffffffffffff811115610a0157600080fd5b610a0d8682870161087c565b925050604084013590509250925092565b60008060408385031215610a3157600080fd5b50508035926020909101359150565b600080600060608486031215610a5557600080fd5b8335925060208401359150604084013567ffffffffffffffff811115610a7a57600080fd5b610a868682870161087c565b9150509250925092565b600060208284031215610aa257600080fd5b6102198261099e565b60005b83811015610ac6578181015183820152602001610aae565b50506000910152565b60008251610ae1818460208701610aab565b9190910192915050565b6020815260008251806020840152610b0a816040850160208701610aab565b601f017fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe016919091016040019291505056fe416464726573733a206c6f772d6c6576656c2063616c6c20776974682076616c7565206661696
c6564a2646970667358221220964619cee0e0baf94c6f8763f013be157da5d54c89e5cff4a8caf4266e13f13a64736f6c63430008140033", - "storage": { - "0x0000000000000000000000000000000000000000000000000000000000000000": "0x000000000000000000000000f39fd6e51aad88f6f4ce6ab8827279cfffb92266" - } - }, - { - "contractName": "ProxyAdmin", - "balance": "0", - "nonce": "1", - "address": "0xe34Fe58DDa5b8c6D547E4857E987633aa86a5e90", - "bytecode": "0x60806040526004361061007b5760003560e01c80639623609d1161004e5780639623609d1461012b57806399a88ec41461013e578063f2fde38b1461015e578063f3b7dead1461017e57600080fd5b8063204e1c7a14610080578063715018a6146100c95780637eff275e146100e05780638da5cb5b14610100575b600080fd5b34801561008c57600080fd5b506100a061009b366004610608565b61019e565b60405173ffffffffffffffffffffffffffffffffffffffff909116815260200160405180910390f35b3480156100d557600080fd5b506100de610255565b005b3480156100ec57600080fd5b506100de6100fb36600461062c565b610269565b34801561010c57600080fd5b5060005473ffffffffffffffffffffffffffffffffffffffff166100a0565b6100de610139366004610694565b6102f7565b34801561014a57600080fd5b506100de61015936600461062c565b61038c565b34801561016a57600080fd5b506100de610179366004610608565b6103e8565b34801561018a57600080fd5b506100a0610199366004610608565b6104a4565b60008060008373ffffffffffffffffffffffffffffffffffffffff166040516101ea907f5c60da1b00000000000000000000000000000000000000000000000000000000815260040190565b600060405180830381855afa9150503d8060008114610225576040519150601f19603f3d011682016040523d82523d6000602084013e61022a565b606091505b50915091508161023957600080fd5b8080602001905181019061024d9190610788565b949350505050565b61025d6104f0565b6102676000610571565b565b6102716104f0565b6040517f8f28397000000000000000000000000000000000000000000000000000000000815273ffffffffffffffffffffffffffffffffffffffff8281166004830152831690638f283970906024015b600060405180830381600087803b1580156102db57600080fd5b505af11580156102ef573d6000803e3d6000fd5b505050505050565b6102ff6104f0565b6040517f4f1ef28600000000000000000000000
000000000000000000000000000000000815273ffffffffffffffffffffffffffffffffffffffff841690634f1ef28690349061035590869086906004016107a5565b6000604051808303818588803b15801561036e57600080fd5b505af1158015610382573d6000803e3d6000fd5b5050505050505050565b6103946104f0565b6040517f3659cfe600000000000000000000000000000000000000000000000000000000815273ffffffffffffffffffffffffffffffffffffffff8281166004830152831690633659cfe6906024016102c1565b6103f06104f0565b73ffffffffffffffffffffffffffffffffffffffff8116610498576040517f08c379a000000000000000000000000000000000000000000000000000000000815260206004820152602660248201527f4f776e61626c653a206e6577206f776e657220697320746865207a65726f206160448201527f646472657373000000000000000000000000000000000000000000000000000060648201526084015b60405180910390fd5b6104a181610571565b50565b60008060008373ffffffffffffffffffffffffffffffffffffffff166040516101ea907ff851a44000000000000000000000000000000000000000000000000000000000815260040190565b60005473ffffffffffffffffffffffffffffffffffffffff163314610267576040517f08c379a000000000000000000000000000000000000000000000000000000000815260206004820181905260248201527f4f776e61626c653a2063616c6c6572206973206e6f7420746865206f776e6572604482015260640161048f565b6000805473ffffffffffffffffffffffffffffffffffffffff8381167fffffffffffffffffffffffff0000000000000000000000000000000000000000831681178455604051919092169283917f8be0079c531659141344cd1fd0a4f28419497f9722a3daafe3b4186f6b6457e09190a35050565b73ffffffffffffffffffffffffffffffffffffffff811681146104a157600080fd5b60006020828403121561061a57600080fd5b8135610625816105e6565b9392505050565b6000806040838503121561063f57600080fd5b823561064a816105e6565b9150602083013561065a816105e6565b809150509250929050565b7f4e487b7100000000000000000000000000000000000000000000000000000000600052604160045260246000fd5b6000806000606084860312156106a957600080fd5b83356106b4816105e6565b925060208401356106c4816105e6565b9150604084013567ffffffffffffffff808211156106e157600080fd5b818601915086601f8301126106f557600080fd5b81358181111
561070757610707610665565b604051601f82017fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe0908116603f0116810190838211818310171561074d5761074d610665565b8160405282815289602084870101111561076657600080fd5b8260208601602083013760006020848301015280955050505050509250925092565b60006020828403121561079a57600080fd5b8151610625816105e6565b73ffffffffffffffffffffffffffffffffffffffff8316815260006020604081840152835180604085015260005b818110156107ef578581018301518582016060015282016107d3565b5060006060828601015260607fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe0601f83011685010192505050939250505056fea2646970667358221220c9867ffac53151bdb1305d8f5e3e883cd83e5270c7ec09cdc24e837b2e65239064736f6c63430008140033", - "storage": { - "0x0000000000000000000000000000000000000000000000000000000000000000": "0x0000000000000000000000000165878a594ca255338adfa4d48449f69242eb8f" - } - }, - { - "contractName": "PolygonZkEVMBridge implementation", - "balance": "0", - "nonce": "1", - "address": "0x493732fB136a380920C390a85fc27d79C7b70756", - "bytecode": 
"0x6080604052600436106101a35760003560e01c806383f24403116100e2578063ccaa2d1111610085578063ccaa2d1114610511578063cd58657914610531578063d02103ca14610544578063dbc169761461056b578063ee25560b14610580578063f5efcd79146105ad578063f811bff7146105cd578063fb570834146105ed57600080fd5b806383f244031461040b5780638ed7e3f21461042b578063aaa13cc21461044b578063b8b284d01461046b578063bab161bf1461048b578063be5831c7146104ad578063c00f14ab146104d1578063cc461632146104f157600080fd5b80633cbc795b1161014a5780633cbc795b146102fd5780633e197043146103365780634b2f336d146103565780635ca1e165146103765780637843298b1461038b57806379e2cf97146103ab57806381b1c174146103c057806383c43a55146103f657600080fd5b806315064c96146101a85780632072f6c5146101d757806322e95f2c146101ee578063240ff3781461021b57806327aef4e81461022e5780632dfdf0b514610250578063318aee3d146102745780633c351e10146102dd575b600080fd5b3480156101b457600080fd5b506068546101c29060ff1681565b60405190151581526020015b60405180910390f35b3480156101e357600080fd5b506101ec61060d565b005b3480156101fa57600080fd5b5061020e610209366004612b65565b610642565b6040516101ce9190612b9c565b6101ec610229366004612c06565b610693565b34801561023a57600080fd5b50610243610703565b6040516101ce9190612ccf565b34801561025c57600080fd5b5061026660535481565b6040519081526020016101ce565b34801561028057600080fd5b506102b961028f366004612ce9565b606b6020526000908152604090205463ffffffff811690600160201b90046001600160a01b031682565b6040805163ffffffff90931683526001600160a01b039091166020830152016101ce565b3480156102e957600080fd5b50606d5461020e906001600160a01b031681565b34801561030957600080fd5b50606d5461032190600160a01b900463ffffffff1681565b60405163ffffffff90911681526020016101ce565b34801561034257600080fd5b50610266610351366004612d15565b610791565b34801561036257600080fd5b50606f5461020e906001600160a01b031681565b34801561038257600080fd5b5061026661081e565b34801561039757600080fd5b5061020e6103a6366004612d94565b6108fb565b3480156103b757600080fd5b506101ec610925565b3480156103cc57600080fd5b5061020e6103db366004612ddd565b606a60205260009081526
0409020546001600160a01b031681565b34801561040257600080fd5b50610243610946565b34801561041757600080fd5b50610266610426366004612e08565b610965565b34801561043757600080fd5b50606c5461020e906001600160a01b031681565b34801561045757600080fd5b5061020e610466366004612f12565b610a3b565b34801561047757600080fd5b506101ec610486366004612fad565b610b3d565b34801561049757600080fd5b5060685461032190610100900463ffffffff1681565b3480156104b957600080fd5b5060685461032190600160c81b900463ffffffff1681565b3480156104dd57600080fd5b506102436104ec366004612ce9565b610c04565b3480156104fd57600080fd5b506101c261050c36600461302f565b610c49565b34801561051d57600080fd5b506101ec61052c366004613062565b610cd2565b6101ec61053f36600461314d565b6111c7565b34801561055057600080fd5b5060685461020e90600160281b90046001600160a01b031681565b34801561057757600080fd5b506101ec611621565b34801561058c57600080fd5b5061026661059b366004612ddd565b60696020526000908152604090205481565b3480156105b957600080fd5b506101ec6105c8366004613062565b611654565b3480156105d957600080fd5b506101ec6105e83660046131e2565b6118ef565b3480156105f957600080fd5b506101c261060836600461328a565b611b62565b606c546001600160a01b0316331461063857604051631736745960e31b815260040160405180910390fd5b610640611b7a565b565b6000606a6000848460405160200161065b9291906132d2565b60408051601f19818403018152918152815160209283012083529082019290925201600020546001600160a01b031690505b92915050565b60685460ff16156106b757604051630bc011ff60e21b815260040160405180910390fd5b34158015906106d05750606f546001600160a01b031615155b156106ee576040516301bd897160e61b815260040160405180910390fd5b6106fc858534868686611bd6565b5050505050565b606e8054610710906132fc565b80601f016020809104026020016040519081016040528092919081815260200182805461073c906132fc565b80156107895780601f1061075e57610100808354040283529160200191610789565b820191906000526020600020905b81548152906001019060200180831161076c57829003601f168201915b505050505081565b6040516001600160f81b031960f889901b1660208201526001600160e01b031960e088811b821660218401526001600160601b0319606089811b82166
0258601529188901b909216603984015285901b16603d8201526051810183905260718101829052600090609101604051602081830303815290604052805190602001209050979650505050505050565b605354600090819081805b60208110156108f2578083901c600116600103610886576033816020811061085357610853613336565b015460408051602081019290925281018590526060016040516020818303038152906040528051906020012093506108b3565b60408051602081018690529081018390526060016040516020818303038152906040528051906020012093505b604080516020810184905290810183905260600160405160208183030381529060405280519060200120915080806108ea90613362565b915050610829565b50919392505050565b600061091d848461090b85611ca0565b61091486611d5f565b61046687611e17565b949350505050565b605354606854600160c81b900463ffffffff16101561064057610640611ecf565b60405180611ba00160405280611b668152602001613a7a611b66913981565b600083815b6020811015610a3257600163ffffffff8516821c811690036109d55784816020811061099857610998613336565b6020020135826040516020016109b8929190918252602082015260400190565b604051602081830303815290604052805190602001209150610a20565b818582602081106109e8576109e8613336565b6020020135604051602001610a07929190918252602082015260400190565b6040516020818303038152906040528051906020012091505b80610a2a81613362565b91505061096a565b50949350505050565b6000808686604051602001610a519291906132d2565b604051602081830303815290604052805190602001209050600060ff60f81b308360405180611ba00160405280611b668152602001613a7a611b669139898989604051602001610aa39392919061337b565b60408051601f1981840301815290829052610ac192916020016133b4565b60405160208183030381529060405280519060200120604051602001610b1994939291906001600160f81b031994909416845260609290921b6001600160601b03191660018401526015830152603582015260550190565b60408051808303601f19018152919052805160209091012098975050505050505050565b60685460ff1615610b6157604051630bc011ff60e21b815260040160405180910390fd5b606f546001600160a01b0316610b8a5760405163dde3cda760e01b815260040160405180910390fd5b606f54604051632770a7eb60e21b81526001600160a01b0390911690639dc29fac90610bbc9033908890600
4016133e3565b600060405180830381600087803b158015610bd657600080fd5b505af1158015610bea573d6000803e3d6000fd5b50505050610bfc868686868686611bd6565b505050505050565b6060610c0f82611ca0565b610c1883611d5f565b610c2184611e17565b604051602001610c339392919061337b565b6040516020818303038152906040529050919050565b6068546000908190610100900463ffffffff16158015610c6f575063ffffffff83166001145b15610c81575063ffffffff8316610ca8565b610c95600160201b63ffffffff85166133fc565b610ca59063ffffffff8616613413565b90505b600881901c600090815260696020526040902054600160ff9092169190911b908116149392505050565b60685460ff1615610cf657604051630bc011ff60e21b815260040160405180910390fd5b60685463ffffffff8681166101009092041614610d26576040516302caf51760e11b815260040160405180910390fd5b610d5a8c8c8c8c8c610d5560008e8e8e8e8e8e8e604051610d48929190613426565b6040518091039020610791565b611f68565b6001600160a01b038616610e9257606f546001600160a01b0316610e295760006001600160a01b03851684825b6040519080825280601f01601f191660200182016040528015610db1576020820181803683370190505b50604051610dbf9190613436565b60006040518083038185875af1925050503d8060008114610dfc576040519150601f19603f3d011682016040523d82523d6000602084013e610e01565b606091505b5050905080610e2357604051630ce8f45160e31b815260040160405180910390fd5b5061117a565b606f546040516340c10f1960e01b81526001600160a01b03909116906340c10f1990610e5b90879087906004016133e3565b600060405180830381600087803b158015610e7557600080fd5b505af1158015610e89573d6000803e3d6000fd5b5050505061117a565b606d546001600160a01b038781169116148015610ec05750606d5463ffffffff888116600160a01b90920416145b15610ed85760006001600160a01b0385168482610d87565b60685463ffffffff610100909104811690881603610f0957610f046001600160a01b03871685856120c7565b61117a565b60008787604051602001610f1e9291906132d2565b60408051601f1981840301815291815281516020928301206000818152606a9093529120549091506001600160a01b031680611116576000610f968386868080601f01602080910402602001604051908101604052809392919081815260200183838082843760009201919091525061212292505050565b6040516340c10f1
960e01b81529091506001600160a01b038216906340c10f1990610fc7908a908a906004016133e3565b600060405180830381600087803b158015610fe157600080fd5b505af1158015610ff5573d6000803e3d6000fd5b5050505080606a600085815260200190815260200160002060006101000a8154816001600160a01b0302191690836001600160a01b0316021790555060405180604001604052808b63ffffffff1681526020018a6001600160a01b0316815250606b6000836001600160a01b03166001600160a01b0316815260200190815260200160002060008201518160000160006101000a81548163ffffffff021916908363ffffffff16021790555060208201518160000160046101000a8154816001600160a01b0302191690836001600160a01b031602179055509050507f490e59a1701b938786ac72570a1efeac994a3dbe96e2e883e19e902ace6e6a398a8a83888860405161110895949392919061347b565b60405180910390a150611177565b6040516340c10f1960e01b81526001600160a01b038216906340c10f199061114490899089906004016133e3565b600060405180830381600087803b15801561115e57600080fd5b505af1158015611172573d6000803e3d6000fd5b505050505b50505b7f1df3f2a973a00d6635911755c260704e95e8a5876997546798770f76396fda4d8a888887876040516111b19594939291906134b4565b60405180910390a1505050505050505050505050565b60685460ff16156111eb57604051630bc011ff60e21b815260040160405180910390fd5b6111f361219e565b60685463ffffffff610100909104811690881603611224576040516302caf51760e11b815260040160405180910390fd5b6000806060876001600160a01b03881661130a578834146112585760405163b89240f560e01b815260040160405180910390fd5b606d54606e80546001600160a01b0383169650600160a01b90920463ffffffff16945090611285906132fc565b80601f01602080910402602001604051908101604052809291908181526020018280546112b1906132fc565b80156112fe5780601f106112d3576101008083540402835291602001916112fe565b820191906000526020600020905b8154815290600101906020018083116112e157829003601f168201915b50505050509150611596565b34156113295760405163798ee6f160e01b815260040160405180910390fd5b606f546001600160a01b03908116908916036113a457604051632770a7eb60e21b81526001600160a01b03891690639dc29fac9061136d9033908d906004016133e3565b600060405180830381600087803b15801561138757600080f
d5b505af115801561139b573d6000803e3d6000fd5b50505050611596565b6001600160a01b038089166000908152606b602090815260409182902082518084019093525463ffffffff81168352600160201b9004909216918101829052901561145c57604051632770a7eb60e21b81526001600160a01b038a1690639dc29fac906114179033908e906004016133e3565b600060405180830381600087803b15801561143157600080fd5b505af1158015611445573d6000803e3d6000fd5b505050508060200151945080600001519350611589565b851561146e5761146e898b89896121f7565b6040516370a0823160e01b81526000906001600160a01b038b16906370a082319061149d903090600401612b9c565b602060405180830381865afa1580156114ba573d6000803e3d6000fd5b505050506040513d601f19601f820116820180604052508101906114de91906134e6565b90506114f56001600160a01b038b1633308e61253d565b6040516370a0823160e01b81526000906001600160a01b038c16906370a0823190611524903090600401612b9c565b602060405180830381865afa158015611541573d6000803e3d6000fd5b505050506040513d601f19601f8201168201806040525081019061156591906134e6565b905061157182826134ff565b6068548c9850610100900463ffffffff169650935050505b61159289610c04565b9250505b7f501781209a1f8899323b96b4ef08b168df93e0a90c673d1e4cce39366cb62f9b600084868e8e86886053546040516115d6989796959493929190613512565b60405180910390a16115fd6115f8600085878f8f878980519060200120610791565b612575565b861561160b5761160b611ecf565b5050505061161860018055565b50505050505050565b606c546001600160a01b0316331461164c57604051631736745960e31b815260040160405180910390fd5b610640612660565b60685460ff161561167857604051630bc011ff60e21b815260040160405180910390fd5b60685463ffffffff86811661010090920416146116a8576040516302caf51760e11b815260040160405180910390fd5b6116ca8c8c8c8c8c610d5560018e8e8e8e8e8e8e604051610d48929190613426565b606f546000906001600160a01b031661178157846001600160a01b031684888a86866040516024016116ff949392919061357d565b60408051601f198184030181529181526020820180516001600160e01b0316630c035af960e11b179052516117349190613436565b60006040518083038185875af1925050503d8060008114611771576040519150601f19603f3d011682016040523d82523d6000602084013e611
776565b606091505b505080915050611883565b606f546040516340c10f1960e01b81526001600160a01b03909116906340c10f19906117b390889088906004016133e3565b600060405180830381600087803b1580156117cd57600080fd5b505af11580156117e1573d6000803e3d6000fd5b50505050846001600160a01b031687898585604051602401611806949392919061357d565b60408051601f198184030181529181526020820180516001600160e01b0316630c035af960e11b1790525161183b9190613436565b6000604051808303816000865af19150503d8060008114611878576040519150601f19603f3d011682016040523d82523d6000602084013e61187d565b606091505b50909150505b806118a1576040516337e391c360e01b815260040160405180910390fd5b7f1df3f2a973a00d6635911755c260704e95e8a5876997546798770f76396fda4d8b898988886040516118d89594939291906134b4565b60405180910390a150505050505050505050505050565b600054610100900460ff161580801561190f5750600054600160ff909116105b806119295750303b158015611929575060005460ff166001145b6119915760405162461bcd60e51b815260206004820152602e60248201527f496e697469616c697a61626c653a20636f6e747261637420697320616c72656160448201526d191e481a5b9a5d1a585b1a5e995960921b60648201526084015b60405180910390fd5b6000805460ff1916600117905580156119b4576000805461ff0019166101001790555b60688054610100600160c81b03191661010063ffffffff8a160265010000000000600160c81b03191617600160281b6001600160a01b038781169190910291909117909155606c80546001600160a01b0319168583161790558616611a3d5763ffffffff851615611a3857604051630d43a60960e11b815260040160405180910390fd5b611b0c565b606d805463ffffffff8716600160a01b026001600160c01b03199091166001600160a01b03891617179055606e611a7483826135fe565b50611aeb6000801b6012604051602001611ad791906060808252600d908201526c2bb930b83832b21022ba3432b960991b608082015260a060208201819052600490820152630ae8aa8960e31b60c082015260ff91909116604082015260e00190565b604051602081830303815290604052612122565b606f80546001600160a01b0319166001600160a01b03929092169190911790555b611b146126b8565b8015611618576000805461ff0019169055604051600181527f7f26b83ff96e1f2b6a682f133852f6798a09c465da95921460cefb384740249890602001604051809
10390a150505050505050565b600081611b70868686610965565b1495945050505050565b60685460ff1615611b9e57604051630bc011ff60e21b815260040160405180910390fd5b6068805460ff191660011790556040517f2261efe5aef6fedc1fd1550b25facc9181745623049c7901287030b9ad1a549790600090a1565b60685463ffffffff610100909104811690871603611c07576040516302caf51760e11b815260040160405180910390fd5b7f501781209a1f8899323b96b4ef08b168df93e0a90c673d1e4cce39366cb62f9b6001606860019054906101000a900463ffffffff16338989898888605354604051611c5b999897969594939291906136bd565b60405180910390a1611c926115f86001606860019054906101000a900463ffffffff16338a8a8a8989604051610d48929190613426565b8215610bfc57610bfc611ecf565b60408051600481526024810182526020810180516001600160e01b03166306fdde0360e01b179052905160609160009182916001600160a01b03861691611ce79190613436565b600060405180830381855afa9150503d8060008114611d22576040519150601f19603f3d011682016040523d82523d6000602084013e611d27565b606091505b509150915081611d5657604051806040016040528060078152602001664e4f5f4e414d4560c81b81525061091d565b61091d816126e7565b60408051600481526024810182526020810180516001600160e01b03166395d89b4160e01b179052905160609160009182916001600160a01b03861691611da69190613436565b600060405180830381855afa9150503d8060008114611de1576040519150601f19603f3d011682016040523d82523d6000602084013e611de6565b606091505b509150915081611d5657604051806040016040528060098152602001681393d7d4d6535093d360ba1b81525061091d565b60408051600481526024810182526020810180516001600160e01b031663313ce56760e01b1790529051600091829182916001600160a01b03861691611e5d9190613436565b600060405180830381855afa9150503d8060008114611e98576040519150601f19603f3d011682016040523d82523d6000602084013e611e9d565b606091505b5091509150818015611eb0575080516020145b611ebb57601261091d565b8080602001905181019061091d919061372a565b6053546068805463ffffffff909216600160c81b0263ffffffff60c81b1990921691909117908190556001600160a01b03600160281b909104166333d6247d611f1661081e565b6040518263ffffffff1660e01b8152600401611f3491815260200190565b6000604051808303816
00087803b158015611f4e57600080fd5b505af1158015611f62573d6000803e3d6000fd5b50505050565b606854604080516020808201879052818301869052825180830384018152606083019384905280519101206312bd9b1960e11b9092526064810191909152600091600160281b90046001600160a01b03169063257b3632906084016020604051808303816000875af1158015611fe2573d6000803e3d6000fd5b505050506040513d601f19601f8201168201806040525081019061200691906134e6565b90508060000361202857604051622f6fad60e01b815260040160405180910390fd5b600080600160401b87161561206857869150612046848a8489611b62565b612063576040516338105f3b60e21b815260040160405180910390fd5b6120b2565b602087901c612078816001613747565b915087925061209361208b868c86610965565b8a8389611b62565b6120b0576040516338105f3b60e21b815260040160405180910390fd5b505b6120bc8282612875565b505050505050505050565b61211d8363a9059cbb60e01b84846040516024016120e69291906133e3565b60408051601f198184030181529190526020810180516001600160e01b03166001600160e01b03199093169290921790915261291d565b505050565b60008060405180611ba00160405280611b668152602001613a7a611b669139836040516020016121539291906133b4565b6040516020818303038152906040529050838151602083016000f591506001600160a01b038216612197576040516305f7d84960e51b815260040160405180910390fd5b5092915050565b6002600154036121f05760405162461bcd60e51b815260206004820152601f60248201527f5265656e7472616e637947756172643a207265656e7472616e742063616c6c006044820152606401611988565b6002600155565b60006122066004828486613764565b61220f9161378e565b9050632afa533160e01b6001600160e01b03198216016123a357600080808080808061223e896004818d613764565b81019061224b91906137be565b9650965096509650965096509650336001600160a01b0316876001600160a01b03161461228b5760405163912ecce760e01b815260040160405180910390fd5b6001600160a01b03861630146122b45760405163750643af60e01b815260040160405180910390fd5b8a85146122d4576040516303fffc4b60e01b815260040160405180910390fd5b604080516001600160a01b0389811660248301528881166044830152606482018890526084820187905260ff861660a483015260c4820185905260e48083018590528351808403909101815261010490920
183526020820180516001600160e01b031663d505accf60e01b1790529151918e16916123529190613436565b6000604051808303816000865af19150503d806000811461238f576040519150601f19603f3d011682016040523d82523d6000602084013e612394565b606091505b505050505050505050506106fc565b6001600160e01b031981166323f2ebc360e21b146123d457604051637141605d60e11b815260040160405180910390fd5b6000808080808080806123ea8a6004818e613764565b8101906123f79190613812565b97509750975097509750975097509750336001600160a01b0316886001600160a01b0316146124395760405163912ecce760e01b815260040160405180910390fd5b6001600160a01b03871630146124625760405163750643af60e01b815260040160405180910390fd5b604080516001600160a01b038a811660248301528981166044830152606482018990526084820188905286151560a483015260ff861660c483015260e482018590526101048083018590528351808403909101815261012490920183526020820180516001600160e01b03166323f2ebc360e21b1790529151918f16916124e99190613436565b6000604051808303816000865af19150503d8060008114612526576040519150601f19603f3d011682016040523d82523d6000602084013e61252b565b606091505b50505050505050505050505050505050565b6040516001600160a01b0380851660248301528316604482015260648101829052611f629085906323b872dd60e01b906084016120e6565b80600161258460206002613979565b61258e91906134ff565b605354106125af576040516377ae67b360e11b815260040160405180910390fd5b60006053600081546125c090613362565b9182905550905060005b6020811015612651578082901c6001166001036125fd5782603382602081106125f5576125f5613336565b015550505050565b6033816020811061261057612610613336565b01546040805160208101929092528101849052606001604051602081830303815290604052805190602001209250808061264990613362565b9150506125ca565b5061211d613985565b60018055565b60685460ff1661268357604051635386698160e01b815260040160405180910390fd5b6068805460ff191690556040517f1e5e34eea33501aecf2ebec9fe0e884a40804275ea7fe10b2ba084c8374308b390600090a1565b600054610100900460ff166126df5760405162461bcd60e51b81526004016119889061399b565b6106406129ef565b60606040825110612706578180602001905181019061068d91906139e6565b815160200361284
25760005b602081108015612741575082818151811061272f5761272f613336565b01602001516001600160f81b03191615155b15612758578061275081613362565b915050612712565b806000036127905750506040805180820190915260128152714e4f545f56414c49445f454e434f44494e4760701b6020820152919050565b6000816001600160401b038111156127aa576127aa612e47565b6040519080825280601f01601f1916602001820160405280156127d4576020820181803683370190505b50905060005b8281101561283a578481815181106127f4576127f4613336565b602001015160f81c60f81b82828151811061281157612811613336565b60200101906001600160f81b031916908160001a9053508061283281613362565b9150506127da565b509392505050565b50506040805180820190915260128152714e4f545f56414c49445f454e434f44494e4760701b602082015290565b919050565b606854600090610100900463ffffffff16158015612899575063ffffffff82166001145b156128ab575063ffffffff82166128d2565b6128bf600160201b63ffffffff84166133fc565b6128cf9063ffffffff8516613413565b90505b600881901c60008181526069602052604081208054600160ff861690811b9182189283905592909190818316900361161857604051630c8d9eab60e31b815260040160405180910390fd5b6000612972826040518060400160405280602081526020017f5361666545524332303a206c6f772d6c6576656c2063616c6c206661696c6564815250856001600160a01b0316612a169092919063ffffffff16565b80519091501561211d57808060200190518101906129909190613a5c565b61211d5760405162461bcd60e51b815260206004820152602a60248201527f5361666545524332303a204552433230206f7065726174696f6e20646964206e6044820152691bdd081cdd58d8d9595960b21b6064820152608401611988565b600054610100900460ff1661265a5760405162461bcd60e51b81526004016119889061399b565b606061091d848460008585600080866001600160a01b03168587604051612a3d9190613436565b60006040518083038185875af1925050503d8060008114612a7a576040519150601f19603f3d011682016040523d82523d6000602084013e612a7f565b606091505b5091509150612a9087838387612a9b565b979650505050505050565b60608315612b0a578251600003612b03576001600160a01b0385163b612b035760405162461bcd60e51b815260206004820152601d60248201527f416464726573733a2063616c6c20746f206e6f6e2d636f6e747261637400000
06044820152606401611988565b508161091d565b61091d8383815115612b1f5781518083602001fd5b8060405162461bcd60e51b81526004016119889190612ccf565b803563ffffffff8116811461287057600080fd5b6001600160a01b0381168114612b6257600080fd5b50565b60008060408385031215612b7857600080fd5b612b8183612b39565b91506020830135612b9181612b4d565b809150509250929050565b6001600160a01b0391909116815260200190565b8015158114612b6257600080fd5b60008083601f840112612bd057600080fd5b5081356001600160401b03811115612be757600080fd5b602083019150836020828501011115612bff57600080fd5b9250929050565b600080600080600060808688031215612c1e57600080fd5b612c2786612b39565b94506020860135612c3781612b4d565b93506040860135612c4781612bb0565b925060608601356001600160401b03811115612c6257600080fd5b612c6e88828901612bbe565b969995985093965092949392505050565b60005b83811015612c9a578181015183820152602001612c82565b50506000910152565b60008151808452612cbb816020860160208601612c7f565b601f01601f19169290920160200192915050565b602081526000612ce26020830184612ca3565b9392505050565b600060208284031215612cfb57600080fd5b8135612ce281612b4d565b60ff81168114612b6257600080fd5b600080600080600080600060e0888a031215612d3057600080fd5b8735612d3b81612d06565b9650612d4960208901612b39565b95506040880135612d5981612b4d565b9450612d6760608901612b39565b93506080880135612d7781612b4d565b9699959850939692959460a0840135945060c09093013592915050565b600080600060608486031215612da957600080fd5b612db284612b39565b92506020840135612dc281612b4d565b91506040840135612dd281612b4d565b809150509250925092565b600060208284031215612def57600080fd5b5035919050565b80610400810183101561068d57600080fd5b60008060006104408486031215612e1e57600080fd5b83359250612e2f8560208601612df6565b9150612e3e6104208501612b39565b90509250925092565b634e487b7160e01b600052604160045260246000fd5b604051601f8201601f191681016001600160401b0381118282101715612e8557612e85612e47565b604052919050565b60006001600160401b03821115612ea657612ea6612e47565b50601f01601f191660200190565b6000612ec7612ec284612e8d565b612e5d565b9050828152838383011115612edb57600080fd5b82826
0208301376000602084830101529392505050565b600082601f830112612f0357600080fd5b612ce283833560208501612eb4565b600080600080600060a08688031215612f2a57600080fd5b612f3386612b39565b94506020860135612f4381612b4d565b935060408601356001600160401b0380821115612f5f57600080fd5b612f6b89838a01612ef2565b94506060880135915080821115612f8157600080fd5b50612f8e88828901612ef2565b9250506080860135612f9f81612d06565b809150509295509295909350565b60008060008060008060a08789031215612fc657600080fd5b612fcf87612b39565b95506020870135612fdf81612b4d565b9450604087013593506060870135612ff681612bb0565b925060808701356001600160401b0381111561301157600080fd5b61301d89828a01612bbe565b979a9699509497509295939492505050565b6000806040838503121561304257600080fd5b61304b83612b39565b915061305960208401612b39565b90509250929050565b6000806000806000806000806000806000806109208d8f03121561308557600080fd5b61308f8e8e612df6565b9b5061309f8e6104008f01612df6565b9a506108008d013599506108208d013598506108408d013597506130c66108608e01612b39565b96506130d66108808e0135612b4d565b6108808d013595506130eb6108a08e01612b39565b94506130fb6108c08e0135612b4d565b6108c08d013593506108e08d013592506001600160401b036109008e0135111561312457600080fd5b6131358e6109008f01358f01612bbe565b81935080925050509295989b509295989b509295989b565b600080600080600080600060c0888a03121561316857600080fd5b61317188612b39565b9650602088013561318181612b4d565b955060408801359450606088013561319881612b4d565b935060808801356131a881612bb0565b925060a08801356001600160401b038111156131c357600080fd5b6131cf8a828b01612bbe565b989b979a50959850939692959293505050565b60008060008060008060c087890312156131fb57600080fd5b61320487612b39565b9550602087013561321481612b4d565b945061322260408801612b39565b9350606087013561323281612b4d565b9250608087013561324281612b4d565b915060a08701356001600160401b0381111561325d57600080fd5b8701601f8101891361326e57600080fd5b61327d89823560208401612eb4565b9150509295509295509295565b60008060008061046085870312156132a157600080fd5b843593506132b28660208701612df6565b92506132c16104208601612b39565b939692955
092936104400135925050565b60e09290921b6001600160e01b031916825260601b6001600160601b031916600482015260180190565b600181811c9082168061331057607f821691505b60208210810361333057634e487b7160e01b600052602260045260246000fd5b50919050565b634e487b7160e01b600052603260045260246000fd5b634e487b7160e01b600052601160045260246000fd5b6000600182016133745761337461334c565b5060010190565b60608152600061338e6060830186612ca3565b82810360208401526133a08186612ca3565b91505060ff83166040830152949350505050565b600083516133c6818460208801612c7f565b8351908301906133da818360208801612c7f565b01949350505050565b6001600160a01b03929092168252602082015260400190565b808202811582820484141761068d5761068d61334c565b8082018082111561068d5761068d61334c565b8183823760009101908152919050565b60008251613448818460208701612c7f565b9190910192915050565b81835281816020850137506000828201602090810191909152601f909101601f19169091010190565b63ffffffff861681526001600160a01b03858116602083015284166040820152608060608201819052600090612a909083018486613452565b94855263ffffffff9390931660208501526001600160a01b039182166040850152166060830152608082015260a00190565b6000602082840312156134f857600080fd5b5051919050565b8181038181111561068d5761068d61334c565b60ff8916815263ffffffff88811660208301526001600160a01b03888116604084015287821660608401528616608083015260a0820185905261010060c0830181905260009161356484830187612ca3565b925080851660e085015250509998505050505050505050565b6001600160a01b038516815263ffffffff841660208201526060604082018190526000906135ae9083018486613452565b9695505050505050565b601f82111561211d57600081815260208120601f850160051c810160208610156135df5750805b601f850160051c820191505b81811015610bfc578281556001016135eb565b81516001600160401b0381111561361757613617612e47565b61362b8161362584546132fc565b846135b8565b602080601f83116001811461366057600084156136485750858301515b600019600386901b1c1916600185901b178555610bfc565b600085815260208120601f198616915b8281101561368f57888601518255948401946001909101908401613670565b50858210156136ad5787850151600019600388901b60f8161c191681555b5
050505050600190811b01905550565b60ff8a16815263ffffffff89811660208301526001600160a01b03898116604084015288821660608401528716608083015260a0820186905261010060c083018190526000916137108483018789613452565b925080851660e085015250509a9950505050505050505050565b60006020828403121561373c57600080fd5b8151612ce281612d06565b63ffffffff8181168382160190808211156121975761219761334c565b6000808585111561377457600080fd5b8386111561378157600080fd5b5050820193919092039150565b6001600160e01b031981358181169160048510156137b65780818660040360031b1b83161692505b505092915050565b600080600080600080600060e0888a0312156137d957600080fd5b87356137e481612b4d565b965060208801356137f481612b4d565b955060408801359450606088013593506080880135612d7781612d06565b600080600080600080600080610100898b03121561382f57600080fd5b883561383a81612b4d565b9750602089013561384a81612b4d565b96506040890135955060608901359450608089013561386881612bb0565b935060a089013561387881612d06565b979a969950949793969295929450505060c08201359160e0013590565b600181815b808511156138d05781600019048211156138b6576138b661334c565b808516156138c357918102915b93841c939080029061389a565b509250929050565b6000826138e75750600161068d565b816138f45750600061068d565b816001811461390a576002811461391457613930565b600191505061068d565b60ff8411156139255761392561334c565b50506001821b61068d565b5060208310610133831016604e8410600b8410161715613953575081810a61068d565b61395d8383613895565b80600019048211156139715761397161334c565b029392505050565b6000612ce283836138d8565b634e487b7160e01b600052600160045260246000fd5b6020808252602b908201527f496e697469616c697a61626c653a20636f6e7472616374206973206e6f74206960408201526a6e697469616c697a696e6760a81b606082015260800190565b6000602082840312156139f857600080fd5b81516001600160401b03811115613a0e57600080fd5b8201601f81018413613a1f57600080fd5b8051613a2d612ec282612e8d565b818152856020838501011115613a4257600080fd5b613a53826020830160208601612c7f565b95945050505050565b600060208284031215613a6e57600080fd5b8151612ce281612bb056fe6101006040523480156200001257600080fd5b5060405162001b66380
38062001b6683398101604081905262000035916200028d565b82826003620000458382620003a1565b506004620000548282620003a1565b50503360c0525060ff811660e052466080819052620000739062000080565b60a052506200046d915050565b60007f8b73c3c69bb8fe3d512ecc4cf759cc79239f7b179b0ffacaa9a75d522b39400f620000ad6200012e565b805160209182012060408051808201825260018152603160f81b90840152805192830193909352918101919091527fc89efdaa54c0f20c7adf612882df0950f5a951637e0307cdcb4c672f298b8bc66060820152608081018390523060a082015260c001604051602081830303815290604052805190602001209050919050565b6060600380546200013f9062000312565b80601f01602080910402602001604051908101604052809291908181526020018280546200016d9062000312565b8015620001be5780601f106200019257610100808354040283529160200191620001be565b820191906000526020600020905b815481529060010190602001808311620001a057829003601f168201915b5050505050905090565b634e487b7160e01b600052604160045260246000fd5b600082601f830112620001f057600080fd5b81516001600160401b03808211156200020d576200020d620001c8565b604051601f8301601f19908116603f01168101908282118183101715620002385762000238620001c8565b816040528381526020925086838588010111156200025557600080fd5b600091505b838210156200027957858201830151818301840152908201906200025a565b600093810190920192909252949350505050565b600080600060608486031215620002a357600080fd5b83516001600160401b0380821115620002bb57600080fd5b620002c987838801620001de565b94506020860151915080821115620002e057600080fd5b50620002ef86828701620001de565b925050604084015160ff811681146200030757600080fd5b809150509250925092565b600181811c908216806200032757607f821691505b6020821081036200034857634e487b7160e01b600052602260045260246000fd5b50919050565b601f8211156200039c57600081815260208120601f850160051c81016020861015620003775750805b601f850160051c820191505b81811015620003985782815560010162000383565b5050505b505050565b81516001600160401b03811115620003bd57620003bd620001c8565b620003d581620003ce845462000312565b846200034e565b602080601f8311600181146200040d5760008415620003f45750858301515b600019600386901b1c1916600185901
b17855562000398565b600085815260208120601f198616915b828110156200043e578886015182559484019460019091019084016200041d565b50858210156200045d5787850151600019600388901b60f8161c191681555b5050505050600190811b01905550565b60805160a05160c05160e0516116aa620004bc6000396000610237015260008181610307015281816105c001526106a70152600061053a015260008181610379015261050401526116aa6000f3fe608060405234801561001057600080fd5b50600436106101775760003560e01c806370a08231116100d8578063a457c2d71161008c578063d505accf11610066578063d505accf1461039b578063dd62ed3e146103ae578063ffa1ad74146103f457600080fd5b8063a457c2d71461034e578063a9059cbb14610361578063cd0d00961461037457600080fd5b806395d89b41116100bd57806395d89b41146102e75780639dc29fac146102ef578063a3c573eb1461030257600080fd5b806370a08231146102915780637ecebe00146102c757600080fd5b806330adf81f1161012f5780633644e515116101145780633644e51514610261578063395093511461026957806340c10f191461027c57600080fd5b806330adf81f14610209578063313ce5671461023057600080fd5b806318160ddd1161016057806318160ddd146101bd57806320606b70146101cf57806323b872dd146101f657600080fd5b806306fdde031461017c578063095ea7b31461019a575b600080fd5b610184610430565b60405161019191906113e4565b60405180910390f35b6101ad6101a8366004611479565b6104c2565b6040519015158152602001610191565b6002545b604051908152602001610191565b6101c17f8b73c3c69bb8fe3d512ecc4cf759cc79239f7b179b0ffacaa9a75d522b39400f81565b6101ad6102043660046114a3565b6104dc565b6101c17f6e71edae12b1b97f4d1f60370fef10105fa2faae0126114a169c64845d6126c981565b60405160ff7f0000000000000000000000000000000000000000000000000000000000000000168152602001610191565b6101c1610500565b6101ad610277366004611479565b61055c565b61028f61028a366004611479565b6105a8565b005b6101c161029f3660046114df565b73ffffffffffffffffffffffffffffffffffffffff1660009081526020819052604090205490565b6101c16102d53660046114df565b60056020526000908152604090205481565b610184610680565b61028f6102fd366004611479565b61068f565b6103297f000000000000000000000000000000000000000000000000000000000000000081565b60405173fffff
fffffffffffffffffffffffffffffffffff9091168152602001610191565b6101ad61035c366004611479565b61075e565b6101ad61036f366004611479565b61082f565b6101c17f000000000000000000000000000000000000000000000000000000000000000081565b61028f6103a9366004611501565b61083d565b6101c16103bc366004611574565b73ffffffffffffffffffffffffffffffffffffffff918216600090815260016020908152604080832093909416825291909152205490565b6101846040518060400160405280600181526020017f310000000000000000000000000000000000000000000000000000000000000081525081565b60606003805461043f906115a7565b80601f016020809104026020016040519081016040528092919081815260200182805461046b906115a7565b80156104b85780601f1061048d576101008083540402835291602001916104b8565b820191906000526020600020905b81548152906001019060200180831161049b57829003601f168201915b5050505050905090565b6000336104d0818585610b73565b60019150505b92915050565b6000336104ea858285610d27565b6104f5858585610dfe565b506001949350505050565b60007f00000000000000000000000000000000000000000000000000000000000000004614610537576105324661106d565b905090565b507f000000000000000000000000000000000000000000000000000000000000000090565b33600081815260016020908152604080832073ffffffffffffffffffffffffffffffffffffffff871684529091528120549091906104d090829086906105a3908790611629565b610b73565b3373ffffffffffffffffffffffffffffffffffffffff7f00000000000000000000000000000000000000000000000000000000000000001614610672576040517f08c379a000000000000000000000000000000000000000000000000000000000815260206004820152603060248201527f546f6b656e577261707065643a3a6f6e6c794272696467653a204e6f7420506f60448201527f6c79676f6e5a6b45564d4272696467650000000000000000000000000000000060648201526084015b60405180910390fd5b61067c8282611135565b5050565b60606004805461043f906115a7565b3373ffffffffffffffffffffffffffffffffffffffff7f00000000000000000000000000000000000000000000000000000000000000001614610754576040517f08c379a000000000000000000000000000000000000000000000000000000000815260206004820152603060248201527f546f6b656e577261707065643a3a6f6e6c79427269646
7653a204e6f7420506f60448201527f6c79676f6e5a6b45564d427269646765000000000000000000000000000000006064820152608401610669565b61067c8282611228565b33600081815260016020908152604080832073ffffffffffffffffffffffffffffffffffffffff8716845290915281205490919083811015610822576040517f08c379a000000000000000000000000000000000000000000000000000000000815260206004820152602560248201527f45524332303a2064656372656173656420616c6c6f77616e63652062656c6f7760448201527f207a65726f0000000000000000000000000000000000000000000000000000006064820152608401610669565b6104f58286868403610b73565b6000336104d0818585610dfe565b834211156108cc576040517f08c379a0000000000000000000000000000000000000000000000000000000008152602060048201526024808201527f546f6b656e577261707065643a3a7065726d69743a204578706972656420706560448201527f726d6974000000000000000000000000000000000000000000000000000000006064820152608401610669565b73ffffffffffffffffffffffffffffffffffffffff8716600090815260056020526040812080547f6e71edae12b1b97f4d1f60370fef10105fa2faae0126114a169c64845d6126c9918a918a918a9190866109268361163c565b9091555060408051602081019690965273ffffffffffffffffffffffffffffffffffffffff94851690860152929091166060840152608083015260a082015260c0810186905260e0016040516020818303038152906040528051906020012090506000610991610500565b6040517f19010000000000000000000000000000000000000000000000000000000000006020820152602281019190915260428101839052606201604080517fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe08184030181528282528051602091820120600080855291840180845281905260ff89169284019290925260608301879052608083018690529092509060019060a0016020604051602081039080840390855afa158015610a55573d6000803e3d6000fd5b50506040517fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe0015191505073ffffffffffffffffffffffffffffffffffffffff811615801590610ad057508973ffffffffffffffffffffffffffffffffffffffff168173ffffffffffffffffffffffffffffffffffffffff16145b610b5c576040517f08c379a000000000000000000000000000000000000000000000000000000000815260206004820
152602760248201527f546f6b656e577261707065643a3a7065726d69743a20496e76616c696420736960448201527f676e6174757265000000000000000000000000000000000000000000000000006064820152608401610669565b610b678a8a8a610b73565b50505050505050505050565b73ffffffffffffffffffffffffffffffffffffffff8316610c15576040517f08c379a0000000000000000000000000000000000000000000000000000000008152602060048201526024808201527f45524332303a20617070726f76652066726f6d20746865207a65726f2061646460448201527f72657373000000000000000000000000000000000000000000000000000000006064820152608401610669565b73ffffffffffffffffffffffffffffffffffffffff8216610cb8576040517f08c379a000000000000000000000000000000000000000000000000000000000815260206004820152602260248201527f45524332303a20617070726f766520746f20746865207a65726f20616464726560448201527f73730000000000000000000000000000000000000000000000000000000000006064820152608401610669565b73ffffffffffffffffffffffffffffffffffffffff83811660008181526001602090815260408083209487168084529482529182902085905590518481527f8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e5b200ac8c7c3b92591015b60405180910390a3505050565b73ffffffffffffffffffffffffffffffffffffffff8381166000908152600160209081526040808320938616835292905220547fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff8114610df85781811015610deb576040517f08c379a000000000000000000000000000000000000000000000000000000000815260206004820152601d60248201527f45524332303a20696e73756666696369656e7420616c6c6f77616e63650000006044820152606401610669565b610df88484848403610b73565b50505050565b73ffffffffffffffffffffffffffffffffffffffff8316610ea1576040517f08c379a000000000000000000000000000000000000000000000000000000000815260206004820152602560248201527f45524332303a207472616e736665722066726f6d20746865207a65726f20616460448201527f64726573730000000000000000000000000000000000000000000000000000006064820152608401610669565b73ffffffffffffffffffffffffffffffffffffffff8216610f44576040517f08c379a000000000000000000000000000000000000000000000000000000000815260206004820
152602360248201527f45524332303a207472616e7366657220746f20746865207a65726f206164647260448201527f65737300000000000000000000000000000000000000000000000000000000006064820152608401610669565b73ffffffffffffffffffffffffffffffffffffffff831660009081526020819052604090205481811015610ffa576040517f08c379a000000000000000000000000000000000000000000000000000000000815260206004820152602660248201527f45524332303a207472616e7366657220616d6f756e742065786365656473206260448201527f616c616e636500000000000000000000000000000000000000000000000000006064820152608401610669565b73ffffffffffffffffffffffffffffffffffffffff848116600081815260208181526040808320878703905593871680835291849020805487019055925185815290927fddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef910160405180910390a3610df8565b60007f8b73c3c69bb8fe3d512ecc4cf759cc79239f7b179b0ffacaa9a75d522b39400f611098610430565b8051602091820120604080518082018252600181527f310000000000000000000000000000000000000000000000000000000000000090840152805192830193909352918101919091527fc89efdaa54c0f20c7adf612882df0950f5a951637e0307cdcb4c672f298b8bc66060820152608081018390523060a082015260c001604051602081830303815290604052805190602001209050919050565b73ffffffffffffffffffffffffffffffffffffffff82166111b2576040517f08c379a000000000000000000000000000000000000000000000000000000000815260206004820152601f60248201527f45524332303a206d696e7420746f20746865207a65726f2061646472657373006044820152606401610669565b80600260008282546111c49190611629565b909155505073ffffffffffffffffffffffffffffffffffffffff8216600081815260208181526040808320805486019055518481527fddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef910160405180910390a35050565b73ffffffffffffffffffffffffffffffffffffffff82166112cb576040517f08c379a000000000000000000000000000000000000000000000000000000000815260206004820152602160248201527f45524332303a206275726e2066726f6d20746865207a65726f2061646472657360448201527f73000000000000000000000000000000000000000000000000000000000000006064820152608401610669565b73fffffff
fffffffffffffffffffffffffffffffff821660009081526020819052604090205481811015611381576040517f08c379a000000000000000000000000000000000000000000000000000000000815260206004820152602260248201527f45524332303a206275726e20616d6f756e7420657863656564732062616c616e60448201527f63650000000000000000000000000000000000000000000000000000000000006064820152608401610669565b73ffffffffffffffffffffffffffffffffffffffff83166000818152602081815260408083208686039055600280548790039055518581529192917fddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef9101610d1a565b600060208083528351808285015260005b81811015611411578581018301518582016040015282016113f5565b5060006040828601015260407fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe0601f8301168501019250505092915050565b803573ffffffffffffffffffffffffffffffffffffffff8116811461147457600080fd5b919050565b6000806040838503121561148c57600080fd5b61149583611450565b946020939093013593505050565b6000806000606084860312156114b857600080fd5b6114c184611450565b92506114cf60208501611450565b9150604084013590509250925092565b6000602082840312156114f157600080fd5b6114fa82611450565b9392505050565b600080600080600080600060e0888a03121561151c57600080fd5b61152588611450565b965061153360208901611450565b95506040880135945060608801359350608088013560ff8116811461155757600080fd5b9699959850939692959460a0840135945060c09093013592915050565b6000806040838503121561158757600080fd5b61159083611450565b915061159e60208401611450565b90509250929050565b600181811c908216806115bb57607f821691505b6020821081036115f4577f4e487b7100000000000000000000000000000000000000000000000000000000600052602260045260246000fd5b50919050565b7f4e487b7100000000000000000000000000000000000000000000000000000000600052601160045260246000fd5b808201808211156104d6576104d66115fa565b60007fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff820361166d5761166d6115fa565b506001019056fea26469706673582212208d88fee561cff7120d381c345cfc534cef8229a272dc5809d4bbb685ad67141164736f6c63430008110033a2646970667358221220914f18d5b
241f0d10b2ebc814aadeee338ad60bad704683e414dad415cb2e14d64736f6c63430008140033" - }, - { - "contractName": "PolygonZkEVMBridge proxy", - "balance": "340282366920938463463374607431768211455", - "nonce": "1", - "address": "0xB7098a13a48EcE087d3DA15b2D28eCE0f89819B8", - "bytecode": "0x60806040526004361061005e5760003560e01c80635c60da1b116100435780635c60da1b146100a85780638f283970146100e6578063f851a440146101065761006d565b80633659cfe6146100755780634f1ef286146100955761006d565b3661006d5761006b61011b565b005b61006b61011b565b34801561008157600080fd5b5061006b61009036600461086f565b610135565b61006b6100a336600461088a565b61017f565b3480156100b457600080fd5b506100bd6101f3565b60405173ffffffffffffffffffffffffffffffffffffffff909116815260200160405180910390f35b3480156100f257600080fd5b5061006b61010136600461086f565b610231565b34801561011257600080fd5b506100bd61025e565b61012361028c565b61013361012e610363565b61036d565b565b61013d610391565b73ffffffffffffffffffffffffffffffffffffffff16330361017757610174816040518060200160405280600081525060006103d1565b50565b61017461011b565b610187610391565b73ffffffffffffffffffffffffffffffffffffffff1633036101eb576101e68383838080601f016020809104026020016040519081016040528093929190818152602001838380828437600092019190915250600192506103d1915050565b505050565b6101e661011b565b60006101fd610391565b73ffffffffffffffffffffffffffffffffffffffff16330361022657610221610363565b905090565b61022e61011b565b90565b610239610391565b73ffffffffffffffffffffffffffffffffffffffff16330361017757610174816103fc565b6000610268610391565b73ffffffffffffffffffffffffffffffffffffffff16330361022657610221610391565b610294610391565b73ffffffffffffffffffffffffffffffffffffffff163303610133576040517f08c379a000000000000000000000000000000000000000000000000000000000815260206004820152604260248201527f5472616e73706172656e745570677261646561626c6550726f78793a2061646d60448201527f696e2063616e6e6f742066616c6c6261636b20746f2070726f7879207461726760648201527f6574000000000000000000000000000000000000000000000000000000000000608482015260a4015b
60405180910390fd5b600061022161045d565b3660008037600080366000845af43d6000803e80801561038c573d6000f35b3d6000fd5b60007fb53127684a568b3173ae13b9f8a6016e243e63b6e8ee1178d6a717850b5d61035b5473ffffffffffffffffffffffffffffffffffffffff16919050565b6103da83610485565b6000825111806103e75750805b156101e6576103f683836104d2565b50505050565b7f7e644d79422f17c01e4894b5f4f588d331ebfa28653d42ae832dc59e38c9798f610425610391565b6040805173ffffffffffffffffffffffffffffffffffffffff928316815291841660208301520160405180910390a1610174816104fe565b60007f360894a13ba1a3210667c828492db98dca3e2076cc3735a920a3ca505d382bbc6103b5565b61048e8161060a565b60405173ffffffffffffffffffffffffffffffffffffffff8216907fbc7cd75a20ee27fd9adebab32041f755214dbc6bffa90cc0225b39da2e5c2d3b90600090a250565b60606104f7838360405180606001604052806027815260200161099f602791396106d5565b9392505050565b73ffffffffffffffffffffffffffffffffffffffff81166105a1576040517f08c379a000000000000000000000000000000000000000000000000000000000815260206004820152602660248201527f455243313936373a206e65772061646d696e20697320746865207a65726f206160448201527f6464726573730000000000000000000000000000000000000000000000000000606482015260840161035a565b807fb53127684a568b3173ae13b9f8a6016e243e63b6e8ee1178d6a717850b5d61035b80547fffffffffffffffffffffffff00000000000000000000000000000000000000001673ffffffffffffffffffffffffffffffffffffffff9290921691909117905550565b73ffffffffffffffffffffffffffffffffffffffff81163b6106ae576040517f08c379a000000000000000000000000000000000000000000000000000000000815260206004820152602d60248201527f455243313936373a206e657720696d706c656d656e746174696f6e206973206e60448201527f6f74206120636f6e747261637400000000000000000000000000000000000000606482015260840161035a565b807f360894a13ba1a3210667c828492db98dca3e2076cc3735a920a3ca505d382bbc6105c4565b60606000808573ffffffffffffffffffffffffffffffffffffffff16856040516106ff9190610931565b600060405180830381855af49150503d806000811461073a576040519150601f19603f3d011682016040523d82523d6000602084013e61073f565b606091505b509150
91506107508683838761075a565b9695505050505050565b606083156107f05782516000036107e95773ffffffffffffffffffffffffffffffffffffffff85163b6107e9576040517f08c379a000000000000000000000000000000000000000000000000000000000815260206004820152601d60248201527f416464726573733a2063616c6c20746f206e6f6e2d636f6e7472616374000000604482015260640161035a565b50816107fa565b6107fa8383610802565b949350505050565b8151156108125781518083602001fd5b806040517f08c379a000000000000000000000000000000000000000000000000000000000815260040161035a919061094d565b803573ffffffffffffffffffffffffffffffffffffffff8116811461086a57600080fd5b919050565b60006020828403121561088157600080fd5b6104f782610846565b60008060006040848603121561089f57600080fd5b6108a884610846565b9250602084013567ffffffffffffffff808211156108c557600080fd5b818601915086601f8301126108d957600080fd5b8135818111156108e857600080fd5b8760208285010111156108fa57600080fd5b6020830194508093505050509250925092565b60005b83811015610928578181015183820152602001610910565b50506000910152565b6000825161094381846020870161090d565b9190910192915050565b602081526000825180602084015261096c81604085016020870161090d565b601f017fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe016919091016040019291505056fe416464726573733a206c6f772d6c6576656c2064656c65676174652063616c6c206661696c6564a2646970667358221220701a0c26bdd76686e63fc3c65e4f28a20ba3ecc8a60246733c0627e679c9804e64736f6c63430008140033", - "storage": { - "0xb53127684a568b3173ae13b9f8a6016e243e63b6e8ee1178d6a717850b5d6103": "0x000000000000000000000000e34fe58dda5b8c6d547e4857e987633aa86a5e90", - "0x360894a13ba1a3210667c828492db98dca3e2076cc3735a920a3ca505d382bbc": "0x000000000000000000000000493732fb136a380920c390a85fc27d79c7b70756" - } - }, - { - "contractName": "PolygonZkEVMGlobalExitRootL2 implementation", - "balance": "0", - "nonce": "1", - "address": "0xDc64a140Aa3E981100a9becA4E685f962f0cF6C9", - "bytecode": 
"0x608060405234801561001057600080fd5b506004361061004c5760003560e01c806301fd904414610051578063257b36321461006d57806333d6247d1461008d578063a3c573eb146100a2575b600080fd5b61005a60015481565b6040519081526020015b60405180910390f35b61005a61007b366004610162565b60006020819052908152604090205481565b6100a061009b366004610162565b6100ee565b005b6100c97f000000000000000000000000b7098a13a48ece087d3da15b2d28ece0f89819b881565b60405173ffffffffffffffffffffffffffffffffffffffff9091168152602001610064565b3373ffffffffffffffffffffffffffffffffffffffff7f000000000000000000000000b7098a13a48ece087d3da15b2d28ece0f89819b8161461015d576040517fb49365dd00000000000000000000000000000000000000000000000000000000815260040160405180910390fd5b600155565b60006020828403121561017457600080fd5b503591905056fea2646970667358221220ea2171e2c85c8bff947affc409ef6fc6a8fe82fb8c174ddeda988651e595d66564736f6c63430008140033" - }, - { - "contractName": "PolygonZkEVMGlobalExitRootL2 proxy", - "balance": "0", - "nonce": "1", - "address": "0xa40d5f56745a118d0906a34e69aec8c0db1cb8fa", - "bytecode": 
"0x60806040523661001357610011610017565b005b6100115b61001f6101b7565b6001600160a01b0316336001600160a01b0316141561016f5760606001600160e01b031960003516631b2ce7f360e11b8114156100655761005e6101ea565b9150610167565b6001600160e01b0319811663278f794360e11b14156100865761005e610241565b6001600160e01b031981166308f2839760e41b14156100a75761005e610287565b6001600160e01b031981166303e1469160e61b14156100c85761005e6102b8565b6001600160e01b03198116635c60da1b60e01b14156100e95761005e6102f8565b60405162461bcd60e51b815260206004820152604260248201527f5472616e73706172656e745570677261646561626c6550726f78793a2061646d60448201527f696e2063616e6e6f742066616c6c6261636b20746f2070726f78792074617267606482015261195d60f21b608482015260a4015b60405180910390fd5b815160208301f35b61017761030c565b565b606061019e83836040518060600160405280602781526020016108576027913961031c565b9392505050565b90565b6001600160a01b03163b151590565b60007fb53127684a568b3173ae13b9f8a6016e243e63b6e8ee1178d6a717850b5d61035b546001600160a01b0316919050565b60606101f4610394565b600061020336600481846106a2565b81019061021091906106e8565b905061022d8160405180602001604052806000815250600061039f565b505060408051602081019091526000815290565b606060008061025336600481846106a2565b8101906102609190610719565b915091506102708282600161039f565b604051806020016040528060008152509250505090565b6060610291610394565b60006102a036600481846106a2565b8101906102ad91906106e8565b905061022d816103cb565b60606102c2610394565b60006102cc6101b7565b604080516001600160a01b03831660208201529192500160405160208183030381529060405291505090565b6060610302610394565b60006102cc610422565b610177610317610422565b610431565b6060600080856001600160a01b0316856040516103399190610807565b600060405180830381855af49150503d8060008114610374576040519150601f19603f3d011682016040523d82523d6000602084013e610379565b606091505b509150915061038a86838387610455565b9695505050505050565b341561017757600080fd5b6103a8836104d3565b6000825111806103b55750805b156103c6576103c48383610179565b505b505050565b7f7e644d79422f17c01e4894b5f4f588d331ebfa28653d42ae832
dc59e38c9798f6103f46101b7565b604080516001600160a01b03928316815291841660208301520160405180910390a161041f81610513565b50565b600061042c6105bc565b905090565b3660008037600080366000845af43d6000803e808015610450573d6000f35b3d6000fd5b606083156104c15782516104ba576001600160a01b0385163b6104ba5760405162461bcd60e51b815260206004820152601d60248201527f416464726573733a2063616c6c20746f206e6f6e2d636f6e7472616374000000604482015260640161015e565b50816104cb565b6104cb83836105e4565b949350505050565b6104dc8161060e565b6040516001600160a01b038216907fbc7cd75a20ee27fd9adebab32041f755214dbc6bffa90cc0225b39da2e5c2d3b90600090a250565b6001600160a01b0381166105785760405162461bcd60e51b815260206004820152602660248201527f455243313936373a206e65772061646d696e20697320746865207a65726f206160448201526564647265737360d01b606482015260840161015e565b807fb53127684a568b3173ae13b9f8a6016e243e63b6e8ee1178d6a717850b5d61035b80546001600160a01b0319166001600160a01b039290921691909117905550565b60007f360894a13ba1a3210667c828492db98dca3e2076cc3735a920a3ca505d382bbc6101db565b8151156105f45781518083602001fd5b8060405162461bcd60e51b815260040161015e9190610823565b6001600160a01b0381163b61067b5760405162461bcd60e51b815260206004820152602d60248201527f455243313936373a206e657720696d706c656d656e746174696f6e206973206e60448201526c1bdd08184818dbdb9d1c9858dd609a1b606482015260840161015e565b807f360894a13ba1a3210667c828492db98dca3e2076cc3735a920a3ca505d382bbc61059b565b600080858511156106b257600080fd5b838611156106bf57600080fd5b5050820193919092039150565b80356001600160a01b03811681146106e357600080fd5b919050565b6000602082840312156106fa57600080fd5b61019e826106cc565b634e487b7160e01b600052604160045260246000fd5b6000806040838503121561072c57600080fd5b610735836106cc565b9150602083013567ffffffffffffffff8082111561075257600080fd5b818501915085601f83011261076657600080fd5b81358181111561077857610778610703565b604051601f8201601f19908116603f011681019083821181831017156107a0576107a0610703565b816040528281528860208487010111156107b957600080fd5b82602086016020830137600060208483010152809
55050505050509250929050565b60005b838110156107f65781810151838201526020016107de565b838111156103c45750506000910152565b600082516108198184602087016107db565b9190910192915050565b60208152600082518060208401526108428160408501602087016107db565b601f01601f1916919091016040019291505056fe416464726573733a206c6f772d6c6576656c2064656c65676174652063616c6c206661696c6564a264697066735822122012bb4f564f73959a03513dc74fc3c6e40e8386e6f02c16b78d6db00ce0aa16af64736f6c63430008090033", - "storage": { - "0xb53127684a568b3173ae13b9f8a6016e243e63b6e8ee1178d6a717850b5d6103": "0x000000000000000000000000e34fe58dda5b8c6d547e4857e987633aa86a5e90", - "0x360894a13ba1a3210667c828492db98dca3e2076cc3735a920a3ca505d382bbc": "0x000000000000000000000000dc64a140aa3e981100a9beca4e685f962f0cf6c9" - } - }, - { - "contractName": "PolygonZkEVMTimelock", - "balance": "0", - "nonce": "1", - "address": "0x0165878A594ca255338adfa4d48449f69242Eb8F", - "bytecode": "0x6080604052600436106101c65760003560e01c806364d62353116100f7578063b1c5f42711610095578063d547741f11610064578063d547741f14610661578063e38335e514610681578063f23a6e6114610694578063f27a0c92146106d957600080fd5b8063b1c5f427146105af578063bc197c81146105cf578063c4d252f514610614578063d45c44351461063457600080fd5b80638f61f4f5116100d15780638f61f4f5146104e157806391d1485414610515578063a217fddf14610566578063b08e51c01461057b57600080fd5b806364d62353146104815780638065657f146104a15780638f2a0bb0146104c157600080fd5b8063248a9ca31161016457806331d507501161013e57806331d50750146103c857806336568abe146103e85780633a6aae7214610408578063584b153e1461046157600080fd5b8063248a9ca3146103475780632ab0f529146103775780632f2ff15d146103a857600080fd5b80630d3cf6fc116101a05780630d3cf6fc1461026b578063134008d31461029f57806313bc9f20146102b2578063150b7a02146102d257600080fd5b806301d5062a146101d257806301ffc9a7146101f457806307bd02651461022957600080fd5b366101cd57005b600080fd5b3480156101de57600080fd5b506101f26101ed366004611c52565b6106ee565b005b34801561020057600080fd5b5061021461020f366004611cc7565b610783565b60405190151
581526020015b60405180910390f35b34801561023557600080fd5b5061025d7fd8aa0f3194971a2a116679f7c2090f6939c8d4e01a2a8d7e41d55e5351469e6381565b604051908152602001610220565b34801561027757600080fd5b5061025d7f5f58e3a2316349923ce3780f8d587db2d72378aed66a8261c916544fa6846ca581565b6101f26102ad366004611d09565b6107df565b3480156102be57600080fd5b506102146102cd366004611d75565b6108d7565b3480156102de57600080fd5b506103166102ed366004611e9a565b7f150b7a0200000000000000000000000000000000000000000000000000000000949350505050565b6040517fffffffff000000000000000000000000000000000000000000000000000000009091168152602001610220565b34801561035357600080fd5b5061025d610362366004611d75565b60009081526020819052604090206001015490565b34801561038357600080fd5b50610214610392366004611d75565b6000908152600160208190526040909120541490565b3480156103b457600080fd5b506101f26103c3366004611f02565b6108fd565b3480156103d457600080fd5b506102146103e3366004611d75565b610927565b3480156103f457600080fd5b506101f2610403366004611f02565b610940565b34801561041457600080fd5b5061043c7f000000000000000000000000000000000000000000000000000000000000000081565b60405173ffffffffffffffffffffffffffffffffffffffff9091168152602001610220565b34801561046d57600080fd5b5061021461047c366004611d75565b6109f8565b34801561048d57600080fd5b506101f261049c366004611d75565b610a0e565b3480156104ad57600080fd5b5061025d6104bc366004611d09565b610ade565b3480156104cd57600080fd5b506101f26104dc366004611f73565b610b1d565b3480156104ed57600080fd5b5061025d7fb09aa5aeb3702cfd50b6b62bc4532604938f21248a27a1d5ca736082b6819cc181565b34801561052157600080fd5b50610214610530366004611f02565b60009182526020828152604080842073ffffffffffffffffffffffffffffffffffffffff93909316845291905290205460ff1690565b34801561057257600080fd5b5061025d600081565b34801561058757600080fd5b5061025d7ffd643c72710c63c0180259aba6b2d05451e3591a24e58b62239378085726f78381565b3480156105bb57600080fd5b5061025d6105ca366004612025565b610d4f565b3480156105db57600080fd5b506103166105ea36600461214e565b7fbc197c810000000000000000000000000000000000000
000000000000000000095945050505050565b34801561062057600080fd5b506101f261062f366004611d75565b610d94565b34801561064057600080fd5b5061025d61064f366004611d75565b60009081526001602052604090205490565b34801561066d57600080fd5b506101f261067c366004611f02565b610e8f565b6101f261068f366004612025565b610eb4565b3480156106a057600080fd5b506103166106af3660046121f8565b7ff23a6e610000000000000000000000000000000000000000000000000000000095945050505050565b3480156106e557600080fd5b5061025d611161565b7fb09aa5aeb3702cfd50b6b62bc4532604938f21248a27a1d5ca736082b6819cc161071881611244565b6000610728898989898989610ade565b90506107348184611251565b6000817f4cf4410cc57040e44862ef0f45f3dd5a5e02db8eb8add648d4b0e236f1d07dca8b8b8b8b8b8a604051610770969594939291906122a6565b60405180910390a3505050505050505050565b60007fffffffff0000000000000000000000000000000000000000000000000000000082167f4e2312e00000000000000000000000000000000000000000000000000000000014806107d957506107d98261139e565b92915050565b600080527fdae2aa361dfd1ca020a396615627d436107c35eff9fe7738a3512819782d70696020527f5ba6852781629bcdcd4bdaa6de76d786f1c64b16acdac474e55bebc0ea157951547fd8aa0f3194971a2a116679f7c2090f6939c8d4e01a2a8d7e41d55e5351469e639060ff1661085c5761085c8133611435565b600061086c888888888888610ade565b905061087881856114ed565b6108848888888861162a565b6000817fc2617efa69bab66782fa219543714338489c4e9e178271560a91b82c3f612b588a8a8a8a6040516108bc94939291906122f1565b60405180910390a36108cd8161172e565b5050505050505050565b6000818152600160205260408120546001811180156108f65750428111155b9392505050565b60008281526020819052604090206001015461091881611244565b61092283836117d7565b505050565b60008181526001602052604081205481905b1192915050565b73ffffffffffffffffffffffffffffffffffffffff811633146109ea576040517f08c379a000000000000000000000000000000000000000000000000000000000815260206004820152602f60248201527f416363657373436f6e74726f6c3a2063616e206f6e6c792072656e6f756e636560448201527f20726f6c657320666f722073656c66000000000000000000000000000000000060648201526084015b60405180910390fd5
b6109f482826118c7565b5050565b6000818152600160208190526040822054610939565b333014610a9d576040517f08c379a000000000000000000000000000000000000000000000000000000000815260206004820152602b60248201527f54696d656c6f636b436f6e74726f6c6c65723a2063616c6c6572206d7573742060448201527f62652074696d656c6f636b00000000000000000000000000000000000000000060648201526084016109e1565b60025460408051918252602082018390527f11c24f4ead16507c69ac467fbd5e4eed5fb5c699626d2cc6d66421df253886d5910160405180910390a1600255565b6000868686868686604051602001610afb969594939291906122a6565b6040516020818303038152906040528051906020012090509695505050505050565b7fb09aa5aeb3702cfd50b6b62bc4532604938f21248a27a1d5ca736082b6819cc1610b4781611244565b888714610bd6576040517f08c379a000000000000000000000000000000000000000000000000000000000815260206004820152602360248201527f54696d656c6f636b436f6e74726f6c6c65723a206c656e677468206d69736d6160448201527f746368000000000000000000000000000000000000000000000000000000000060648201526084016109e1565b888514610c65576040517f08c379a000000000000000000000000000000000000000000000000000000000815260206004820152602360248201527f54696d656c6f636b436f6e74726f6c6c65723a206c656e677468206d69736d6160448201527f746368000000000000000000000000000000000000000000000000000000000060648201526084016109e1565b6000610c778b8b8b8b8b8b8b8b610d4f565b9050610c838184611251565b60005b8a811015610d415780827f4cf4410cc57040e44862ef0f45f3dd5a5e02db8eb8add648d4b0e236f1d07dca8e8e85818110610cc357610cc3612331565b9050602002016020810190610cd89190612360565b8d8d86818110610cea57610cea612331565b905060200201358c8c87818110610d0357610d03612331565b9050602002810190610d15919061237b565b8c8b604051610d29969594939291906122a6565b60405180910390a3610d3a8161240f565b9050610c86565b505050505050505050505050565b60008888888888888888604051602001610d709897969594939291906124f7565b60405160208183030381529060405280519060200120905098975050505050505050565b7ffd643c72710c63c0180259aba6b2d05451e3591a24e58b62239378085726f783610dbe81611244565b610dc7826109f8565b610e53576040517f08c37
9a000000000000000000000000000000000000000000000000000000000815260206004820152603160248201527f54696d656c6f636b436f6e74726f6c6c65723a206f7065726174696f6e20636160448201527f6e6e6f742062652063616e63656c6c656400000000000000000000000000000060648201526084016109e1565b6000828152600160205260408082208290555183917fbaa1eb22f2a492ba1a5fea61b8df4d27c6c8b5f3971e63bb58fa14ff72eedb7091a25050565b600082815260208190526040902060010154610eaa81611244565b61092283836118c7565b600080527fdae2aa361dfd1ca020a396615627d436107c35eff9fe7738a3512819782d70696020527f5ba6852781629bcdcd4bdaa6de76d786f1c64b16acdac474e55bebc0ea157951547fd8aa0f3194971a2a116679f7c2090f6939c8d4e01a2a8d7e41d55e5351469e639060ff16610f3157610f318133611435565b878614610fc0576040517f08c379a000000000000000000000000000000000000000000000000000000000815260206004820152602360248201527f54696d656c6f636b436f6e74726f6c6c65723a206c656e677468206d69736d6160448201527f746368000000000000000000000000000000000000000000000000000000000060648201526084016109e1565b87841461104f576040517f08c379a000000000000000000000000000000000000000000000000000000000815260206004820152602360248201527f54696d656c6f636b436f6e74726f6c6c65723a206c656e677468206d69736d6160448201527f746368000000000000000000000000000000000000000000000000000000000060648201526084016109e1565b60006110618a8a8a8a8a8a8a8a610d4f565b905061106d81856114ed565b60005b8981101561114b5760008b8b8381811061108c5761108c612331565b90506020020160208101906110a19190612360565b905060008a8a848181106110b7576110b7612331565b9050602002013590503660008a8a868181106110d5576110d5612331565b90506020028101906110e7919061237b565b915091506110f78484848461162a565b84867fc2617efa69bab66782fa219543714338489c4e9e178271560a91b82c3f612b588686868660405161112e94939291906122f1565b60405180910390a350505050806111449061240f565b9050611070565b506111558161172e565b50505050505050505050565b60007f000000000000000000000000000000000000000000000000000000000000000073ffffffffffffffffffffffffffffffffffffffff161580159061123257507f0000000000000000000000000000000000000000000
00000000000000000000073ffffffffffffffffffffffffffffffffffffffff166315064c966040518163ffffffff1660e01b8152600401602060405180830381865afa15801561120e573d6000803e3d6000fd5b505050506040513d601f19601f8201168201806040525081019061123291906125be565b1561123d5750600090565b5060025490565b61124e8133611435565b50565b61125a82610927565b156112e7576040517f08c379a000000000000000000000000000000000000000000000000000000000815260206004820152602f60248201527f54696d656c6f636b436f6e74726f6c6c65723a206f7065726174696f6e20616c60448201527f7265616479207363686564756c6564000000000000000000000000000000000060648201526084016109e1565b6112ef611161565b81101561137e576040517f08c379a000000000000000000000000000000000000000000000000000000000815260206004820152602660248201527f54696d656c6f636b436f6e74726f6c6c65723a20696e73756666696369656e7460448201527f2064656c6179000000000000000000000000000000000000000000000000000060648201526084016109e1565b61138881426125e0565b6000928352600160205260409092209190915550565b60007fffffffff0000000000000000000000000000000000000000000000000000000082167f7965db0b0000000000000000000000000000000000000000000000000000000014806107d957507f01ffc9a7000000000000000000000000000000000000000000000000000000007fffffffff000000000000000000000000000000000000000000000000000000008316146107d9565b60008281526020818152604080832073ffffffffffffffffffffffffffffffffffffffff8516845290915290205460ff166109f4576114738161197e565b61147e83602061199d565b60405160200161148f929190612617565b604080517fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe0818403018152908290527f08c379a00000000000000000000000000000000000000000000000000000000082526109e191600401612698565b6114f6826108d7565b611582576040517f08c379a000000000000000000000000000000000000000000000000000000000815260206004820152602a60248201527f54696d656c6f636b436f6e74726f6c6c65723a206f7065726174696f6e20697360448201527f206e6f742072656164790000000000000000000000000000000000000000000060648201526084016109e1565b80158061159e5750600081815260016020819052604090912054145b6109f45
76040517f08c379a000000000000000000000000000000000000000000000000000000000815260206004820152602660248201527f54696d656c6f636b436f6e74726f6c6c65723a206d697373696e67206465706560448201527f6e64656e6379000000000000000000000000000000000000000000000000000060648201526084016109e1565b60008473ffffffffffffffffffffffffffffffffffffffff168484846040516116549291906126e9565b60006040518083038185875af1925050503d8060008114611691576040519150601f19603f3d011682016040523d82523d6000602084013e611696565b606091505b5050905080611727576040517f08c379a000000000000000000000000000000000000000000000000000000000815260206004820152603360248201527f54696d656c6f636b436f6e74726f6c6c65723a20756e6465726c79696e67207460448201527f72616e73616374696f6e2072657665727465640000000000000000000000000060648201526084016109e1565b5050505050565b611737816108d7565b6117c3576040517f08c379a000000000000000000000000000000000000000000000000000000000815260206004820152602a60248201527f54696d656c6f636b436f6e74726f6c6c65723a206f7065726174696f6e20697360448201527f206e6f742072656164790000000000000000000000000000000000000000000060648201526084016109e1565b600090815260016020819052604090912055565b60008281526020818152604080832073ffffffffffffffffffffffffffffffffffffffff8516845290915290205460ff166109f45760008281526020818152604080832073ffffffffffffffffffffffffffffffffffffffff85168452909152902080547fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff001660011790556118693390565b73ffffffffffffffffffffffffffffffffffffffff168173ffffffffffffffffffffffffffffffffffffffff16837f2f8788117e7eff1d82e926ec794901d17c78024a50270940304540a733656f0d60405160405180910390a45050565b60008281526020818152604080832073ffffffffffffffffffffffffffffffffffffffff8516845290915290205460ff16156109f45760008281526020818152604080832073ffffffffffffffffffffffffffffffffffffffff8516808552925280832080547fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff0016905551339285917ff6391f5c32d9c69d2a47ea670b442974b53935d1edc7fd64eb21e047a839171b9190a45050565b60606107d973fffffffffff
fffffffffffffffffffffffffffff831660145b606060006119ac8360026126f9565b6119b79060026125e0565b67ffffffffffffffff8111156119cf576119cf611d8e565b6040519080825280601f01601f1916602001820160405280156119f9576020820181803683370190505b5090507f300000000000000000000000000000000000000000000000000000000000000081600081518110611a3057611a30612331565b60200101907effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916908160001a9053507f780000000000000000000000000000000000000000000000000000000000000081600181518110611a9357611a93612331565b60200101907effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916908160001a9053506000611acf8460026126f9565b611ada9060016125e0565b90505b6001811115611b77577f303132333435363738396162636465660000000000000000000000000000000085600f1660108110611b1b57611b1b612331565b1a60f81b828281518110611b3157611b31612331565b60200101907effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916908160001a90535060049490941c93611b7081612710565b9050611add565b5083156108f6576040517f08c379a000000000000000000000000000000000000000000000000000000000815260206004820181905260248201527f537472696e67733a20686578206c656e67746820696e73756666696369656e7460448201526064016109e1565b803573ffffffffffffffffffffffffffffffffffffffff81168114611c0457600080fd5b919050565b60008083601f840112611c1b57600080fd5b50813567ffffffffffffffff811115611c3357600080fd5b602083019150836020828501011115611c4b57600080fd5b9250929050565b600080600080600080600060c0888a031215611c6d57600080fd5b611c7688611be0565b965060208801359550604088013567ffffffffffffffff811115611c9957600080fd5b611ca58a828b01611c09565b989b979a50986060810135976080820135975060a09091013595509350505050565b600060208284031215611cd957600080fd5b81357fffffffff00000000000000000000000000000000000000000000000000000000811681146108f657600080fd5b60008060008060008060a08789031215611d2257600080fd5b611d2b87611be0565b955060208701359450604087013567ffffffffffffffff811115611d4e57600080fd5b611d5a89828a01611c09565b979a9699509760608101359660809091013595509350505
050565b600060208284031215611d8757600080fd5b5035919050565b7f4e487b7100000000000000000000000000000000000000000000000000000000600052604160045260246000fd5b604051601f82017fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe016810167ffffffffffffffff81118282101715611e0457611e04611d8e565b604052919050565b600082601f830112611e1d57600080fd5b813567ffffffffffffffff811115611e3757611e37611d8e565b611e6860207fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe0601f84011601611dbd565b818152846020838601011115611e7d57600080fd5b816020850160208301376000918101602001919091529392505050565b60008060008060808587031215611eb057600080fd5b611eb985611be0565b9350611ec760208601611be0565b925060408501359150606085013567ffffffffffffffff811115611eea57600080fd5b611ef687828801611e0c565b91505092959194509250565b60008060408385031215611f1557600080fd5b82359150611f2560208401611be0565b90509250929050565b60008083601f840112611f4057600080fd5b50813567ffffffffffffffff811115611f5857600080fd5b6020830191508360208260051b8501011115611c4b57600080fd5b600080600080600080600080600060c08a8c031215611f9157600080fd5b893567ffffffffffffffff80821115611fa957600080fd5b611fb58d838e01611f2e565b909b50995060208c0135915080821115611fce57600080fd5b611fda8d838e01611f2e565b909950975060408c0135915080821115611ff357600080fd5b506120008c828d01611f2e565b9a9d999c50979a969997986060880135976080810135975060a0013595509350505050565b60008060008060008060008060a0898b03121561204157600080fd5b883567ffffffffffffffff8082111561205957600080fd5b6120658c838d01611f2e565b909a50985060208b013591508082111561207e57600080fd5b61208a8c838d01611f2e565b909850965060408b01359150808211156120a357600080fd5b506120b08b828c01611f2e565b999c989b509699959896976060870135966080013595509350505050565b600082601f8301126120df57600080fd5b8135602067ffffffffffffffff8211156120fb576120fb611d8e565b8160051b61210a828201611dbd565b928352848101820192828101908785111561212457600080fd5b83870192505b848310156121435782358252918301919083019061212a565b979650505050505050565b600080600080600060a0868
803121561216657600080fd5b61216f86611be0565b945061217d60208701611be0565b9350604086013567ffffffffffffffff8082111561219a57600080fd5b6121a689838a016120ce565b945060608801359150808211156121bc57600080fd5b6121c889838a016120ce565b935060808801359150808211156121de57600080fd5b506121eb88828901611e0c565b9150509295509295909350565b600080600080600060a0868803121561221057600080fd5b61221986611be0565b945061222760208701611be0565b93506040860135925060608601359150608086013567ffffffffffffffff81111561225157600080fd5b6121eb88828901611e0c565b8183528181602085013750600060208284010152600060207fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe0601f840116840101905092915050565b73ffffffffffffffffffffffffffffffffffffffff8716815285602082015260a0604082015260006122dc60a08301868861225d565b60608301949094525060800152949350505050565b73ffffffffffffffffffffffffffffffffffffffff8516815283602082015260606040820152600061232760608301848661225d565b9695505050505050565b7f4e487b7100000000000000000000000000000000000000000000000000000000600052603260045260246000fd5b60006020828403121561237257600080fd5b6108f682611be0565b60008083357fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe18436030181126123b057600080fd5b83018035915067ffffffffffffffff8211156123cb57600080fd5b602001915036819003821315611c4b57600080fd5b7f4e487b7100000000000000000000000000000000000000000000000000000000600052601160045260246000fd5b60007fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff8203612440576124406123e0565b5060010190565b81835260006020808501808196508560051b810191508460005b878110156124ea57828403895281357fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe18836030181126124a057600080fd5b8701858101903567ffffffffffffffff8111156124bc57600080fd5b8036038213156124cb57600080fd5b6124d686828461225d565b9a87019a9550505090840190600101612461565b5091979650505050505050565b60a0808252810188905260008960c08301825b8b8110156125455773ffffffffffffffffffffffffffffffffffffffff61253084611be0565b16825260209283019290910190600101612
50a565b5083810360208501528881527f07ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff89111561257e57600080fd5b8860051b9150818a602083013701828103602090810160408501526125a69082018789612447565b60608401959095525050608001529695505050505050565b6000602082840312156125d057600080fd5b815180151581146108f657600080fd5b808201808211156107d9576107d96123e0565b60005b8381101561260e5781810151838201526020016125f6565b50506000910152565b7f416363657373436f6e74726f6c3a206163636f756e742000000000000000000081526000835161264f8160178501602088016125f3565b7f206973206d697373696e6720726f6c6520000000000000000000000000000000601791840191820152835161268c8160288401602088016125f3565b01602801949350505050565b60208152600082518060208401526126b78160408501602087016125f3565b601f017fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe0169190910160400192915050565b8183823760009101908152919050565b80820281158282048414176107d9576107d96123e0565b60008161271f5761271f6123e0565b507fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff019056fea2646970667358221220c474c39da3523b28ebfa5fd66c05b42d6ddcc4a57055483bdda32888b366016164736f6c63430008140033", - "storage": { - "0x0000000000000000000000000000000000000000000000000000000000000002": "0x0000000000000000000000000000000000000000000000000000000000000e10", - "0xaedcc9e7897c0d335bdc5d92fe3a8b4f23727fe558cd1c19f332b28716a30559": "0x0000000000000000000000000000000000000000000000000000000000000001", - "0xf5e61edb9c9cc6bfbae4463e9a2b1dd6ac3b44ddef38f18016e56ba0363910d9": "0x0000000000000000000000000000000000000000000000000000000000000001", - "0x64494413541ff93b31aa309254e3fed72a7456e9845988b915b4c7a7ceba8814": "0x5f58e3a2316349923ce3780f8d587db2d72378aed66a8261c916544fa6846ca5", - "0x60b9d94c75b7b3f721925089391e4644cd890cb5e6466f9596dfbd2c54e0b280": "0x0000000000000000000000000000000000000000000000000000000000000001", - "0x3412d5605ac6cd444957cedb533e5dacad6378b4bc819ebe3652188a665066d6": 
"0x5f58e3a2316349923ce3780f8d587db2d72378aed66a8261c916544fa6846ca5", - "0x4b63b79f1e338a49559dcd3193ac9eecc50d0f275d24e97cc8c319e5a31a8bd0": "0x0000000000000000000000000000000000000000000000000000000000000001", - "0xdae2aa361dfd1ca020a396615627d436107c35eff9fe7738a3512819782d706a": "0x5f58e3a2316349923ce3780f8d587db2d72378aed66a8261c916544fa6846ca5", - "0x800d5dfe4bba53eedee06cd4546a27da8de00f12db83f56062976d4493fda899": "0x0000000000000000000000000000000000000000000000000000000000000001", - "0xc3ad33e20b0c56a223ad5104fff154aa010f8715b9c981fd38fdc60a4d1a52fc": "0x5f58e3a2316349923ce3780f8d587db2d72378aed66a8261c916544fa6846ca5" - } - }, - { - "accountName": "keyless Deployer", - "balance": "0", - "nonce": "1", - "address": "0x28BB4e66addE1f042B77E04cf7D3784C1dcDBbA3" - }, - { - "accountName": "deployer", - "balance": "100000000000000000000000", - "nonce": "8", - "address": "0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266" - } - ] - } \ No newline at end of file diff --git a/test/config/test.prover.config.json b/test/config/test.prover.config.json deleted file mode 100644 index 810ddce6..00000000 --- a/test/config/test.prover.config.json +++ /dev/null @@ -1,93 +0,0 @@ -{ - "runExecutorServer": false, - "runExecutorClient": false, - "runExecutorClientMultithread": false, - - "runHashDBServer": false, - "runHashDBTest": false, - - "runAggregatorServer": false, - "runAggregatorClient": true, - "runAggregatorClientMock": false, - "aggregatorClientMockTimeout": 1, - "proverName": "stateless-prover-test", - - "runFileGenBatchProof": false, - "runFileGenAggregatedProof": false, - "runFileGenFinalProof": false, - "runFileProcessBatch": false, - "runFileProcessBatchMultithread": false, - - "runKeccakScriptGenerator": false, - "runKeccakTest": false, - "runStorageSMTest": false, - "runBinarySMTest": false, - "runMemAlignSMTest": false, - "runSHA256Test": false, - "runBlakeTest": false, - - "executeInParallel": true, - "useMainExecGenerated": true, - "saveRequestToFile": false, - 
"saveInputToFile": false, - "saveDbReadsToFile": false, - "saveDbReadsToFileOnChange": false, - "saveOutputToFile": true, - "saveProofToFile": true, - "saveResponseToFile": false, - "loadDBToMemCache": true, - "opcodeTracer": false, - "logRemoteDbReads": false, - "logExecutorServerResponses": false, - - "proverServerPort": 50051, - "proverServerMockPort": 50052, - "proverServerMockTimeout": 10000000, - "proverClientPort": 50051, - "proverClientHost": "127.0.0.1", - - "executorServerPort": 50071, - "executorROMLineTraces": false, - "executorClientPort": 50071, - "executorClientHost": "127.0.0.1", - - "hashDBServerPort": 50061, - "hashDBURL": "local", - - "aggregatorServerPort": 50081, - "aggregatorClientPort": 50081, - "aggregatorClientHost": "zkevm-aggregator", - "aggregatorClientWatchdogTimeout": 120000000, - - "mapConstPolsFile": false, - "mapConstantsTreeFile": false, - - "inputFile": "input_executor_0.json", - "inputFile2": "input_executor_1.json", - - "keccakScriptFile": "config/scripts/keccak_script.json", - "storageRomFile": "config/scripts/storage_sm_rom.json", - - "outputPath": "output", - "configPath": "config", - - "databaseURL": "local", - "dbNodesTableName": "state.nodes", - "dbProgramTableName": "state.program", - "dbMultiWrite": true, - "dbFlushInParallel": false, - "dbMTCacheSize": 1024, - "dbProgramCacheSize": 512, - "dbNumberOfPoolConnections": 30, - "dbGetTree": true, - "cleanerPollingPeriod": 600, - "requestsPersistence": 3600, - "maxExecutorThreads": 256, - "maxProverThreads": 8, - "maxHashDBThreads": 256, - "ECRecoverPrecalc": false, - "ECRecoverPrecalcNThreads": 32, - "stateManager": true, - "useAssociativeCache" : false, - "jsonLogs": false -} \ No newline at end of file diff --git a/test/docker-compose.yml b/test/docker-compose.yml deleted file mode 100644 index 8241d119..00000000 --- a/test/docker-compose.yml +++ /dev/null @@ -1,48 +0,0 @@ -networks: - default: - name: cdk - -services: - cdk-sequence-sender: - container_name: 
cdk-sequence-sender - restart: no - image: cdk - build: . - volumes: - - ./config/test.config.toml:/app/config.toml - - ./config/test.genesis.json:/app/genesis.json - - ./sequencer.keystore:/app/keystore/sequencer.keystore - command: - - "/bin/sh" - - "-c" - - "/app/cdk run --cfg /app/config.toml --network custom --custom-network-file /app/genesis.json --components sequence-sender" - - zkevm-prover: - container_name: zkevm-prover - restart: unless-stopped - image: hermeznetwork/zkevm-prover:v6.0.3-RC16 - volumes: - - ./config/test.prover.config.json:/usr/src/app/config.json - - ~/stateless-aggregator/prover/config:/app/config - command: > - zkProver -c /usr/src/app/config.json - - cdk-l1-sync-db: - container_name: cdk-l1-sync-db - image: postgres:15 - deploy: - resources: - limits: - memory: 2G - reservations: - memory: 1G - ports: - - 5436:5432 - environment: - - POSTGRES_USER=test_user - - POSTGRES_PASSWORD=test_password - - POSTGRES_DB=sync - command: - - "postgres" - - "-N" - - "500" diff --git a/test/run-e2e-multi_pp.sh b/test/run-e2e-multi_pp.sh index f6301356..d8f60286 100755 --- a/test/run-e2e-multi_pp.sh +++ b/test/run-e2e-multi_pp.sh @@ -17,15 +17,15 @@ function ok_or_fatal(){ } function build_docker_if_required(){ - docker images -q cdk:latest > /dev/null + docker images -q aggkit:latest > /dev/null if [ $? -ne 0 ] ; then - echo "Building cdk:latest" + echo "Building aggkit:latest docker image" pushd $BASE_FOLDER/.. make build-docker ok_or_fatal "Failed to build docker image" popd else - echo "docker cdk:latest already exists" + echo "docker image aggkit:latest already exists" fi } @@ -49,7 +49,7 @@ function resolve_template(){ BASE_FOLDER=$(dirname $0) PP1_ORIGIN_CONFIG_FILE=combinations/fork12-pessimistic-multi.yml PP2_ORIGIN_CONFIG_FILE=combinations/fork12-pessimistic-multi-attach-second-cdk.yml -KURTOSIS_ENCLAVE=cdk +KURTOSIS_ENCLAVE=aggkit [ -z $KURTOSIS_FOLDER ] && echo "KURTOSIS_FOLDER is not set" && exit 1 [ ! 
-d $KURTOSIS_FOLDER ] && echo "KURTOSIS_FOLDER is not a directory ($KURTOSIS_FOLDER)" && exit 1 diff --git a/test/run-e2e.sh b/test/run-e2e.sh index 300c5413..dcd3eb49 100755 --- a/test/run-e2e.sh +++ b/test/run-e2e.sh @@ -14,22 +14,22 @@ if [ -z $DATA_AVAILABILITY_MODE ]; then fi BASE_FOLDER=$(dirname $0) -docker images -q cdk:latest > /dev/null +docker images -q aggkit:latest > /dev/null if [ $? -ne 0 ] ; then - echo "Building cdk:latest" + echo "Building aggkit:latest docker image" pushd $BASE_FOLDER/.. make build-docker popd else - echo "docker cdk:latest already exists" + echo "docker image aggkit:latest already exists" fi kurtosis clean --all -echo "Override cdk config file" +echo "Override aggkit config file" cp $BASE_FOLDER/config/kurtosis-cdk-node-config.toml.template $KURTOSIS_FOLDER/templates/trusted-node/cdk-node-config.toml KURTOSIS_CONFIG_FILE="combinations/$FORK-$DATA_AVAILABILITY_MODE.yml" TEMP_CONFIG_FILE=$(mktemp --suffix ".yml") echo "rendering $KURTOSIS_CONFIG_FILE to temp file $TEMP_CONFIG_FILE" go run ../scripts/run_template.go $KURTOSIS_CONFIG_FILE > $TEMP_CONFIG_FILE -kurtosis run --enclave cdk --args-file "$TEMP_CONFIG_FILE" --image-download always $KURTOSIS_FOLDER -rm $TEMP_CONFIG_FILE \ No newline at end of file +kurtosis run --enclave $KURTOSIS_ENCLAVE --args-file "$TEMP_CONFIG_FILE" --image-download always $KURTOSIS_FOLDER +rm $TEMP_CONFIG_FILE diff --git a/test/scripts/agglayer_certificates_monitor.sh b/test/scripts/agglayer_certificates_monitor.sh index c530548f..01b1ec30 100755 --- a/test/scripts/agglayer_certificates_monitor.sh +++ b/test/scripts/agglayer_certificates_monitor.sh @@ -1,11 +1,11 @@ #!/usr/bin/env bash # This script monitors the agglayer certificates progress of pessimistic proof. -function parse_params(){ +function parse_params() { # Check if the required arguments are provided. if [ "$#" -lt 3 ]; then - echo "Usage: $0 " - exit 1 + echo "Usage: $0 " + exit 1 fi # The number of batches to be verified. 
@@ -18,7 +18,7 @@ function parse_params(){ l2_rpc_network_id="$3" } -function check_timeout(){ +function check_timeout() { local _end_time=$1 current_time=$(date +%s) if ((current_time > _end_time)); then @@ -27,8 +27,8 @@ function check_timeout(){ fi } -function check_num_certificates(){ - readonly agglayer_rpc_url="$(kurtosis port print cdk agglayer agglayer)" +function check_num_certificates() { + readonly agglayer_rpc_url="$(kurtosis port print aggkit agglayer agglayer)" cast_output=$(cast rpc --rpc-url "$agglayer_rpc_url" "interop_getLatestKnownCertificateHeader" "$l2_rpc_network_id" 2>&1) @@ -51,17 +51,17 @@ function check_num_certificates(){ echo "[$(date '+%Y-%m-%d %H:%M:%S')] Last known agglayer certificate height: $height, status: $status" >&3 - if (( height > settle_certificates_target - 1 )); then + if ((height > settle_certificates_target - 1)); then echo "[$(date '+%Y-%m-%d %H:%M:%S')] ✅ Success! The number of settled certificates has reached the target." >&3 exit 0 fi - if (( height == settle_certificates_target - 1 )); then + if ((height == settle_certificates_target - 1)); then if [ "$status" == "Settled" ]; then echo "[$(date '+%Y-%m-%d %H:%M:%S')] ✅ Success! The number of settled certificates has reached the target." >&3 exit 0 fi - + echo "[$(date '+%Y-%m-%d %H:%M:%S')] ⚠️ Warning! The number of settled certificates is one less than the target." 
>&3 fi } @@ -73,7 +73,7 @@ function extract_certificate_height() { function extract_certificate_status() { local cast_output="$1" - echo "$cast_output" | jq -r '.status' + echo "$cast_output" | jq -r '.status' } # MAIN diff --git a/test/scripts/batch_verification_monitor.sh b/test/scripts/batch_verification_monitor.sh index a0bfaefd..4b3e9857 100755 --- a/test/scripts/batch_verification_monitor.sh +++ b/test/scripts/batch_verification_monitor.sh @@ -17,7 +17,7 @@ timeout="$2" start_time=$(date +%s) end_time=$((start_time + timeout)) -rpc_url="$(kurtosis port print cdk cdk-erigon-rpc-001 rpc)" +rpc_url="$(kurtosis port print aggkit cdk-erigon-rpc-001 rpc)" while true; do verified_batches="$(cast to-dec "$(cast rpc --rpc-url "$rpc_url" zkevm_verifiedBatchNumber | sed 's/"//g')")" diff --git a/test/scripts/env.sh b/test/scripts/env.sh index 5dbde9de..af9ff936 100644 --- a/test/scripts/env.sh +++ b/test/scripts/env.sh @@ -1,6 +1,6 @@ #!/bin/bash ### Common variables -KURTOSIS_ENCLAVE=cdk +KURTOSIS_ENCLAVE=aggkit TMP_AGGKIT_FOLDER=tmp/aggkit DEST_KURTOSIS_PARAMS_YML=../$TMP_AGGKIT_FOLDER/e2e-params.yml KURTOSIS_FOLDER=${KURTOSIS_FOLDER:=../kurtosis-cdk}